/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "timevar.h"
#include "tree-pass.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;
/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
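
/* Illustrative sketch (editor's example, not part of the original file):
   the simplest use of validate_change.  With IN_GROUP == 0 the edit is
   tested and either kept or backed out on the spot.  The helper name and
   the particular transformation are hypothetical.  */

static int example_validate_single (rtx insn) ATTRIBUTE_UNUSED;

static int
example_validate_single (rtx insn)
{
  rtx set = single_set (insn);

  if (set == 0)
    return 0;

  /* Try to turn INSN's source into (const_int 0).  Because IN_GROUP is
     zero, validate_change re-recognizes INSN immediately and undoes the
     edit itself if INSN no longer matches any pattern.  */
  return validate_change (insn, &SET_SRC (set), const0_rtx, 0);
}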
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change and
   verified with verify_changes.  Update the BB_DIRTY flags of the affected
   blocks, and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  basic_block bb;

  for (i = 0; i < num_changes; i++)
    if (changes[i].object
        && INSN_P (changes[i].object)
        && (bb = BLOCK_FOR_INSN (changes[i].object)))
      bb->flags |= BB_DIRTY;

  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
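
/* Illustrative sketch (editor's example, not part of the original file):
   the grouped-change protocol the routines above implement, in the style
   of combine.c.  Several tentative edits are queued, then either all
   committed or rolled back to a watermark.  The helper name and the
   chosen edits are hypothetical; we assume no enclosing change group.  */

static int example_grouped_changes (rtx insn, rtx new_src, rtx new_dest)
  ATTRIBUTE_UNUSED;

static int
example_grouped_changes (rtx insn, rtx new_src, rtx new_dest)
{
  rtx set = single_set (insn);
  int watermark = num_validated_changes ();

  if (set == 0)
    return 0;

  /* Queue two edits as one group (IN_GROUP == 1); nothing is checked yet.  */
  validate_change (insn, &SET_SRC (set), new_src, 1);
  validate_change (insn, &SET_DEST (set), new_dest, 1);

  /* Commit if INSN is still recognizable, else retract only the edits
     queued since WATERMARK.  */
  if (verify_changes (watermark))
    {
      confirm_change_group ();
      return 1;
    }

  cancel_changes (watermark);
  return 0;
}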
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
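
/* Illustrative sketch (editor's example, not part of the original file):
   the classic use of validate_replace_rtx is constant propagation.  Every
   occurrence of REG inside INSN's pattern is rewritten to VALUE, and the
   rewrite sticks only if the modified INSN is still recognizable.  The
   helper name is hypothetical.  */

static int example_propagate_constant (rtx insn, rtx reg, rtx value)
  ATTRIBUTE_UNUSED;

static int
example_propagate_constant (rtx insn, rtx reg, rtx value)
{
  /* E.g. REG = (reg:SI 123) and VALUE = (const_int 42).  On failure,
     apply_change_group (called internally) cancels every queued edit,
     so INSN is left untouched.  */
  return validate_replace_rtx (reg, value, insn);
}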
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}
/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }

  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                /* Duplicate usage.  */
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will return usually zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
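
/* Illustrative sketch (editor's example, not part of the original file):
   the combine-style use of find_single_use.  If the register set by INSN
   dies in exactly one later insn, we learn both the use site and the
   exact location of the use inside its pattern.  The helper name is
   hypothetical.  */

static rtx example_sole_user (rtx insn) ATTRIBUTE_UNUSED;

static rtx
example_sole_user (rtx insn)
{
  rtx set = single_set (insn);
  rtx use_insn = 0;
  rtx *usep;

  if (set == 0 || !REG_P (SET_DEST (set)))
    return 0;

  usep = find_single_use (SET_DEST (set), insn, &use_insn);

  /* *USEP is the innermost expression containing the lone use; USE_INSN
     is the insn it sits in.  A null result means zero or several uses,
     or that the needed LOG_LINKS/REG_DEAD data isn't available.  */
  return usep ? use_insn : 0;
}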
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */
int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         reference to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
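
/* Illustrative sketch (editor's example, not part of the original file):
   target predicates are normally composed from the primitives above, much
   as the expansions of define_predicate in a machine description are.
   This predicate and its name are hypothetical.  */

static int example_reg_or_immediate_operand (rtx op, enum machine_mode mode)
  ATTRIBUTE_UNUSED;

static int
example_reg_or_immediate_operand (rtx op, enum machine_mode mode)
{
  /* Accept either a register of MODE or a constant valid for MODE.  */
  return register_operand (op, mode) || immediate_operand (op, mode);
}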
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
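
/* Illustrative sketch (editor's example, not part of the original file):
   the rtx shape the two predicates above accept.  On a target where
   PUSH_ROUNDING adds no padding for MODE, a push is simply
   (mem:MODE (STACK_PUSH_CODE (reg sp))), which is what this hypothetical
   constructor builds and push_operand should then accept.  */

static rtx example_gen_push_mem (enum machine_mode mode) ATTRIBUTE_UNUSED;

static rtx
example_gen_push_mem (enum machine_mode mode)
{
  /* STACK_PUSH_CODE is PRE_DEC or PRE_INC, both single-operand codes.  */
  rtx addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  return gen_rtx_MEM (mode, addr);
}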
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
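
/* Illustrative sketch (editor's example, not part of the original file):
   walking an asm insn's operands with the two routines above, much as
   check_asm_operands does.  BODY is assumed to be the PATTERN of an asm
   with operands; the helper name is hypothetical.  */

static void example_dump_asm_operands (rtx body) ATTRIBUTE_UNUSED;

static void
example_dump_asm_operands (rtx body)
{
  int noperands = asm_noperands (body);
  rtx *operands;
  const char **constraints;
  int i;

  if (noperands <= 0)
    return;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  /* Outputs come first, then inputs; we pass NULL for the location and
     mode vectors because we don't need them here.  */
  decode_asm_operands (body, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    fprintf (stderr, "operand %d: constraint \"%s\"\n", i, constraints[i]);
}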
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;

      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
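
/* Illustrative sketch (editor's example, not part of the original file):
   a caller that wants to split a multi-word memory reference must first
   know that OP's address stays valid when offset, which is what the
   predicates above report.  adjust_address is the real GCC helper used
   for the offsetting; the wrapper name is hypothetical.  */

static rtx example_second_word (rtx op) ATTRIBUTE_UNUSED;

static rtx
example_second_word (rtx op)
{
  /* Only offset the reference when all addresses op+0 .. op+size-1 are
     known to be valid (strict form, so post-reload only).  */
  if (!offsettable_memref_p (op))
    return 0;

  return adjust_address (op, word_mode, UNITS_PER_WORD);
}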
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED
                          /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */)
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Like extract_insn, but save insn extracted and don't extract again, when
   called again for the same insn expecting that recog_data still contains the
   valid information.  This is used primarily by the gen_attr infrastructure,
   which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands gets mode from their real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
}
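
/* Illustrative sketch (editor's example, not part of the original file):
   what a pass sees in recog_data after calling extract_insn.  Operand
   values, modes, constraint strings and in/out classification are all
   filled in by the routine above.  The helper name is hypothetical.  */

static void example_dump_recog_data (rtx insn) ATTRIBUTE_UNUSED;

static void
example_dump_recog_data (rtx insn)
{
  int i;

  extract_insn (insn);

  for (i = 0; i < recog_data.n_operands; i++)
    fprintf (stderr, "operand %d: mode %s, constraint \"%s\", %s\n",
             i,
             GET_MODE_NAME (recog_data.operand_mode[i]),
             recog_data.constraints[i],
             recog_data.operand_type[i] == OP_OUT ? "output"
             : recog_data.operand_type[i] == OP_INOUT ? "in/out" : "input");
}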
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
                                 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].cl = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[j].matches = strtoul (p, &end, 10);
                    recog_op_alt[op_alt[j].matches][j].matched = i;
                    p = end;
                  }
                  continue;

                case 'm':
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].is_address = 1;
                  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
                      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
                  break;

                case 'g':
                case 'r':
                  op_alt[j].cl =
                   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
                  break;

                default:
                  if (EXTRA_MEMORY_CONSTRAINT (c, p))
                    {
                      op_alt[j].memory_ok = 1;
                      break;
                    }
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
                    {
                      op_alt[j].is_address = 1;
                      op_alt[j].cl
                        = (reg_class_subunion
                           [(int) op_alt[j].cl]
                           [(int) base_reg_class (VOIDmode, ADDRESS,
                                                  SCRATCH)]);
                      break;
                    }

                  op_alt[j].cl
                    = (reg_class_subunion
                       [(int) op_alt[j].cl]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
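
/* Illustrative sketch (editor's example, not part of the original file):
   consulting the table that preprocess_constraints fills in.  After
   extract_insn plus the call above, recog_op_alt[OPNO][ALT] summarizes
   what alternative ALT allows for operand OPNO without re-parsing the
   constraint string.  The helper name is hypothetical.  */

static int example_operand_allows_memory (int opno, int alt) ATTRIBUTE_UNUSED;

static int
example_operand_allows_memory (int opno, int alt)
{
  const struct operand_alternative *op_alt = &recog_op_alt[opno][alt];

  /* Memory is OK if the alternative says so explicitly, or if it
     accepts anything at all.  */
  return op_alt->memory_ok || op_alt->anything_ok;
}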
2239 /* Check the operands of an insn against the insn's operand constraints
2240 and return 1 if they are valid.
2241 The information about the insn's operands, constraints, operand modes
2242 etc. is obtained from the global variables set up by extract_insn.
2244 WHICH_ALTERNATIVE is set to a number which indicates which
2245 alternative of constraints was matched: 0 for the first alternative,
2246 1 for the next, etc.
2248 In addition, when two operands are required to match
2249 and it happens that the output operand is (reg) while the
2250 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2251 make the output operand look like the input.
2252 This is because the output operand is the one the template will print.
2254 This is used in final, just before printing the assembler code and by
2255 the routines that determine an insn's attribute.
2257 If STRICT is a positive nonzero value, it means that we have been
2258 called after reload has been completed. In that case, we must
2259 do all checks strictly. If it is zero, it means that we have been called
2260 before reload has completed. In that case, we first try to see if we can
2261 find an alternative that matches strictly. If not, we try again, this
2262 time assuming that reload will fix up the insn. This provides a "best
2263 guess" for the alternative and is used to compute attributes of insns prior
2264 to reload. A negative value of STRICT is used for this internal call. */
struct funny_match
{
  int this, other;
};

int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (UNARY_P (op))
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

	      case '?':  case '!':  case '*':  case '%':
	      case '=':  case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (UNARY_P (op1))
			op1 = XEXP (op1, 0);
		      if (UNARY_P (op2))
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case 'm':
		/* Memory operands must be valid, to the extent
		   required by STRICT.  */
		if (MEM_P (op))
		  {
		    if (strict > 0
			&& !strict_memory_address_p (GET_MODE (op),
						     XEXP (op, 0)))
		      break;
		    if (strict == 0
			&& !memory_address_p (GET_MODE (op), XEXP (op, 0)))
		      break;
		    win = 1;
		  }
		/* Before reload, accept what reload can turn into mem.  */
		else if (strict < 0 && CONSTANT_P (op))
		  win = 1;
		/* During reload, accept a pseudo  */
		else if (reload_in_progress && REG_P (op)
			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
		  win = 1;
		break;

	      case '<':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE
		    || (GET_CODE (op) == CONST_VECTOR
			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
		  win = 1;
		break;

	      case 's':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
		if (GET_CODE (op) == CONST_INT
		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
		  win = 1;
		break;

	      case 'V':
		if (MEM_P (op)
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || MEM_P (op)))
			|| (reload_in_progress
			    && !(REG_P (op)
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || MEM_P (op)))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && REG_P (op)
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		{
		  enum reg_class cl;

		  cl = (c == 'r'
			? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
		  if (cl != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)))
			win = 1;
		    }
#ifdef EXTRA_CONSTRAINT_STR
		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
		    win = 1;

		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* During reload, accept a pseudo  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
#endif
		  break;
		}
	      }
	  while (p += len, c);

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0 && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
		  enum machine_mode mode)
{
  int regno = REGNO (operand);
  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
			    regno + offset))
    {
      int sr;
      regno += offset;
      for (sr = hard_regno_nregs[regno][mode] - 1;
	   sr > 0; sr--)
	if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
				 regno + sr))
	  break;
      return sr == 0;
    }

  return 0;
}
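/* Illustrative only, not part of recog.c: for a value spanning several
   hard registers, reg_fits_class_p is true only when every register the
   value occupies is in the class.  A hypothetical query (the register
   number and mode are made up for the example):  */
#if 0
  rtx r = gen_rtx_REG (DImode, 4);
  /* True only if each hard reg that DImode starting at 4 occupies
     (e.g. regs 4 and 5 on a 32-bit target) is in GENERAL_REGS.  */
  int ok = reg_fits_class_p (r, GENERAL_REGS, 0, DImode);
#endif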
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);

  if (last == insn)
    return NULL_RTX;

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }
  return last;
}
/* Split all insns in the function.  If UPD_LIFE, update life info after.  */

void
split_all_insns (int upd_life)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles REG_NO_CONFLICT blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    {
		      /* If the no-op set has a REG_UNUSED note, we need
			 to update liveness information.  */
		      if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
			{
			  SET_BIT (blocks, bb->index);
			  changed = true;
			}
		      /* ??? Is life info affected by deleting edges?  */
		      delete_insn_and_edges (insn);
		    }
		}
	      else
		{
		  rtx last = split_insn (insn);
		  if (last)
		    {
		      /* The split sequence may include barrier, but the
			 BB boundary we are interested in will be set to
			 previous one.  */

		      while (BARRIER_P (last))
			last = PREV_INSN (last);
		      SET_BIT (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  if (changed)
    {
      int old_last_basic_block = last_basic_block;

      find_many_sub_basic_blocks (blocks);

      if (old_last_basic_block != last_basic_block && upd_life)
	blocks = sbitmap_resize (blocks, last_basic_block, 1);
    }

  if (changed && upd_life)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
		      PROP_DEATH_NOTES);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

void
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles REG_NO_CONFLICT blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
}
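/* Illustrative only, not part of recog.c: a target's machine-dependent
   reorg pass runs without a valid CFG, so it would use the _noflow
   variant.  The function name below is hypothetical.  */
#if 0
static void
example_machine_dependent_reorg (void)
{
  /* No CFG is maintained at this point, so walk the plain insn chain
     instead of iterating over basic blocks.  */
  split_all_insns_noflow ();
}
#endif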
#ifdef HAVE_peephole2
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;
/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   global_live_at_end for the block.  */
#define PEEP2_EOB	pc_rtx

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  return peep2_insn_data[n].insn;
}
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}
/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
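/* Illustrative only, not part of recog.c: machine descriptions query
   these predicates from define_peephole2 conditions, where operands[]
   holds the matched operands.  A hypothetical condition asking whether
   operands[0] is dead before the third insn of the matched window:  */
#if 0
  int dead = peep2_reg_dead_p (2, operands[0]);
#endif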
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class cl;
  HARD_REG_SET live;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
	: REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
	continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      success = 1;
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
	    SET_HARD_REG_BIT (*reg_set, regno + j);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
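/* Illustrative only, not part of recog.c: a define_peephole2 body in a
   machine description might grab a scratch register this way.  Per the
   parsing above, "r" asks for GENERAL_REGS; the offsets are relative to
   the matched insn window.  */
#if 0
  {
    HARD_REG_SET used;
    rtx scratch;

    CLEAR_HARD_REG_SET (used);
    /* A register free from the first matched insn (offset 0) through
       the third (offset 2), in SImode; NULL_RTX if none exists.  */
    scratch = peep2_find_free_register (0, 2, "r", SImode, &used);
  }
#endif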
/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx insn, prev;
  regset live;
  int i;
  basic_block bb;
#ifdef HAVE_conditional_execution
  sbitmap blocks;
  bool changed;
#endif
  bool do_cleanup_cfg = false;
  bool do_global_life_update = false;
  bool do_rebuild_jump_labels = false;

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = ALLOC_REG_SET (&reg_obstack);
  live = ALLOC_REG_SET (&reg_obstack);

#ifdef HAVE_conditional_execution
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;
#else
  count_or_remove_death_notes (NULL, 1);
#endif

  FOR_EACH_BB_REVERSE (bb)
    {
      struct propagate_block_info *pbi;
      reg_set_iterator rsi;
      unsigned int j;

      /* Indicate that all slots except the last holds invalid data.  */
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
	peep2_insn_data[i].insn = NULL_RTX;
      peep2_current_count = 0;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      COPY_REG_SET (live, bb->il.rtl->global_live_at_end);
      COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);

#ifdef HAVE_conditional_execution
      pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
#else
      pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
#endif

      for (insn = BB_END (bb); ; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (INSN_P (insn))
	    {
	      rtx try, before_try, x;
	      int match_len;
	      rtx note;
	      bool was_call = false;

	      /* Record this insn.  */
	      if (--peep2_current < 0)
		peep2_current = MAX_INSNS_PER_PEEP2;
	      if (peep2_current_count < MAX_INSNS_PER_PEEP2
		  && peep2_insn_data[peep2_current].insn == NULL_RTX)
		peep2_current_count++;
	      peep2_insn_data[peep2_current].insn = insn;
	      propagate_one_insn (pbi, insn);
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

	      if (RTX_FRAME_RELATED_P (insn))
		{
		  /* If an insn has RTX_FRAME_RELATED_P set, peephole
		     substitution would lose the
		     REG_FRAME_RELATED_EXPR that is attached.  */
		  peep2_current_count = 0;
		  try = NULL;
		}
	      else
		/* Match the peephole.  */
		try = peephole2_insns (PATTERN (insn), insn, &match_len);

	      if (try != NULL)
		{
		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
		     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
		     cfg-related call notes.  */
		  for (i = 0; i <= match_len; ++i)
		    {
		      int j;
		      rtx old_insn, new_insn, note;

		      j = i + peep2_current;
		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
			j -= MAX_INSNS_PER_PEEP2 + 1;
		      old_insn = peep2_insn_data[j].insn;
		      if (!CALL_P (old_insn))
			continue;
		      was_call = true;

		      new_insn = try;
		      while (new_insn != NULL_RTX)
			{
			  if (CALL_P (new_insn))
			    break;
			  new_insn = NEXT_INSN (new_insn);
			}

		      gcc_assert (new_insn != NULL_RTX);

		      CALL_INSN_FUNCTION_USAGE (new_insn)
			= CALL_INSN_FUNCTION_USAGE (old_insn);

		      for (note = REG_NOTES (old_insn);
			   note;
			   note = XEXP (note, 1))
			switch (REG_NOTE_KIND (note))
			  {
			  case REG_NORETURN:
			  case REG_SETJMP:
			    REG_NOTES (new_insn)
			      = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
						   XEXP (note, 0),
						   REG_NOTES (new_insn));
			  default:
			    /* Discard all other reg notes.  */
			    break;
			  }

		      /* Croak if there is another call in the sequence.  */
		      while (++i <= match_len)
			{
			  j = i + peep2_current;
			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
			    j -= MAX_INSNS_PER_PEEP2 + 1;
			  old_insn = peep2_insn_data[j].insn;
			  gcc_assert (!CALL_P (old_insn));
			}
		      break;
		    }

		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

		  note = find_reg_note (peep2_insn_data[i].insn,
					REG_EH_REGION, NULL_RTX);

		  /* Replace the old sequence with the new.  */
		  try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
						INSN_LOCATOR (peep2_insn_data[i].insn));
		  before_try = PREV_INSN (insn);
		  delete_insn_chain (insn, peep2_insn_data[i].insn);

		  /* Re-insert the EH_REGION notes.  */
		  if (note || (was_call && nonlocal_goto_handler_labels))
		    {
		      edge eh_edge;
		      edge_iterator ei;

		      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
			  break;

		      for (x = try ; x != before_try ; x = PREV_INSN (x))
			if (CALL_P (x)
			    || (flag_non_call_exceptions
				&& may_trap_p (PATTERN (x))
				&& !find_reg_note (x, REG_EH_REGION, NULL)))
			  {
			    if (note)
			      REG_NOTES (x)
				= gen_rtx_EXPR_LIST (REG_EH_REGION,
						     XEXP (note, 0),
						     REG_NOTES (x));

			    if (x != BB_END (bb) && eh_edge)
			      {
				edge nfte, nehe;
				int flags;

				nfte = split_block (bb, x);
				flags = (eh_edge->flags
					 & (EDGE_EH | EDGE_ABNORMAL));
				if (CALL_P (x))
				  flags |= EDGE_ABNORMAL_CALL;
				nehe = make_edge (nfte->src, eh_edge->dest,
						  flags);

				nehe->probability = eh_edge->probability;
				nfte->probability
				  = REG_BR_PROB_BASE - nehe->probability;

				do_cleanup_cfg |= purge_dead_edges (nfte->dest);
#ifdef HAVE_conditional_execution
				SET_BIT (blocks, nfte->dest->index);
				changed = true;
#endif
				bb = nfte->src;
				eh_edge = nehe;
			      }
			  }

		      /* Converting possibly trapping insn to non-trapping is
			 possible.  Zap dummy outgoing edges.  */
		      do_cleanup_cfg |= purge_dead_edges (bb);
		    }

#ifdef HAVE_conditional_execution
		  /* With conditional execution, we cannot back up the
		     live information so easily, since the conditional
		     death data structures are not so self-contained.
		     So record that we've made a modification to this
		     block and update life information at the end.  */
		  SET_BIT (blocks, bb->index);
		  changed = true;

		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
		    peep2_insn_data[i].insn = NULL_RTX;
		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
		  peep2_current_count = 0;
#else
		  /* Back up lifetime information past the end of the
		     newly created sequence.  */
		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
		    i = 0;
		  COPY_REG_SET (live, peep2_insn_data[i].live_before);

		  /* Update life information for the new sequence.  */
		  x = try;
		  do
		    {
		      if (INSN_P (x))
			{
			  if (--i < 0)
			    i = MAX_INSNS_PER_PEEP2;
			  if (peep2_current_count < MAX_INSNS_PER_PEEP2
			      && peep2_insn_data[i].insn == NULL_RTX)
			    peep2_current_count++;
			  peep2_insn_data[i].insn = x;
			  propagate_one_insn (pbi, x);
			  COPY_REG_SET (peep2_insn_data[i].live_before, live);
			}
		      x = PREV_INSN (x);
		    }
		  while (x != prev);

		  /* ??? Should verify that LIVE now matches what we
		     had before the new sequence.  */

		  peep2_current = i;
#endif

		  /* If we generated a jump instruction, it won't have
		     JUMP_LABEL set.  Recompute after we're done.  */
		  for (x = try; x != before_try; x = PREV_INSN (x))
		    if (JUMP_P (x))
		      {
			do_rebuild_jump_labels = true;
			break;
		      }
		}
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}

      /* Some peepholes can decide they don't need one or more of their
	 inputs.  If this happens, local life update is not enough.  */
      EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->il.rtl->global_live_at_start, live,
				      0, j, rsi)
	{
	  do_global_life_update = true;
	  break;
	}

      free_propagate_block_info (pbi);
    }

  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    FREE_REG_SET (peep2_insn_data[i].live_before);
  FREE_REG_SET (live);

  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());

  /* If we eliminated EH edges, we may be able to merge blocks.  Further,
     we've changed global life since exception handlers are no longer
     reachable.  */
  if (do_cleanup_cfg)
    {
      cleanup_cfg (0);
      do_global_life_update = true;
    }
  if (do_global_life_update)
    update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
#ifdef HAVE_conditional_execution
  else if (changed)
    {
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }
  sbitmap_free (blocks);
#endif
}
#endif /* HAVE_peephole2 */
/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN must be
   single_set.  OUT_INSN must be either a single_set or a PARALLEL with
   SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  gcc_assert (in_set);

  if (!MEM_P (SET_DEST (in_set)))
    return false;

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
	    return false;
	}
    }

  return true;
}
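/* Illustrative only, not part of recog.c: store_data_bypass_p answers
   true when the producer feeds the stored value rather than the address.
   A hypothetical pair built by hand (hard reg numbers and SImode are
   made up for the example):  */
#if 0
  {
    rtx r0 = gen_rtx_REG (SImode, 0);
    rtx r1 = gen_rtx_REG (SImode, 1);
    /* out: (set (reg 0) (reg 1))  */
    rtx out = make_insn_raw (gen_rtx_SET (VOIDmode, r0, r1));
    /* in:  (set (mem (reg 1)) (reg 0)) -- reg 0 is the store data,
       reg 1 only forms the address, so the bypass applies.  */
    rtx in = make_insn_raw (gen_rtx_SET (VOIDmode,
					 gen_rtx_MEM (SImode, r1), r0));
    int bypass = store_data_bypass_p (out, in);  /* expect true */
  }
#endif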
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  /* OUT_SET is null in this branch; each SET in the PARALLEL
	     must be checked against the THEN and ELSE arms.  */
	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}
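/* Illustrative only, not part of recog.c: a hypothetical pair where the
   producer feeds a value arm of the conditional move, so no bypass
   applies (register numbers and modes are made up):  */
#if 0
  {
    rtx r0 = gen_rtx_REG (SImode, 0);
    rtx r1 = gen_rtx_REG (SImode, 1);
    rtx r2 = gen_rtx_REG (SImode, 2);
    rtx cmp = gen_rtx_EQ (VOIDmode, r1, const0_rtx);
    /* out: (set (reg 0) (reg 1))  */
    rtx out = make_insn_raw (gen_rtx_SET (VOIDmode, r0, r1));
    /* in: (set (reg 2) (if_then_else (eq (reg 1) 0) (reg 0) (reg 1)));
       reg 0 feeds the THEN arm, not the condition.  */
    rtx in = make_insn_raw (gen_rtx_SET (VOIDmode, r2,
		gen_rtx_IF_THEN_ELSE (SImode, cmp, r0, r1)));
    int bypass = if_test_bypass_p (out, in);  /* expect false */
  }
#endif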
static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}

static void
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
}

struct tree_opt_pass pass_peephole2 =
{
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
};

static void
rest_of_handle_split_all_insns (void)
{
  split_all_insns (1);
}

struct tree_opt_pass pass_split_all_insns =
{
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
};

/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

struct tree_opt_pass pass_split_for_shorten_branches =
{
  "split3",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_SHORTEN_BRANCH,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
};

static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

struct tree_opt_pass pass_split_before_regstack =
{
  "split2",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_SHORTEN_BRANCH,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
};