/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "tree-pass.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;
/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;
/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
	c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
	return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
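
/* Illustrative sketch, not part of the original source: a typical grouped
   use of validate_change.  With IN_GROUP = 1 each call merely queues the
   replacement; apply_change_group (defined below) then validates the whole
   group and either confirms or cancels it.  INSN, PAT and NEW_REG are
   hypothetical names used only for this example.

       validate_change (insn, &XEXP (pat, 0), new_reg, 1);
       validate_change (insn, &XEXP (pat, 1), const0_rtx, 1);
       if (!apply_change_group ())
         ... INSN is unchanged; the whole group was cancelled ...  */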
/* Function to be passed to for_each_rtx to test whether a piece of
   RTL contains any mem/v.  */
static int
volatile_mem_p (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  return (MEM_P (*x) && MEM_VOLATILE_P (*x));
}
/* Same as validate_change, but doesn't support groups, and it accepts
   volatile mems if they're already present in the original insn.  */

int
validate_change_maybe_volatile (rtx object, rtx *loc, rtx new)
{
  int result;

  if (validate_change (object, loc, new, 0))
    return 1;

  if (volatile_ok
      /* If there isn't a volatile MEM, there's nothing we can do.  */
      || !for_each_rtx (&PATTERN (object), volatile_mem_p, 0)
      /* Make sure we're not adding or removing volatile MEMs.  */
      || for_each_rtx (loc, volatile_mem_p, 0)
      || for_each_rtx (&new, volatile_mem_p, 0)
      || !insn_invalid_p (object))
    return 0;

  volatile_ok = 1;

  gcc_assert (!insn_invalid_p (object));

  result = validate_change (object, loc, new, 0);

  volatile_ok = 0;

  return result;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change and
   verified with verify_changes.  Update the BB_DIRTY flags of the affected
   blocks, and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  basic_block bb;

  for (i = 0; i < num_changes; i++)
    if (changes[i].object
	&& INSN_P (changes[i].object)
	&& (bb = BLOCK_FOR_INSN (changes[i].object)))
      bb->flags |= BB_DIRTY;

  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
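
/* Illustrative sketch, not part of the original source: verify_changes and
   cancel_changes support speculative transformation.  NUM is the value
   num_validated_changes returned before the tentative change was queued;
   INSN and NEW_SRC are hypothetical names used only for this example.

       int num = num_validated_changes ();
       validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
       if (!verify_changes (num))
         cancel_changes (num);   .. retract only our tentative change ..  */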
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
		       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
				       : swap_condition (code),
				       GET_MODE (x), XEXP (x, 1),
				       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new)
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
	validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
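
/* Illustrative example, not part of the original source: a pass that has
   proven a pseudo holds a known constant might try

       if (validate_replace_rtx (pseudo_reg, GEN_INT (42), use_insn))
         ... every use of PSEUDO_REG in USE_INSN is now 42 ...

   where PSEUDO_REG and USE_INSN are hypothetical.  The call fails, leaving
   the insn untouched, if the result no longer matches any pattern.  */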
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
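
/* Illustrative note, not part of the original source: the struct-plus-
   callback idiom above is how extra arguments are threaded through
   note_uses, whose callback receives only (rtx *, void *).  Unlike
   validate_replace_rtx, validate_replace_src_group only queues changes;
   the caller must still invoke apply_change_group.  */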
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && !REG_P (SET_DEST (x))
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& REG_P (SUBREG_REG (SET_DEST (x)))
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (REG_P (dest) && REG_P (XEXP (x, i))
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (REG_P (dest)
		      && REG_P (XVECEXP (x, i, j))
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		return 0;
	    }
	}
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will return usually zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
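
/* Illustrative sketch, not part of the original source: a combine-like pass
   might use find_single_use to decide whether a computation can be folded
   into its only consumer.  DEF_INSN is a hypothetical name.

       rtx use_insn;
       rtx *usep = find_single_use (SET_DEST (PATTERN (def_insn)),
                                    def_insn, &use_insn);
       if (usep)
         ... *usep is the unique use; substitution into USE_INSN is safe ...  */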
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
	return 1;
    }

  return 0;
}
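
/* Illustrative examples, not part of the original source; exact results
   depend on the target's legitimacy macros.

       general_operand (gen_rtx_REG (SImode, 100), SImode)  -> 1 (pseudo reg)
       general_operand (GEN_INT (5), SImode)                -> 1 (constant)
       general_operand (GEN_INT (5), SFmode)                -> 0 (CONST_INT
                                                 rejected for a float mode) */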
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (GET_CODE (op) == CONST_INT
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
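
/* Illustrative example, not part of the original source: on a target where
   the stack grows downward and STACK_PUSH_CODE is PRE_DEC, push_operand
   matches RTL of the shape

       (mem:SI (pre_dec:SI (reg:SI sp)))

   i.e. a store that first decrements the stack pointer.  */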
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
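
/* Illustrative example, not part of the original source: for

       asm ("..." : "=r" (a), "=r" (b) : "r" (c));

   BODY is a PARALLEL of two SETs whose sources share one ASM_OPERANDS,
   so asm_noperands returns 2 outputs + 1 input = 3.  */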
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
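
/* Illustrative note, not part of the original source: callers pass 0 for
   any vector they do not need, as check_asm_operands above does:

       const char *tmpl
         = decode_asm_operands (body, operands, NULL, constraints, NULL);  */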
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;
	case '=':
	case '+':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  do
	    constraint++;
	  while (ISDIGIT (*constraint));
	  if (! result)
	    result = -1;
	  continue;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    result = 1;
	  break;

	case '>':
	  if (MEM_P (op)
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    result = 1;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
	    result = 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
	    result = 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
	    result = 1;
	  break;

	case 's':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* Fall through.  */

	case 'i':
	  if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
	    result = 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    result = 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
	    result = 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
	    result = 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
	    result = 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
	    result = 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
	    result = 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
	    result = 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
	    result = 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
	    result = 1;
	  break;

	case 'X':
	  result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		result = 1;
	    }
#ifdef EXTRA_CONSTRAINT_STR
	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
	    result = 1;
	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
		   /* Every memory operand can be reloaded to fit.  */
		   && memory_operand (op, VOIDmode))
	    result = 1;
	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
		   /* Every address operand can be reloaded to fit.  */
		   && address_operand (op, VOIDmode))
	    result = 1;
#endif
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
    }

  return result;
}
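
/* Illustrative example, not part of the original source: for OP equal to
   GEN_INT (3), asm_operand_ok (op, "rm") returns 0 because a constant is
   neither a register nor a memory reference, while asm_operand_ok (op, "i")
   returns 1.  */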
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
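
/* Illustrative example, not part of the original source: for a 4-byte
   SImode reference to (plus (reg X) (const_int 12)), the code above
   temporarily rewrites the constant term to 15 (12 + mode_sz - 1) and asks
   whether that address is still valid in QImode; if so, every byte of the
   4-byte reference is addressable.  */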
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */)
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}

/* Like extract_insn, but save insn extracted and don't extract again, when
   called again for the same insn expecting that recog_data still contains the
   valid information.  This is used primarily by the gen_attr infrastructure
   that often does extract insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode);
	  if (noperands > 0)
	    {
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
}
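
/* Illustrative sketch, not part of the original source: a late pass that
   needs per-operand data typically writes

       extract_insn (insn);
       if (!constrain_operands (reload_completed))
         fatal_insn_not_found (insn);

   after which recog_data.operand[], recog_data.operand_mode[] and
   which_alternative describe INSN.  */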
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
				 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
	{
	  op_alt[j].cl = NO_REGS;
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[j].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p;
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[j].reject += 6;
		  break;
		case '!':
		  op_alt[j].reject += 600;
		  break;
		case '&':
		  op_alt[j].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    char *end;
		    op_alt[j].matches = strtoul (p, &end, 10);
		    recog_op_alt[op_alt[j].matches][j].matched = i;
		    p = end;
		  }
		  continue;

		case 'm':
		  op_alt[j].memory_ok = 1;
		  break;
		case '<':
		  op_alt[j].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[j].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[j].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[j].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[j].anything_ok = 1;
		  break;

		case 'p':
		  op_alt[j].is_address = 1;
		  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
		    [(int) MODE_BASE_REG_CLASS (VOIDmode)];
		  break;

		case 'g':
		case 'r':
		  op_alt[j].cl =
		    reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
		  break;

		default:
		  if (EXTRA_MEMORY_CONSTRAINT (c, p))
		    {
		      op_alt[j].memory_ok = 1;
		      break;
		    }
		  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
		    {
		      op_alt[j].is_address = 1;
		      op_alt[j].cl
			= (reg_class_subunion
			   [(int) op_alt[j].cl]
			   [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
		      break;
		    }

		  op_alt[j].cl
		    = (reg_class_subunion
		       [(int) op_alt[j].cl]
		       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
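
/* Illustrative example, not part of the original source: for an operand
   whose constraint string is "r,m", the loop above records alternative 0
   with cl == GENERAL_REGS and alternative 1 with memory_ok == 1, letting
   later passes consult recog_op_alt[opno][alt] instead of re-parsing the
   string.  */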
2245 /* Check the operands of an insn against the insn's operand constraints
2246 and return 1 if they are valid.
2247 The information about the insn's operands, constraints, operand modes
2248 etc. is obtained from the global variables set up by extract_insn.
2250 WHICH_ALTERNATIVE is set to a number which indicates which
2251 alternative of constraints was matched: 0 for the first alternative,
2252 1 for the next, etc.
2254 In addition, when two operands are required to match
2255 and it happens that the output operand is (reg) while the
2256 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2257 make the output operand look like the input.
2258 This is because the output operand is the one the template will print.
2260 This is used in final, just before printing the assembler code and by
2261 the routines that determine an insn's attribute.
2263 If STRICT is a positive nonzero value, it means that we have been
2264 called after reload has been completed. In that case, we must
2265 do all checks strictly. If it is zero, it means that we have been called
2266 before reload has completed. In that case, we first try to see if we can
2267 find an alternative that matches strictly. If not, we try again, this
2268 time assuming that reload will fix up the insn. This provides a "best
2269 guess" for the alternative and is used to compute attributes of insns prior
2270 to reload. A negative value of STRICT is used for this internal call. */
int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

#ifndef KEEP_UNARY_OPERATORS_AT_CONSTRAINT_CHECKING
	  /* This macro and the code within is slated for removal in
	     4.2, hence not documented further than in this comment.
	     It only makes a difference if both an insn operand
	     predicate is absent or allows unary operators and its
	     constraints are present.  See gcc-patches mailing list
	     <URL:http://gcc.gnu.org/ml/gcc-patches/2005-10/msg00940.html>.  */

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (UNARY_P (op))
	    op = XEXP (op, 0);
#endif

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

	      case '?':  case '!': case '*':  case '%':
	      case '=':  case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

#ifndef KEEP_UNARY_OPERATORS_AT_CONSTRAINT_CHECKING
		      /* See comment at similar #ifndef above.  */

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (UNARY_P (op1))
			op1 = XEXP (op1, 0);
		      if (UNARY_P (op2))
			op2 = XEXP (op2, 0);
#endif

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case 'm':
		/* Memory operands must be valid, to the extent
		   required by STRICT.  */
		if (MEM_P (op))
		  {
		    if (strict > 0
			&& !strict_memory_address_p (GET_MODE (op),
						     XEXP (op, 0)))
		      break;
		    if (strict == 0
			&& !memory_address_p (GET_MODE (op), XEXP (op, 0)))
		      break;
		    win = 1;
		  }
		/* Before reload, accept what reload can turn into mem.  */
		else if (strict < 0 && CONSTANT_P (op))
		  win = 1;
		/* During reload, accept a pseudo  */
		else if (reload_in_progress && REG_P (op)
			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
		  win = 1;
		break;

	      case '<':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (MEM_P (op)
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE
		    || (GET_CODE (op) == CONST_VECTOR
			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
		  win = 1;
		break;

	      case 's':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
		if (GET_CODE (op) == CONST_INT
		    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
		  win = 1;
		break;

	      case 'V':
		if (MEM_P (op)
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || MEM_P (op)))
			|| (reload_in_progress
			    && !(REG_P (op)
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || MEM_P (op)))
		    /* During reload, accept a pseudo  */
		    || (reload_in_progress && REG_P (op)
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		{
		  enum reg_class cl;

		  cl = (c == 'r'
			? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
		  if (cl != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)))
			win = 1;
		    }
#ifdef EXTRA_CONSTRAINT_STR
		  else if (EXTRA_CONSTRAINT_STR (op, c, p))
		    win = 1;

		  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* During reload, accept a pseudo  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
#endif
		  break;
		}
	      }
	  while (p += len, c);

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0 && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
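
/* A sketch of the typical calling sequence used elsewhere in the
   compiler (shown here only for orientation; the error handling is one
   common choice, not the only one):

   extract_insn (insn);
   if (! constrain_operands (reload_completed))
     fatal_insn_not_found (insn);

   After a successful call, which_alternative identifies the matched
   alternative and can be consulted when computing insn attributes.  */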
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
		  enum machine_mode mode)
{
  int regno = REGNO (operand);
  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
			    regno + offset))
    {
      int sr;
      regno += offset;
      for (sr = hard_regno_nregs[regno][mode] - 1;
	   sr > 0; sr--)
	if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
				 regno + sr))
	  break;
      return sr == 0;
    }

  return 0;
}
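
/* Worked example (illustrative; the register numbering is
   target-dependent): on a 32-bit target where DImode occupies two hard
   registers, a call such as

   ok = reg_fits_class_p (operand, GENERAL_REGS, 0, DImode);

   for (reg:SI 3) tests both hard regs 3 and 4 against
   reg_class_contents[GENERAL_REGS]; if reg 4 is not in the class, the
   result is 0.  */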
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);

  if (last == insn)
    return NULL_RTX;

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }
  return last;
}
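
/* try_split applies the target's define_split patterns.  A generic md
   example of the kind of splitter this helper drives (illustrative
   only; the predicates, modes and the split_di helper are made up and
   target-specific):

   (define_split
     [(set (match_operand:DI 0 "register_operand" "")
	   (match_operand:DI 1 "register_operand" ""))]
     "reload_completed"
     [(set (match_dup 2) (match_dup 3))
      (set (match_dup 4) (match_dup 5))]
     "split_di (operands, 2, operands + 2, operands + 4);")  */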
/* Split all insns in the function.  If UPD_LIFE, update life info after.  */

void
split_all_insns (int upd_life)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles REG_NO_CONFLICT blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    {
		      /* If the no-op set has a REG_UNUSED note, we need
			 to update liveness information.  */
		      if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
			{
			  SET_BIT (blocks, bb->index);
			  changed = true;
			}
		      /* ??? Is life info affected by deleting edges?  */
		      delete_insn_and_edges (insn);
		    }
		}
	      else
		{
		  rtx last = split_insn (insn);
		  if (last)
		    {
		      /* The split sequence may include barrier, but the
			 BB boundary we are interested in will be set to
			 the previous one.  */

		      while (BARRIER_P (last))
			last = PREV_INSN (last);
		      SET_BIT (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  if (changed)
    {
      int old_last_basic_block = last_basic_block;

      find_many_sub_basic_blocks (blocks);

      if (old_last_basic_block != last_basic_block && upd_life)
	blocks = sbitmap_resize (blocks, last_basic_block, 1);
    }

  if (changed && upd_life)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
		      PROP_DEATH_NOTES);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

void
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles REG_NO_CONFLICT blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
}
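
/* Sketch of a caller (the target hook body is hypothetical): a
   machine-dependent reorg pass that rewrites insns after the CFG has
   been torn down can finish by splitting whatever it emitted:

   static void
   example_machine_reorg (void)
   {
     ...rewrite or emit new insns...
     split_all_insns_noflow ();
   }  */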
#ifdef HAVE_peephole2
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;
/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   global_live_at_end for the block.  */
#define PEEP2_EOB	pc_rtx

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */
rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  return peep2_insn_data[n].insn;
}
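
/* The peep2_insn_data buffer is circular, hence the wrap-around above.
   For example, if the target's generated MAX_INSNS_PER_PEEP2 is 5 and
   peep2_current is 4, then peep2_next_insn (3) computes index
   4 + 3 = 7, which wraps to 7 - 6 = 1.  */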
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}
/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
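
/* These predicates are typically used in define_peephole2 conditions.
   An illustrative md fragment (the pattern and operand numbers are
   made up, not taken from any real port):

   (define_peephole2
     [(set (match_operand:SI 0 "register_operand" "")
	   (match_operand:SI 1 "register_operand" ""))
      (set (match_dup 1) (match_dup 0))]
     "peep2_reg_dead_p (2, operands[1])"
     [(set (match_dup 0) (match_dup 1))])

   Here the second move may be dropped only if operand 1 is dead after
   the two matched insns.  */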
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class cl;
  HARD_REG_SET live;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
	: REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
	continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      success = 1;
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
	    SET_HARD_REG_BIT (*reg_set, regno + j);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
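
/* Typical md-level use (an illustrative sketch): a define_peephole2
   that needs a scratch register requests one with match_scratch, e.g.

   (define_peephole2
     [(match_scratch:SI 2 "r")
      (set (match_operand:SI 0 "memory_operand" "")
	   (match_operand:SI 1 "memory_operand" ""))]
     "..."
     [(set (match_dup 2) (match_dup 1))
      (set (match_dup 0) (match_dup 2))])

   The generated recognizer implements match_scratch by calling
   peep2_find_free_register with the constraint string, the insn range
   being matched, and the set of registers already claimed.  */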
/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
{
  rtx insn, prev;
  regset live;
  int i;
  basic_block bb;
#ifdef HAVE_conditional_execution
  sbitmap blocks;
  bool changed;
#endif
  bool do_cleanup_cfg = false;
  bool do_global_life_update = false;
  bool do_rebuild_jump_labels = false;

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = ALLOC_REG_SET (&reg_obstack);
  live = ALLOC_REG_SET (&reg_obstack);

#ifdef HAVE_conditional_execution
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;
#else
  count_or_remove_death_notes (NULL, 1);
#endif

  FOR_EACH_BB_REVERSE (bb)
    {
      struct propagate_block_info *pbi;
      reg_set_iterator rsi;
      unsigned int j;

      /* Indicate that all slots except the last holds invalid data.  */
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
	peep2_insn_data[i].insn = NULL_RTX;
      peep2_current_count = 0;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      COPY_REG_SET (live, bb->il.rtl->global_live_at_end);
      COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);

#ifdef HAVE_conditional_execution
      pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
#else
      pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
#endif

      for (insn = BB_END (bb); ; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (INSN_P (insn))
	    {
	      rtx try, before_try, x;
	      int match_len;
	      rtx note;
	      bool was_call = false;

	      /* Record this insn.  */
	      if (--peep2_current < 0)
		peep2_current = MAX_INSNS_PER_PEEP2;
	      if (peep2_current_count < MAX_INSNS_PER_PEEP2
		  && peep2_insn_data[peep2_current].insn == NULL_RTX)
		peep2_current_count++;
	      peep2_insn_data[peep2_current].insn = insn;
	      propagate_one_insn (pbi, insn);
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

	      if (RTX_FRAME_RELATED_P (insn))
		{
		  /* If an insn has RTX_FRAME_RELATED_P set, peephole
		     substitution would lose the
		     REG_FRAME_RELATED_EXPR that is attached.  */
		  peep2_current_count = 0;
		  try = NULL;
		}
	      else
		/* Match the peephole.  */
		try = peephole2_insns (PATTERN (insn), insn, &match_len);

	      if (try != NULL)
		{
		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
		     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
		     cfg-related call notes.  */
		  for (i = 0; i <= match_len; ++i)
		    {
		      int j;
		      rtx old_insn, new_insn, note;

		      j = i + peep2_current;
		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
			j -= MAX_INSNS_PER_PEEP2 + 1;
		      old_insn = peep2_insn_data[j].insn;
		      if (!CALL_P (old_insn))
			continue;
		      was_call = true;

		      new_insn = try;
		      while (new_insn != NULL_RTX)
			{
			  if (CALL_P (new_insn))
			    break;
			  new_insn = NEXT_INSN (new_insn);
			}

		      gcc_assert (new_insn != NULL_RTX);

		      CALL_INSN_FUNCTION_USAGE (new_insn)
			= CALL_INSN_FUNCTION_USAGE (old_insn);

		      for (note = REG_NOTES (old_insn);
			   note;
			   note = XEXP (note, 1))
			switch (REG_NOTE_KIND (note))
			  {
			  case REG_NORETURN:
			  case REG_SETJMP:
			    REG_NOTES (new_insn)
			      = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
						   XEXP (note, 0),
						   REG_NOTES (new_insn));
			  default:
			    /* Discard all other reg notes.  */
			    break;
			  }

		      /* Croak if there is another call in the sequence.  */
		      while (++i <= match_len)
			{
			  j = i + peep2_current;
			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
			    j -= MAX_INSNS_PER_PEEP2 + 1;
			  old_insn = peep2_insn_data[j].insn;
			  gcc_assert (!CALL_P (old_insn));
			}
		      break;
		    }

		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

		  note = find_reg_note (peep2_insn_data[i].insn,
					REG_EH_REGION, NULL_RTX);

		  /* Replace the old sequence with the new.  */
		  try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
						INSN_LOCATOR (peep2_insn_data[i].insn));
		  before_try = PREV_INSN (insn);
		  delete_insn_chain (insn, peep2_insn_data[i].insn);

		  /* Re-insert the EH_REGION notes.  */
		  if (note || (was_call && nonlocal_goto_handler_labels))
		    {
		      edge eh_edge;
		      edge_iterator ei;

		      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
			  break;

		      for (x = try ; x != before_try ; x = PREV_INSN (x))
			if (CALL_P (x)
			    || (flag_non_call_exceptions
				&& may_trap_p (PATTERN (x))
				&& !find_reg_note (x, REG_EH_REGION, NULL)))
			  {
			    if (note)
			      REG_NOTES (x)
				= gen_rtx_EXPR_LIST (REG_EH_REGION,
						     XEXP (note, 0),
						     REG_NOTES (x));

			    if (x != BB_END (bb) && eh_edge)
			      {
				edge nfte, nehe;
				int flags;

				nfte = split_block (bb, x);
				flags = (eh_edge->flags
					 & (EDGE_EH | EDGE_ABNORMAL));
				if (CALL_P (x))
				  flags |= EDGE_ABNORMAL_CALL;
				nehe = make_edge (nfte->src, eh_edge->dest,
						  flags);

				nehe->probability = eh_edge->probability;
				nfte->probability
				  = REG_BR_PROB_BASE - nehe->probability;

				do_cleanup_cfg |= purge_dead_edges (nfte->dest);
#ifdef HAVE_conditional_execution
				SET_BIT (blocks, nfte->dest->index);
				changed = true;
#endif
				bb = nfte->src;
				eh_edge = nehe;
			      }
			  }

		      /* Converting possibly trapping insn to non-trapping is
			 possible.  Zap dummy outgoing edges.  */
		      do_cleanup_cfg |= purge_dead_edges (bb);
		    }

#ifdef HAVE_conditional_execution
		  /* With conditional execution, we cannot back up the
		     live information so easily, since the conditional
		     death data structures are not so self-contained.
		     So record that we've made a modification to this
		     block and update life information at the end.  */
		  SET_BIT (blocks, bb->index);
		  changed = true;

		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
		    peep2_insn_data[i].insn = NULL_RTX;
		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
		  peep2_current_count = 0;
#else
		  /* Back up lifetime information past the end of the
		     newly created sequence.  */
		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
		    i = 0;
		  COPY_REG_SET (live, peep2_insn_data[i].live_before);

		  /* Update life information for the new sequence.  */
		  x = try;
		  do
		    {
		      if (INSN_P (x))
			{
			  if (--i < 0)
			    i = MAX_INSNS_PER_PEEP2;
			  if (peep2_current_count < MAX_INSNS_PER_PEEP2
			      && peep2_insn_data[i].insn == NULL_RTX)
			    peep2_current_count++;
			  peep2_insn_data[i].insn = x;
			  propagate_one_insn (pbi, x);
			  COPY_REG_SET (peep2_insn_data[i].live_before, live);
			}
		      x = PREV_INSN (x);
		    }
		  while (x != prev);

		  /* ??? Should verify that LIVE now matches what we
		     had before the new sequence.  */

		  peep2_current = i;
#endif

		  /* If we generated a jump instruction, it won't have
		     JUMP_LABEL set.  Recompute after we're done.  */
		  for (x = try; x != before_try; x = PREV_INSN (x))
		    if (JUMP_P (x))
		      {
			do_rebuild_jump_labels = true;
			break;
		      }
		}
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}

      /* Some peepholes can decide they don't need one or more of their
	 inputs.  If this happens, local life update is not enough.  */
      EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->il.rtl->global_live_at_start, live,
				      0, j, rsi)
	{
	  do_global_life_update = true;
	  break;
	}

      free_propagate_block_info (pbi);
    }

  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    FREE_REG_SET (peep2_insn_data[i].live_before);
  FREE_REG_SET (live);

  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());

  /* If we eliminated EH edges, we may be able to merge blocks.  Further,
     we've changed global life since exception handlers are no longer
     reachable.  */
  if (do_cleanup_cfg)
    {
      cleanup_cfg (0);
      do_global_life_update = true;
    }
  if (do_global_life_update)
    update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
#ifdef HAVE_conditional_execution
  else if (changed)
    {
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }
  sbitmap_free (blocks);
#endif
}
#endif /* HAVE_peephole2 */
/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN must be
   single_set.  OUT_INSN must be either a single_set or a PARALLEL with
   SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  gcc_assert (in_set);

  if (!MEM_P (SET_DEST (in_set)))
    return false;

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
	    return false;
	}
    }

  return true;
}
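
/* Typical use in a scheduler description (illustrative; the insn
   reservation names are made up):

   (define_bypass 1 "store_unit" "load_unit" "store_data_bypass_p")

   i.e. the shorter latency between a store and a dependent load applies
   only when the dependence is on the stored data rather than on the
   address registers.  */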
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}
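
/* Similarly usable as a define_bypass guard (reservation names again
   made up):

   (define_bypass 0 "compare" "cmove" "if_test_bypass_p")

   so the reduced latency applies only when the consumer reads the
   producer's result in the IF_THEN_ELSE condition, not in either
   arm.  */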
static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}

static void
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize (dump_file);
#endif
}

struct tree_opt_pass pass_peephole2 =
{
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'z'                                   /* letter */
};

static void
rest_of_handle_split_all_insns (void)
{
  split_all_insns (1);
}

struct tree_opt_pass pass_split_all_insns =
{
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

struct tree_opt_pass pass_split_for_shorten_branches =
{
  "split3",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_SHORTEN_BRANCH,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

struct tree_opt_pass pass_split_before_regstack =
{
  "split2",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_SHORTEN_BRANCH,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};