/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1	PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1		PARAMS ((rtx, rtx *));
static void validate_replace_src_1	PARAMS ((rtx *, void *));
static rtx split_insn			PARAMS ((rtx));

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
	c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
	return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes =
	(change_t *) xrealloc (changes,
			       sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

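/* Illustrative sketch, not part of the original sources: a typical caller
   queues several dependent changes as one group and commits them
   atomically.  INSN, NEW_SRC and NEW_DEST are hypothetical names.

	validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
	validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
	if (! apply_change_group ())
	  ...			both changes were rejected and backed out

   If the group fails, every queued change has already been undone.  */
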
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending ()
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* if there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (GET_CODE (object) == MEM)
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
	if (changes[i].object
	    && INSN_P (changes[i].object)
	    && (bb = BLOCK_FOR_INSN (changes[i].object)))
	  bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

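/* Illustrative sketch (hypothetical code): a pass can checkpoint the
   current group with num_validated_changes and later retract only its own
   speculative changes.

	int checkpoint = num_validated_changes ();
	validate_change (insn, loc, new, 1);
	if (! looks_profitable)
	  cancel_changes (checkpoint);

   looks_profitable stands in for whatever test the caller applies.  */
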
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.  */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
		       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
				       : swap_condition (code),
				       GET_MODE (x), XEXP (x, 1),
				       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new)
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
	validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

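/* Illustrative sketch (hypothetical): substituting a constant for a pseudo
   throughout one insn, keeping the result only if the insn still
   recognizes.

	if (validate_replace_rtx (pseudo, GEN_INT (42), insn))
	  ...			insn now uses the constant
	else
	  ...			all substitutions were backed out
*/
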
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Same as validate_replace_src_group, but validate by seeing if
   INSN is still valid.  */
int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group ();
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
#endif

/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		return 0;
	    }
	}
    }

  return result;
}

/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will return usually zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}

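/* Illustrative sketch (hypothetical): a combine-style caller asks whether
   the value set by INSN dies in exactly one later use and, if so, edits
   that use through the returned location.

	rtx use_insn;
	rtx *usep = find_single_use (SET_DEST (PATTERN (insn)), insn,
				     &use_insn);
	if (usep)
	  validate_change (use_insn, usep, replacement, 0);

   REPLACEMENT is a made-up rtx standing in for the caller's new value.  */
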
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (sub) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && GET_CODE (sub) == MEM)
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      if (GET_CODE (y) == ADDRESSOF)
	return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (sub) == MEM)
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (GET_CODE (sub) == REG
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (GET_CODE (op) == REG
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (GET_CODE (op) == CONST_INT
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

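/* For reference, a sketch of the rtl shape accepted here: on a
   STACK_GROWS_DOWNWARD target with no push rounding, a push of an SImode
   value looks like

	(mem:SI (pre_dec:SI (reg/f:SI sp)))

   while the PRE_MODIFY form above covers targets whose pushes adjust the
   stack pointer by a rounded amount.  */
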
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && GET_RTX_CLASS (GET_CODE (op)) == '<');
}

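/* For reference, a sketch of how this predicate is typically used in a
   machine description (hypothetical pattern fragment):

	(match_operator 0 "comparison_operator"
	  [(match_operand 1 "register_operand" "")
	   (match_operand 2 "register_operand" "")])
*/
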
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}

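/* For reference, a sketch of the three accepted shapes and the count
   returned for each, assuming a single input operand IN:

	(asm_operands ... [IN] ...)                          returns 1
	(set OUT (asm_operands ... [IN] ...))                returns 2
	(parallel [(set OUT (asm_operands ... [IN] ...))
		   (clobber (reg ...))])                     returns 2
*/
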
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
	{
	case '=':
	case '+':
	case '*':
	case '%':
	case '?':
	case '!':
	case '#':
	case '&':
	case ',':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  while (ISDIGIT (*constraint))
	    constraint++;
	  result = -1;
	  break;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    return 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    return 1;
	  break;

	case '>':
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    return 1;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
	    return 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	    return 1;
	  break;

	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	    return 1;
	  break;

	case 's':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* FALLTHRU */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    return 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    return 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	    return 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	    return 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	    return 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	    return 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	    return 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	    return 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	    return 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	    return 1;
	  break;

	case 'X':
	  return 1;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    return 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		return 1;
	    }
#ifdef EXTRA_CONSTRAINT
	  if (EXTRA_CONSTRAINT (op, c))
	    return 1;
	  if (EXTRA_MEMORY_CONSTRAINT (c))
	    {
	      /* Every memory operand can be reloaded to fit.  */
	      if (memory_operand (op, VOIDmode))
		return 1;
	    }
	  if (EXTRA_ADDRESS_CONSTRAINT (c))
	    {
	      /* Every address operand can be reloaded to fit.  */
	      if (address_operand (op, VOIDmode))
		return 1;
	    }
#endif
	  break;
	}
    }

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (p)
     rtx *p;
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}

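/* For reference (sketch): given P pointing at (plus (reg) (const_int 4)),
   the location returned points at the (const_int 4) term;
   offsettable_address_p below temporarily rewrites that term in place to
   probe the address at its maximum offset.  */
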
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     rtx y;
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == 'a')
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (addr)
     rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}

/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
  return 0;
}

/* Like extract_insn, but saves the insn extracted and doesn't extract again
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which tends to extract the same insn over and over.  */
void
extract_insn_cached (insn)
     rtx insn;
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do a cached extract_insn and constrain_operands, and complain about
   failures.  Used by insn_attrtab.  */
void
extract_constrain_insn_cached (insn)
     rtx insn;
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do a cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (strict)
     int strict;
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}

/* Analyze INSN and fill in recog_data.  */

void
extract_insn (insn)
     rtx insn;
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  if (noperands > MAX_RECOG_OPERANDS)
	    abort ();

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode);
	  if (noperands > 0)
	    {
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
    abort ();
}

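/* Illustrative sketch (hypothetical): after extract_insn, a pass can walk
   the operands generically.

	extract_insn (insn);
	for (i = 0; i < recog_data.n_operands; i++)
	  if (recog_data.operand_type[i] != OP_IN)
	    record_output (recog_data.operand[i],
			   recog_data.operand_mode[i]);

   record_output is a made-up helper standing in for whatever the caller
   does with each output operand.  */
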
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints ()
{
  int i;

  memset (recog_op_alt, 0, sizeof recog_op_alt);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
	{
	  op_alt[j].class = NO_REGS;
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[j].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p++;
	      if (c == '#')
		do
		  c = *p++;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		break;

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[j].reject += 6;
		  break;
		case '!':
		  op_alt[j].reject += 600;
		  break;
		case '&':
		  op_alt[j].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    char *end;
		    op_alt[j].matches = strtoul (p - 1, &end, 10);
		    recog_op_alt[op_alt[j].matches][j].matched = i;
		    p = end;
		  }
		  break;

		case 'm':
		  op_alt[j].memory_ok = 1;
		  break;
		case '<':
		  op_alt[j].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[j].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[j].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[j].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[j].anything_ok = 1;
		  break;

		case 'p':
		  op_alt[j].is_address = 1;
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
		    [(int) MODE_BASE_REG_CLASS (VOIDmode)];
		  break;

		case 'g': case 'r':
		  op_alt[j].class =
		    reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
		  break;

		default:
		  if (EXTRA_MEMORY_CONSTRAINT (c))
		    {
		      op_alt[j].memory_ok = 1;
		      break;
		    }
		  if (EXTRA_ADDRESS_CONSTRAINT (c))
		    {
		      op_alt[j].is_address = 1;
		      op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
			[(int) MODE_BASE_REG_CLASS (VOIDmode)];
		      break;
		    }

		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
		    [(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
		  break;
		}
	    }
	}
    }
}

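/* Illustrative sketch (hypothetical): consumers such as the register
   allocator read the preprocessed tables instead of re-parsing the
   constraint strings.

	extract_insn (insn);
	preprocess_constraints ();
	for (i = 0; i < recog_data.n_operands; i++)
	  {
	    struct operand_alternative *a = &recog_op_alt[i][alt];
	    if (a->memory_ok || a->class != NO_REGS)
	      ...
	  }

   ALT is a hypothetical alternative index chosen by the caller.  */
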
/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive nonzero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this, other;
};

int
constrain_operands (strict)
     int strict;
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  enum machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  if (GET_RTX_CLASS (GET_CODE (op)) == '1')
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (GET_CODE (SUBREG_REG (op)) == REG
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  while (*p && (c = *p++) != ',')
	    switch (c)
	      {
	      case '?': case '!': case '*': case '%':
	      case '=': case '+':
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		while (*p && *p != ',')
		  p++;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		break;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p - 1, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
			op1 = XEXP (op1, 0);
		      if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  */
		if (strict <= 0
		    || (strict_memory_address_p (recog_data.operand_mode[opno],
						 op)))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.c.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (strict < 0
		    || GENERAL_REGS == ALL_REGS
		    || GET_CODE (op) != REG
		    || (reload_in_progress
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER)
		    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		  win = 1;
		break;

	      case 'X':
		/* This is used for a MATCH_SCRATCH in the cases when
		   we don't actually need anything.  So anything goes
		   any time.  */
		win = 1;
		break;

	      case 'm':
		/* Memory operands must be valid, to the extent
		   required by STRICT.  */
		if (GET_CODE (op) == MEM)
		  {
		    if (strict > 0
			&& !strict_memory_address_p (GET_MODE (op),
						     XEXP (op, 0)))
		      break;
		    if (strict == 0
			&& !memory_address_p (GET_MODE (op), XEXP (op, 0)))
		      break;
		    win = 1;
		  }
		/* Before reload, accept what reload can turn into mem.  */
		else if (strict < 0 && CONSTANT_P (op))
		  win = 1;
		/* During reload, accept a pseudo.  */
		else if (reload_in_progress && GET_CODE (op) == REG
			 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
		  win = 1;
		break;

	      case '<':
		if (GET_CODE (op) == MEM
		    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
			|| GET_CODE (XEXP (op, 0)) == POST_DEC))
		  win = 1;
		break;

	      case '>':
		if (GET_CODE (op) == MEM
		    && (GET_CODE (XEXP (op, 0)) == PRE_INC
			|| GET_CODE (XEXP (op, 0)) == POST_INC))
		  win = 1;
		break;

	      case 'E':
	      case 'F':
		if (GET_CODE (op) == CONST_DOUBLE
		    || (GET_CODE (op) == CONST_VECTOR
			&& GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
		  win = 1;
		break;

	      case 'G':
	      case 'H':
		if (GET_CODE (op) == CONST_DOUBLE
		    && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
		  win = 1;
		break;

	      case 's':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  break;
	      case 'i':
		if (CONSTANT_P (op))
		  win = 1;
		break;

	      case 'n':
		if (GET_CODE (op) == CONST_INT
		    || (GET_CODE (op) == CONST_DOUBLE
			&& GET_MODE (op) == VOIDmode))
		  win = 1;
		break;

	      case 'I':
	      case 'J':
	      case 'K':
	      case 'L':
	      case 'M':
	      case 'N':
	      case 'O':
	      case 'P':
		if (GET_CODE (op) == CONST_INT
		    && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
		  win = 1;
		break;

	      case 'V':
		if (GET_CODE (op) == MEM
		    && ((strict > 0 && ! offsettable_memref_p (op))
			|| (strict < 0
			    && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
			|| (reload_in_progress
			    && !(GET_CODE (op) == REG
				 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
		  win = 1;
		break;

	      case 'o':
		if ((strict > 0 && offsettable_memref_p (op))
		    || (strict == 0 && offsettable_nonstrict_memref_p (op))
		    /* Before reload, accept what reload can handle.  */
		    || (strict < 0
			&& (CONSTANT_P (op) || GET_CODE (op) == MEM))
		    /* During reload, accept a pseudo.  */
		    || (reload_in_progress && GET_CODE (op) == REG
			&& REGNO (op) >= FIRST_PSEUDO_REGISTER))
		  win = 1;
		break;

	      default:
		{
		  enum reg_class class;

		  class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
		  if (class != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && GET_CODE (op) == REG
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (GET_CODE (op) == REG
			      && reg_fits_class_p (op, class, offset, mode)))
			win = 1;
		    }
#ifdef EXTRA_CONSTRAINT
		  else if (EXTRA_CONSTRAINT (op, c))
		    win = 1;

		  if (EXTRA_MEMORY_CONSTRAINT (c))
		    {
		      /* Every memory operand can be reloaded to fit.  */
		      if (strict < 0 && GET_CODE (op) == MEM)
			win = 1;

		      /* Before reload, accept what reload can turn into mem.  */
		      if (strict < 0 && CONSTANT_P (op))
			win = 1;

		      /* During reload, accept a pseudo.  */
		      if (reload_in_progress && GET_CODE (op) == REG
			  && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			win = 1;
		    }
		  if (EXTRA_ADDRESS_CONSTRAINT (c))
		    {
		      /* Every address operand can be reloaded to fit.  */
		      if (strict < 0)
			win = 1;
		    }
#endif
		  break;
		}
	      }

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0)
	    for (eopno = 0; eopno < recog_data.n_operands; eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && GET_CODE (recog_data.operand[eopno]) == REG)
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((GET_CODE (recog_data.operand[opno]) == MEM
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index].this];
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
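
/* Illustrative calling pattern (a sketch, not quoted from any caller):

       extract_insn (insn);
       if (! constrain_operands (reload_completed))
	 fatal_insn_not_found (insn);

   On success, which_alternative identifies the alternative that matched;
   final and the insn attribute routines use essentially this shape.  */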
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (operand, class, offset, mode)
     rtx operand;
     enum reg_class class;
     int offset;
     enum machine_mode mode;
{
  int regno = REGNO (operand);
  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
			    regno + offset))
    {
      int sr;
      regno += offset;
      for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
	   sr > 0; sr--)
	if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
				 regno + sr))
	  break;
      return sr == 0;
    }

  return 0;
}
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (insn)
     rtx insn;
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);

  if (last == insn)
    return NULL_RTX;

  /* try_split returns the NOTE that INSN became.  */
  PUT_CODE (insn, NOTE);
  NOTE_SOURCE_FILE (insn) = 0;
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      while (1)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }

  return last;
}
/* Split all insns in the function.  If UPD_LIFE, update life info after.  */

void
split_all_insns (upd_life)
     int upd_life;
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      for (insn = bb->head; !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == bb->end);
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles REG_NO_CONFLICT blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    {
		      /* If the no-op set has a REG_UNUSED note, we need
			 to update liveness information.  */
		      if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
			{
			  SET_BIT (blocks, bb->index);
			  changed = true;
			}
		      /* ??? Is life info affected by deleting edges?  */
		      delete_insn_and_edges (insn);
		    }
		}
	      else
		{
		  rtx last = split_insn (insn);
		  if (last)
		    {
		      /* The split sequence may include barrier, but the
			 BB boundary we are interested in will be set to
			 the previous one.  */
		      while (GET_CODE (last) == BARRIER)
			last = PREV_INSN (last);
		      SET_BIT (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  if (changed)
    {
      int old_last_basic_block = last_basic_block;

      find_many_sub_basic_blocks (blocks);

      if (old_last_basic_block != last_basic_block && upd_life)
	blocks = sbitmap_resize (blocks, last_basic_block, 1);
    }

  if (changed && upd_life)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
		      PROP_DEATH_NOTES | PROP_REG_INFO);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

void
split_all_insns_noflow ()
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles REG_NO_CONFLICT blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
}
#ifdef HAVE_peephole2
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   global_live_at_end for the block.  */
#define PEEP2_EOB	pc_rtx

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (n)
     int n;
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[n].insn == PEEP2_EOB)
    return NULL_RTX;

  return peep2_insn_data[n].insn;
}
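
/* Worked example (illustrative; assumes MAX_INSNS_PER_PEEP2 is 5, its
   usual definition): the buffer then has 6 slots, and with
   peep2_current == 4 a call to peep2_next_insn (3) reads slot
   (4 + 3) % 6 == 1.  The wrap-around is spelled as a compare and
   subtract above because the modulus is not a power of two.  */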
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (ofs, regno)
     int ofs;
     int regno;
{
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[ofs].insn == NULL_RTX)
    abort ();

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}
/* Similarly for a REG.  */

int
peep2_reg_dead_p (ofs, reg)
     int ofs;
     rtx reg;
{
  int regno, n;

  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[ofs].insn == NULL_RTX)
    abort ();

  regno = REGNO (reg);
  n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (from, to, class_str, mode, reg_set)
     int from, to;
     const char *class_str;
     enum machine_mode mode;
     HARD_REG_SET *reg_set;
{
  static int search_ofs;
  enum reg_class class;
  HARD_REG_SET live;
  int i;

  if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
    abort ();

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[from].insn == NULL_RTX)
    abort ();
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
      if (peep2_insn_data[from].insn == NULL_RTX)
	abort ();
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  class = (class_str[0] == 'r' ? GENERAL_REGS
	   : REG_CLASS_FROM_LETTER (class_str[0]));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
	continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      success = 1;
      for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
	    SET_HARD_REG_BIT (*reg_set, regno + j);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
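
/* Illustrative use (a sketch; the context is hypothetical): a target
   peephole2 needing a scratch register over a two-insn window might do

       HARD_REG_SET rs;
       rtx scratch;

       CLEAR_HARD_REG_SET (rs);
       scratch = peep2_find_free_register (0, 1, "r", SImode, &rs);
       if (scratch == NULL_RTX)
	 FAIL;

   On success the chosen hard regs are also marked in RS, so a second
   call cannot hand back the same register.  */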
/* Perform the peephole2 optimization pass.  */

void
peephole2_optimize (dump_file)
     FILE *dump_file ATTRIBUTE_UNUSED;
{
  regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
  rtx insn, prev;
  regset live;
  int i;
  basic_block bb;
#ifdef HAVE_conditional_execution
  sbitmap blocks;
  bool changed;
#endif
  bool do_cleanup_cfg = false;
  bool do_rebuild_jump_labels = false;

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
  live = INITIALIZE_REG_SET (rs_heads[i]);

#ifdef HAVE_conditional_execution
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;
#else
  count_or_remove_death_notes (NULL, 1);
#endif

  FOR_EACH_BB_REVERSE (bb)
    {
      struct propagate_block_info *pbi;

      /* Indicate that all slots except the last hold invalid data.  */
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
	peep2_insn_data[i].insn = NULL_RTX;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      COPY_REG_SET (live, bb->global_live_at_end);
      COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);

#ifdef HAVE_conditional_execution
      pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
#else
      pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
#endif

      for (insn = bb->end; ; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (INSN_P (insn))
	    {
	      rtx try, before_try, x;
	      int match_len;
	      rtx note;
	      bool was_call = false;

	      /* Record this insn.  */
	      if (--peep2_current < 0)
		peep2_current = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[peep2_current].insn = insn;
	      propagate_one_insn (pbi, insn);
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

	      /* Match the peephole.  */
	      try = peephole2_insns (PATTERN (insn), insn, &match_len);
	      if (try != NULL)
		{
		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
		     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
		     cfg-related call notes.  */
		  for (i = 0; i <= match_len; ++i)
		    {
		      int j;
		      rtx old_insn, new_insn, note;

		      j = i + peep2_current;
		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
			j -= MAX_INSNS_PER_PEEP2 + 1;
		      old_insn = peep2_insn_data[j].insn;
		      if (GET_CODE (old_insn) != CALL_INSN)
			continue;
		      was_call = true;

		      new_insn = try;
		      while (new_insn != NULL_RTX)
			{
			  if (GET_CODE (new_insn) == CALL_INSN)
			    break;
			  new_insn = NEXT_INSN (new_insn);
			}

		      if (new_insn == NULL_RTX)
			abort ();

		      CALL_INSN_FUNCTION_USAGE (new_insn)
			= CALL_INSN_FUNCTION_USAGE (old_insn);

		      for (note = REG_NOTES (old_insn);
			   note;
			   note = XEXP (note, 1))
			switch (REG_NOTE_KIND (note))
			  {
			  case REG_NORETURN:
			  case REG_SETJMP:
			  case REG_ALWAYS_RETURN:
			    REG_NOTES (new_insn)
			      = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
						   XEXP (note, 0),
						   REG_NOTES (new_insn));
			  default:
			    /* Discard all other reg notes.  */
			    break;
			  }

		      /* Croak if there is another call in the sequence.  */
		      while (++i <= match_len)
			{
			  j = i + peep2_current;
			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
			    j -= MAX_INSNS_PER_PEEP2 + 1;
			  old_insn = peep2_insn_data[j].insn;
			  if (GET_CODE (old_insn) == CALL_INSN)
			    abort ();
			}
		      break;
		    }

		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

		  note = find_reg_note (peep2_insn_data[i].insn,
					REG_EH_REGION, NULL_RTX);

		  /* Replace the old sequence with the new.  */
		  try = emit_insn_after_scope (try, peep2_insn_data[i].insn,
					       INSN_SCOPE (peep2_insn_data[i].insn));
		  before_try = PREV_INSN (insn);
		  delete_insn_chain (insn, peep2_insn_data[i].insn);

		  /* Re-insert the EH_REGION notes.  */
		  if (note || (was_call && nonlocal_goto_handler_labels))
		    {
		      edge eh_edge;

		      for (eh_edge = bb->succ; eh_edge
			   ; eh_edge = eh_edge->succ_next)
			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
			  break;

		      for (x = try ; x != before_try ; x = PREV_INSN (x))
			if (GET_CODE (x) == CALL_INSN
			    || (flag_non_call_exceptions
				&& may_trap_p (PATTERN (x))
				&& !find_reg_note (x, REG_EH_REGION, NULL)))
			  {
			    if (note)
			      REG_NOTES (x)
				= gen_rtx_EXPR_LIST (REG_EH_REGION,
						     XEXP (note, 0),
						     REG_NOTES (x));

			    if (x != bb->end && eh_edge)
			      {
				edge nfte, nehe;
				int flags;

				nfte = split_block (bb, x);
				flags = (eh_edge->flags
					 & (EDGE_EH | EDGE_ABNORMAL));
				if (GET_CODE (x) == CALL_INSN)
				  flags |= EDGE_ABNORMAL_CALL;
				nehe = make_edge (nfte->src, eh_edge->dest,
						  flags);

				nehe->probability = eh_edge->probability;
				nfte->probability
				  = REG_BR_PROB_BASE - nehe->probability;

				do_cleanup_cfg |= purge_dead_edges (nfte->dest);
#ifdef HAVE_conditional_execution
				SET_BIT (blocks, nfte->dest->index);
				changed = true;
#endif
				bb = nfte->src;
				eh_edge = nehe;
			      }
			  }

		      /* The replacement may have turned a possibly trapping
			 insn into a non-trapping one.  Zap any outgoing
			 edges that became dead.  */
		      do_cleanup_cfg |= purge_dead_edges (bb);
		    }

#ifdef HAVE_conditional_execution
		  /* With conditional execution, we cannot back up the
		     live information so easily, since the conditional
		     death data structures are not so self-contained.
		     So record that we've made a modification to this
		     block and update life information at the end.  */
		  SET_BIT (blocks, bb->index);
		  changed = true;

		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
		    peep2_insn_data[i].insn = NULL_RTX;
		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
#else
		  /* Back up lifetime information past the end of the
		     newly created sequence.  */
		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
		    i = 0;
		  COPY_REG_SET (live, peep2_insn_data[i].live_before);

		  /* Update life information for the new sequence.  */
		  x = try;
		  do
		    {
		      if (INSN_P (x))
			{
			  if (--i < 0)
			    i = MAX_INSNS_PER_PEEP2;
			  peep2_insn_data[i].insn = x;
			  propagate_one_insn (pbi, x);
			  COPY_REG_SET (peep2_insn_data[i].live_before, live);
			}
		      x = PREV_INSN (x);
		    }
		  while (x != prev);

		  /* ??? Should verify that LIVE now matches what we
		     had before the new sequence.  */

		  peep2_current = i;
#endif

		  /* If we generated a jump instruction, it won't have
		     JUMP_LABEL set.  Recompute after we're done.  */
		  for (x = try; x != before_try; x = PREV_INSN (x))
		    if (GET_CODE (x) == JUMP_INSN)
		      {
			do_rebuild_jump_labels = true;
			break;
		      }
		}
	    }

	  if (insn == bb->head)
	    break;
	}

      free_propagate_block_info (pbi);
    }

  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    FREE_REG_SET (peep2_insn_data[i].live_before);
  FREE_REG_SET (live);

  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());

  /* If we eliminated EH edges, we may be able to merge blocks.  Further,
     we've changed global life since exception handlers are no longer
     reachable.  */
  if (do_cleanup_cfg)
    {
      cleanup_cfg (0);
      update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
    }
#ifdef HAVE_conditional_execution
  else if (changed)
    {
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }
  sbitmap_free (blocks);
#endif
}
#endif /* HAVE_peephole2 */
/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN must be
   single_set.  OUT_INSN must be either a single_set or a PARALLEL with
   SETs inside.  */

int
store_data_bypass_p (out_insn, in_insn)
     rtx out_insn, in_insn;
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    abort ();

  if (GET_CODE (SET_DEST (in_set)) != MEM)
    return false;

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      if (GET_CODE (out_pat) != PARALLEL)
	abort ();

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  if (GET_CODE (exp) != SET)
	    abort ();

	  if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
	    return false;
	}
    }

  return true;
}
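
/* Illustrative md usage (a sketch; the reservation names are made up):

       (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")

   i.e. the ALU result may be forwarded with latency 1 when the dependent
   store uses it only as store data, which is what this predicate tests.  */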
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (out_insn, in_insn)
     rtx out_insn, in_insn;
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
	return false;
      abort ();
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      if (GET_CODE (out_pat) != PARALLEL)
	abort ();

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  if (GET_CODE (exp) != SET)
	    abort ();

	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))