/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;
/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
                   bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
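
/* Illustrative sketch (not part of the original source): the typical
   change-group protocol built on the two wrappers above.  INSN is any
   insn being rewritten; NEW_SRC and NEW_DEST are hypothetical rtxes
   built by the caller.  Queue each change with IN_GROUP nonzero, then
   let apply_change_group either commit all of them or roll them back:

     validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
     validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
     if (!apply_change_group ())
       ;  -- every queued change was undone; INSN is untouched.  */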
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
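
/* Illustrative sketch (not part of the original source): insn_invalid_p
   is the workhorse behind verify_changes.  A caller that has edited
   PATTERN (insn) in place can ask whether the result still matches some
   define_insn, allowing required clobbers to join the current group:

     if (insn_invalid_p (insn, true))
       ;  -- not recognizable even after adding queued CLOBBERs.  */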
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
               REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object, true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insn changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
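
/* Illustrative sketch (not part of the original source): partial
   rollback using num_validated_changes and cancel_changes, the pattern
   passes such as combine use.  NUM records a known-good watermark; a
   later failed experiment is undone without disturbing earlier queued
   changes.  SPECULATIVE_RTX and LOC are hypothetical:

     int num = num_validated_changes ();
     validate_change (insn, loc, speculative_rtx, 1);
     if (!verify_changes (num))
       cancel_changes (num);  -- back out only the speculative part.  */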
/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
            {
              wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
            {
              wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
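
/* Illustrative sketch (not part of the original source): replacing a
   pseudo with a constant everywhere in one insn.  FROM_REG and TO_CST
   are hypothetical rtxes built by the caller; the call either rewrites
   INSN completely and returns nonzero, or leaves it unchanged:

     rtx from_reg = gen_rtx_REG (SImode, 100);   -- pseudo 100
     rtx to_cst = GEN_INT (42);
     if (validate_replace_rtx (from_reg, to_cst, insn))
       ;  -- INSN now uses (const_int 42) and still recognizes.  */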
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}
/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}
/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         reference to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
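
/* Illustrative sketch (not part of the original source): predicates
   like general_operand are normally referenced from match_operand in
   the machine description, but they can be called directly, e.g.

     if (general_operand (operands[1], SImode))
       ;  -- operands[1] is a valid SImode reg, mem, or constant.

   which is what the generated insn-recog.c effectively does for
   (match_operand:SI 1 "general_operand" "").  */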
/* Return 1 if OP is a valid memory address for a memory reference
   in mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
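
/* Illustrative sketch (not part of the original source): on a target
   where the stack grows downward and PUSH_ROUNDING is the identity,
   push_operand accepts exactly a MEM of the pushed mode whose address
   is STACK_PUSH_CODE applied to the stack pointer, i.e. rtl of the form

     (mem:SI (pre_dec:P (reg sp)))

   When the push size must be rounded, the PRE_MODIFY form checked
   above is required instead.  */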
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;                /* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
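
/* Illustrative sketch (not part of the original source): the caller is
   expected to size the output vectors with asm_noperands first, exactly
   as check_asm_operands does above:

     int n = asm_noperands (body);
     rtx *ops = XALLOCAVEC (rtx, n);
     const char **cons = XALLOCAVEC (const char *, n);
     decode_asm_operands (body, ops, NULL, cons, NULL, NULL);  */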
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            {
              result = 1;
#ifdef AUTO_INC_DEC
              incdec_ok = true;
#endif
            }
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            {
              result = 1;
#ifdef AUTO_INC_DEC
              incdec_ok = true;
#endif
            }
          break;

        case 'E':
        case 'F':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_SCALAR_INT_P (op))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_SCALAR_INT_P (op))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
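
/* Illustrative sketch (not part of the original source): given a
   pointer P to the address (plus (reg r) (const_int 4)),
   find_constant_term_loc returns &XEXP (*p, 1), the location holding
   (const_int 4), so that offsettable_address_addr_space_p below can
   temporarily overwrite the constant, test validity, and restore it.  */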
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}
/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
     : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  enum machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
                        plus_constant (address_mode, XEXP (y, 1),
                                       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
           && GET_CODE (y) == ZERO_EXTEND
           && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
                             plus_constant (pointer_mode, XEXP (y, 0),
                                            mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
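
/* Illustrative sketch (not part of the original source): for a 4-byte
   mode and address (plus (reg r) (const_int 8)), the code above
   temporarily rewrites the constant term to (const_int 11) -- the
   original offset plus mode_sz - 1 -- and asks the target whether the
   QImode reference is still a legitimate address.  If so, every byte
   of the original SImode access can be reached by an offsettable
   address.  */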
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}
/* Like extract_insn, but save insn extracted and don't extract again, when
   called again for the same insn expecting that recog_data still contains the
   valid information.  This is used primarily by the gen_attr infrastructure,
   which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode, NULL);
          memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          recog_data.is_asm = true;
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands gets mode from their real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  if (INSN_CODE (insn) < 0)
    for (i = 0; i < recog_data.n_alternatives; i++)
      recog_data.alternative_enabled_p[i] = true;
  else
    {
      recog_data.insn = insn;
      for (i = 0; i < recog_data.n_alternatives; i++)
        {
          which_alternative = i;
          recog_data.alternative_enabled_p[i]
            = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
        }
    }

  recog_data.insn = NULL;
  which_alternative = -1;
}
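
/* Illustrative sketch (not part of the original source): the usual
   pattern in passes that need operand data is

     extract_insn (insn);
     if (!constrain_operands (reload_completed))
       fatal_insn_not_found (insn);
     for (i = 0; i < recog_data.n_operands; i++)
       ;  -- inspect recog_data.operand[i], operand_mode[i], ...

   which extract_constrain_insn_cached above packages up.  */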
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
                                 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].cl = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (!recog_data.alternative_enabled_p[j])
            {
              p = skip_alternative (p);
              continue;
            }

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[j].matches = strtoul (p, &end, 10);
                    recog_op_alt[op_alt[j].matches][j].matched = i;
                    p = end;
                  }
                  continue;

                case TARGET_MEM_CONSTRAINT:
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].is_address = 1;
                  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
                      [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
                                             ADDRESS, SCRATCH)];
                  break;

                case 'g':
                case 'r':
                  op_alt[j].cl =
                   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
                  break;

                default:
                  if (EXTRA_MEMORY_CONSTRAINT (c, p))
                    {
                      op_alt[j].memory_ok = 1;
                      break;
                    }
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
                    {
                      op_alt[j].is_address = 1;
                      op_alt[j].cl
                        = (reg_class_subunion
                           [(int) op_alt[j].cl]
                           [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
                                                  ADDRESS, SCRATCH)]);
                      break;
                    }

                  op_alt[j].cl
                    = (reg_class_subunion
                       [(int) op_alt[j].cl]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
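
/* Illustrative sketch (not part of the original source): after
   extract_insn + preprocess_constraints, register-allocation-style
   passes read the digested constraints rather than the raw strings:

     preprocess_constraints ();
     struct operand_alternative *op_alt = recog_op_alt[opno];
     if (op_alt[which_alternative].cl != NO_REGS)
       ;  -- operand OPNO wants a register of that class here.  */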
/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive nonzero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this_op, other;
};

int
int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match
  {
    int this_op, other;
  };

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      if (!recog_data.alternative_enabled_p[which_alternative])
        {
          int i;

          for (i = 0; i < recog_data.n_operands; i++)
            constraints[i] = skip_alternative (constraints[i]);

          which_alternative++;
          continue;
        }

      for (opno = 0; opno < recog_data.n_operands; opno++)
        {
          rtx op = recog_data.operand[opno];
          enum machine_mode mode = GET_MODE (op);
          const char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;
          int len;

          earlyclobber[opno] = 0;

          /* A unary operator may be accepted by the predicate, but it
             is irrelevant for matching constraints.  */
          if (UNARY_P (op))
            op = XEXP (op, 0);

          if (GET_CODE (op) == SUBREG)
            {
              if (REG_P (SUBREG_REG (op))
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
                                              GET_MODE (SUBREG_REG (op)),
                                              SUBREG_BYTE (op),
                                              GET_MODE (op));
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;

          do
            switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
              {
              case '\0':
                len = 0;
                break;
              case ',':
                c = '\0';
                break;

              case '?':  case '!': case '*':  case '%':
              case '=':  case '+':
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                do
                  p++;
                while (*p && *p != ',');
                len = 0;
                break;

              case '&':
                earlyclobber[opno] = 1;
                if (seen_earlyclobber_at < 0)
                  seen_earlyclobber_at = opno;
                break;

              case '0':  case '1':  case '2':  case '3':  case '4':
              case '5':  case '6':  case '7':  case '8':  case '9':
                {
                  /* This operand must be the same as a previous one.
                     This kind of constraint is used for instructions such
                     as add when they take only two operands.

                     Note that the lower-numbered operand is passed first.

                     If we are not testing strictly, assume that this
                     constraint will be satisfied.  */

                  char *end;
                  int match;

                  match = strtoul (p, &end, 10);
                  if (strict < 0)
                    val = 1;
                  else
                    {
                      rtx op1 = recog_data.operand[match];
                      rtx op2 = recog_data.operand[opno];

                      /* A unary operator may be accepted by the predicate,
                         but it is irrelevant for matching constraints.  */
                      if (UNARY_P (op1))
                        op1 = XEXP (op1, 0);
                      if (UNARY_P (op2))
                        op2 = XEXP (op2, 0);

                      val = operands_match_p (op1, op2);
                    }

                  matching_operands[opno] = match;
                  matching_operands[match] = opno;

                  if (val != 0)
                    win = 1;

                  /* If output is *x and input is *--x, arrange later
                     to change the output to *--x as well, since the
                     output op is the one that will be printed.  */
                  if (val == 2 && strict > 0)
                    {
                      funny_match[funny_match_index].this_op = opno;
                      funny_match[funny_match_index++].other = match;
                    }

                  len = end - p;
                }
                break;

              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  */
                if (strict <= 0
                    || (strict_memory_address_p (recog_data.operand_mode[opno],
                                                 op)))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  Well, except that reload
                   doesn't check the validity of its replacements, but
                   that should only matter when there's a bug.  */
              case 'g':
                /* Anything goes unless it is a REG and really has a hard reg
                   but the hard reg is not in the class GENERAL_REGS.  */
                if (REG_P (op))
                  {
                    if (strict < 0
                        || GENERAL_REGS == ALL_REGS
                        || (reload_in_progress
                            && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                        || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                      win = 1;
                  }
                else if (strict < 0 || general_operand (op, mode))
                  win = 1;
                break;

              case 'X':
                /* This is used for a MATCH_SCRATCH in the cases when
                   we don't actually need anything.  So anything goes
                   any time.  */
                win = 1;
                break;

              case TARGET_MEM_CONSTRAINT:
                /* Memory operands must be valid, to the extent
                   required by STRICT.  */
                if (MEM_P (op))
                  {
                    if (strict > 0
                        && !strict_memory_address_addr_space_p
                             (GET_MODE (op), XEXP (op, 0),
                              MEM_ADDR_SPACE (op)))
                      break;
                    if (strict == 0
                        && !memory_address_addr_space_p
                             (GET_MODE (op), XEXP (op, 0),
                              MEM_ADDR_SPACE (op)))
                      break;
                    win = 1;
                  }
                /* Before reload, accept what reload can turn into mem.  */
                else if (strict < 0 && CONSTANT_P (op))
                  win = 1;
                /* During reload, accept a pseudo  */
                else if (reload_in_progress && REG_P (op)
                         && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                  win = 1;
                break;

              case '<':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                        || GET_CODE (XEXP (op, 0)) == POST_DEC))
                  win = 1;
                break;

              case '>':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_INC
                        || GET_CODE (XEXP (op, 0)) == POST_INC))
                  win = 1;
                break;

              case 'E':
              case 'F':
                if (CONST_DOUBLE_AS_FLOAT_P (op)
                    || (GET_CODE (op) == CONST_VECTOR
                        && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
                  win = 1;
                break;

              case 'G':
              case 'H':
                if (CONST_DOUBLE_AS_FLOAT_P (op)
                    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
                  win = 1;
                break;

              case 's':
                if (CONST_SCALAR_INT_P (op))
                  break;
              case 'i':
                if (CONSTANT_P (op))
                  win = 1;
                break;

              case 'n':
                if (CONST_SCALAR_INT_P (op))
                  win = 1;
                break;

              case 'I':
              case 'J':
              case 'K':
              case 'L':
              case 'M':
              case 'N':
              case 'O':
              case 'P':
                if (CONST_INT_P (op)
                    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
                  win = 1;
                break;

              case 'V':
                if (MEM_P (op)
                    && ((strict > 0 && ! offsettable_memref_p (op))
                        || (strict < 0
                            && !(CONSTANT_P (op) || MEM_P (op)))
                        || (reload_in_progress
                            && !(REG_P (op)
                                 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
                  win = 1;
                break;

              case 'o':
                if ((strict > 0 && offsettable_memref_p (op))
                    || (strict == 0 && offsettable_nonstrict_memref_p (op))
                    /* Before reload, accept what reload can handle.  */
                    || (strict < 0
                        && (CONSTANT_P (op) || MEM_P (op)))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && REG_P (op)
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              default:
                {
                  enum reg_class cl;

                  cl = (c == 'r'
                        ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
                  if (cl != NO_REGS)
                    {
                      if (strict < 0
                          || (strict == 0
                              && REG_P (op)
                              && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                          || (strict == 0 && GET_CODE (op) == SCRATCH)
                          || (REG_P (op)
                              && reg_fits_class_p (op, cl, offset, mode)))
                        win = 1;
                    }
#ifdef EXTRA_CONSTRAINT_STR
                  else if (EXTRA_CONSTRAINT_STR (op, c, p))
                    win = 1;

                  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
                           /* Every memory operand can be reloaded to fit.  */
                           && ((strict < 0 && MEM_P (op))
                               /* Before reload, accept what reload can turn
                                  into mem.  */
                               || (strict < 0 && CONSTANT_P (op))
                               /* During reload, accept a pseudo  */
                               || (reload_in_progress && REG_P (op)
                                   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
                    win = 1;
                  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
                           /* Every address operand can be reloaded to fit.  */
                           && strict < 0)
                    win = 1;
                  /* Cater to architectures like IA-64 that define extra memory
                     constraints without using define_memory_constraint.  */
                  else if (reload_in_progress
                           && REG_P (op)
                           && REGNO (op) >= FIRST_PSEUDO_REGISTER
                           && reg_renumber[REGNO (op)] < 0
                           && reg_equiv_mem (REGNO (op)) != 0
                           && EXTRA_CONSTRAINT_STR
                              (reg_equiv_mem (REGNO (op)), c, p))
                    win = 1;
#endif
                  break;
                }
              }
          while (p += len, c);

          constraints[opno] = p;
          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;

          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0 && seen_earlyclobber_at >= 0)
            for (eopno = seen_earlyclobber_at;
                 eopno < recog_data.n_operands;
                 eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && REG_P (recog_data.operand[eopno]))
                for (opno = 0; opno < recog_data.n_operands; opno++)
                  if ((MEM_P (recog_data.operand[opno])
                       || recog_data.operand_type[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *recog_data.constraints[opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && operands_match_p (recog_data.operand[opno],
                                                 recog_data.operand[eopno]))
                      && ! safe_from_earlyclobber (recog_data.operand[opno],
                                                   recog_data.operand[eopno]))
                    lose = 1;

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_data.operand[funny_match[funny_match_index].other]
                    = recog_data.operand[funny_match[funny_match_index].this_op];
                }

#ifdef AUTO_INC_DEC
              /* For operands without < or > constraints reject side-effects.  */
              if (recog_data.is_asm)
                {
                  for (opno = 0; opno < recog_data.n_operands; opno++)
                    if (MEM_P (recog_data.operand[opno]))
                      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
                        {
                        case PRE_INC:
                        case POST_INC:
                        case PRE_DEC:
                        case POST_DEC:
                        case PRE_MODIFY:
                        case POST_MODIFY:
                          if (strchr (recog_data.constraints[opno], '<') == NULL
                              && strchr (recog_data.constraints[opno], '>')
                                 == NULL)
                            return 0;
                          break;
                        default:
                          break;
                        }
                }
#endif
              return 1;
            }
        }

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
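
/* Editor's note: an illustrative sketch, not part of the original file.
   The usual calling sequence pairs extract_insn with constrain_operands,
   passing reload_completed as STRICT so that checking is strict only once
   hard registers are final.  The helper name below is hypothetical.  */
#if 0
static bool
insn_matches_its_constraints_p (rtx insn)
{
  extract_insn (insn);
  return constrain_operands (reload_completed) != 0;
}
#endif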
/* Return true iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CL when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CL.  */

bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
                  enum machine_mode mode)
{
  unsigned int regno = REGNO (operand);

  if (cl == NO_REGS)
    return false;

  /* Regno must not be a pseudo register.  Offset may be negative.  */
  return (HARD_REGISTER_NUM_P (regno)
          && HARD_REGISTER_NUM_P (regno + offset)
          && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
                                regno + offset));
}
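
/* Editor's note: an illustrative sketch, not part of the original file.
   A typical query: does hard register OP, with no subreg displacement,
   stay entirely within GENERAL_REGS when viewed in MODE?  The helper
   name below is hypothetical.  */
#if 0
static bool
reg_ok_as_general_reg_p (rtx op, enum machine_mode mode)
{
  return REG_P (op) && reg_fits_class_p (op, GENERAL_REGS, 0, mode);
}
#endif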
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL_RTX;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
        {
          note = find_reg_equal_equiv_note (insn);
          if (note && CONSTANT_P (XEXP (note, 0)))
            set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
          else if (CONSTANT_P (SET_SRC (insn_set)))
            set_unique_reg_note (last, REG_EQUAL,
                                 copy_rtx (SET_SRC (insn_set)));
        }
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
        {
          if (INSN_P (first))
            cleanup_subreg_operands (first);
          if (first == last)
            break;
          first = NEXT_INSN (first);
        }
    }

  return last;
}
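
/* Editor's note: an illustrative sketch, not part of the original file.
   split_insn is static; external code goes through split_all_insns or
   split_all_insns_noflow.  The core contract of try_split is visible in
   miniature below: it returns INSN itself when no splitter applied.  The
   helper name is hypothetical.  */
#if 0
static rtx
try_split_one_insn (rtx insn)
{
  rtx last = try_split (PATTERN (insn), insn, 1);

  return last == insn ? NULL_RTX : last;
}
#endif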
/* Split all insns in the function.  */

void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
        {
          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          finish = (insn == BB_END (bb));
          if (INSN_P (insn))
            {
              rtx set = single_set (insn);

              /* Don't split no-op move insns.  These should silently
                 disappear later in final.  Splitting such insns would
                 break the code that handles LIBCALL blocks.  */
              if (set && set_noop_p (set))
                {
                  /* Nops get in the way while scheduling, so delete them
                     now if register allocation has already been done.  It
                     is too risky to try to do this before register
                     allocation, and there are unlikely to be very many
                     nops then anyways.  */
                  if (reload_completed)
                    delete_insn_and_edges (insn);
                }
              else
                {
                  if (split_insn (insn))
                    {
                      bitmap_set_bit (blocks, bb->index);
                      changed = true;
                    }
                }
            }
        }
    }

  default_rtl_profile ();
  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
        {
          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles LIBCALL blocks.  */
          rtx set = single_set (insn);
          if (set && set_noop_p (set))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyways.

                 ??? Should we use delete_insn when the CFG isn't valid?  */
              if (reload_completed)
                delete_insn_and_edges (insn);
            }
          else
            split_insn (insn);
        }
    }
  return 0;
}
#ifdef HAVE_peephole2

struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB	pc_rtx
/* Wrap N to fit into the peep2_insn_data buffer.  */

static int
peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}
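
/* Editor's note: an illustrative sketch, not part of the original file.
   peep2_insn_data is a ring of MAX_INSNS_PER_PEEP2 + 1 slots, and indices
   are only ever advanced past the end by less than one full lap, so a
   single conditional subtraction suffices.  For instance, an index of
   MAX_INSNS_PER_PEEP2 + 2 wraps to 1.  */
#if 0
static int
wrap_example (void)
{
  return peep2_buf_position (MAX_INSNS_PER_PEEP2 + 2); /* yields 1 */
}
#endif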
/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n = peep2_buf_position (peep2_current + n);

  return peep2_insn_data[n].insn;
}
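
/* Editor's note: an illustrative sketch, not part of the original file.
   A define_peephole2 C condition can peek ahead in the match window,
   e.g. to refuse a match whose second insn is a call.  The helper name
   below is hypothetical.  */
#if 0
static bool
second_matched_insn_is_call_p (void)
{
  rtx insn = peep2_next_insn (1);

  return insn != NULL_RTX && CALL_P (insn);
}
#endif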
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}
/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
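
/* Editor's note: an illustrative sketch, not part of the original file.
   define_peephole2 conditions typically use this to check that a value is
   not needed past a given point of the match, e.g. that REG dies before
   the second matched insn.  The helper name is hypothetical.  */
#if 0
static bool
reg_dies_within_match_p (rtx reg)
{
  return peep2_reg_dead_p (1, reg);
}
#endif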
/* Regno offset to be used in the register search.  */
static int search_ofs;

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
                          enum machine_mode mode, HARD_REG_SET *reg_set)
{
  enum reg_class cl;
  HARD_REG_SET live;
  df_ref *def_rec;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);

      /* Don't use registers set or clobbered by the insn.  */
      for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
           *def_rec; def_rec++)
        SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));

      from = peep2_buf_position (from + 1);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
           : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
        raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Can it support the mode we need?  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
        continue;

      success = 1;
      for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
        {
          /* Don't allocate fixed registers.  */
          if (fixed_regs[regno + j])
            {
              success = 0;
              break;
            }
          /* Don't allocate global registers.  */
          if (global_regs[regno + j])
            {
              success = 0;
              break;
            }
          /* Make sure the register is of the right class.  */
          if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
            {
              success = 0;
              break;
            }
          /* And that we don't create an extra save/restore.  */
          if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
            {
              success = 0;
              break;
            }

          if (! targetm.hard_regno_scratch_ok (regno + j))
            {
              success = 0;
              break;
            }

          /* And we don't clobber traceback for noreturn functions.  */
          if ((regno + j == FRAME_POINTER_REGNUM
               || regno + j == HARD_FRAME_POINTER_REGNUM)
              && (! reload_completed || frame_pointer_needed))
            {
              success = 0;
              break;
            }

          if (TEST_HARD_REG_BIT (*reg_set, regno + j)
              || TEST_HARD_REG_BIT (live, regno + j))
            {
              success = 0;
              break;
            }
        }

      if (success)
        {
          add_to_hard_reg_set (reg_set, mode, regno);

          /* Start the next search with the next register.  */
          if (++raw_regno >= FIRST_PSEUDO_REGISTER)
            raw_regno = 0;
          search_ofs = raw_regno;

          return gen_rtx_REG (mode, regno);
        }
    }

  search_ofs = 0;
  return NULL_RTX;
}
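
/* Editor's note: an illustrative sketch, not part of the original file.
   A target peephole might request a word_mode scratch register that is
   free across the first two insns of the match; starting from an empty
   HARD_REG_SET excludes nothing up front.  The helper name is
   hypothetical.  */
#if 0
static rtx
grab_word_scratch (void)
{
  HARD_REG_SET used;

  CLEAR_HARD_REG_SET (used);
  return peep2_find_free_register (0, 1, "r", word_mode, &used);
}
#endif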
/* Forget all currently tracked instructions, only remember current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last holds invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL_RTX;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}
/* While scanning basic block BB, we found a match of length MATCH_LEN,
   starting at INSN.  Perform the replacement, removing the old insns and
   replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
   if the replacement is rejected.  */

static rtx
peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
{
  int i;
  rtx last, eh_note, as_note, before_try, x;
  rtx old_insn, new_insn;
  bool was_call = false;

  /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
     match more than one insn, or to be split into more than one insn.  */
  old_insn = peep2_insn_data[peep2_current].insn;
  if (RTX_FRAME_RELATED_P (old_insn))
    {
      bool any_note = false;
      rtx note;

      if (match_len != 0)
        return NULL;

      /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
         may be in the stream for the purpose of register allocation.  */
      if (active_insn_p (attempt))
        new_insn = attempt;
      else
        new_insn = next_active_insn (attempt);
      if (next_active_insn (new_insn))
        return NULL;

      /* We have a 1-1 replacement.  Copy over any frame-related info.  */
      RTX_FRAME_RELATED_P (new_insn) = 1;

      /* Allow the backend to fill in a note during the split.  */
      for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
        switch (REG_NOTE_KIND (note))
          {
          case REG_FRAME_RELATED_EXPR:
          case REG_CFA_DEF_CFA:
          case REG_CFA_ADJUST_CFA:
          case REG_CFA_OFFSET:
          case REG_CFA_REGISTER:
          case REG_CFA_EXPRESSION:
          case REG_CFA_RESTORE:
          case REG_CFA_SET_VDRAP:
            any_note = true;
            break;
          default:
            break;
          }

      /* If the backend didn't supply a note, copy one over.  */
      if (!any_note)
        for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
          switch (REG_NOTE_KIND (note))
            {
            case REG_FRAME_RELATED_EXPR:
            case REG_CFA_DEF_CFA:
            case REG_CFA_ADJUST_CFA:
            case REG_CFA_OFFSET:
            case REG_CFA_REGISTER:
            case REG_CFA_EXPRESSION:
            case REG_CFA_RESTORE:
            case REG_CFA_SET_VDRAP:
              add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
              any_note = true;
              break;
            default:
              break;
            }

      /* If there still isn't a note, make sure the unwind info sees the
         same expression as before the split.  */
      if (!any_note)
        {
          rtx old_set, new_set;

          /* The old insn had better have been simple, or annotated.  */
          old_set = single_set (old_insn);
          gcc_assert (old_set != NULL);

          new_set = single_set (new_insn);
          if (!new_set || !rtx_equal_p (new_set, old_set))
            add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
        }

      /* Copy prologue/epilogue status.  This is required in order to keep
         proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
      maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
     cfg-related call notes.  */
  for (i = 0; i <= match_len; ++i)
    {
      int j;
      rtx note;

      j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;
      if (!CALL_P (old_insn))
        continue;
      was_call = true;

      new_insn = attempt;
      while (new_insn != NULL_RTX)
        {
          if (CALL_P (new_insn))
            break;
          new_insn = NEXT_INSN (new_insn);
        }

      gcc_assert (new_insn != NULL_RTX);

      CALL_INSN_FUNCTION_USAGE (new_insn)
        = CALL_INSN_FUNCTION_USAGE (old_insn);

      for (note = REG_NOTES (old_insn);
           note;
           note = XEXP (note, 1))
        switch (REG_NOTE_KIND (note))
          {
          case REG_NORETURN:
          case REG_SETJMP:
          case REG_TM:
            add_reg_note (new_insn, REG_NOTE_KIND (note),
                          XEXP (note, 0));
            break;
          default:
            /* Discard all other reg notes.  */
            break;
          }

      /* Croak if there is another call in the sequence.  */
      while (++i <= match_len)
        {
          j = peep2_buf_position (peep2_current + i);
          old_insn = peep2_insn_data[j].insn;
          gcc_assert (!CALL_P (old_insn));
        }
      break;
    }

  /* If we matched any instruction that had a REG_ARGS_SIZE, then
     move those notes over to the new sequence.  */
  as_note = NULL;
  for (i = match_len; i >= 0; --i)
    {
      int j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;

      as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
      if (as_note)
        break;
    }

  i = peep2_buf_position (peep2_current + match_len);
  eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);

  /* Replace the old sequence with the new.  */
  last = emit_insn_after_setloc (attempt,
                                 peep2_insn_data[i].insn,
                                 INSN_LOCATION (peep2_insn_data[i].insn));
  before_try = PREV_INSN (insn);
  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

  /* Re-insert the EH_REGION notes.  */
  if (eh_note || (was_call && nonlocal_goto_handler_labels))
    {
      edge eh_edge;
      edge_iterator ei;

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
        if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
          break;

      if (eh_note)
        copy_reg_eh_region_note_backward (eh_note, last, before_try);

      if (eh_edge)
        for (x = last; x != before_try; x = PREV_INSN (x))
          if (x != BB_END (bb)
              && (can_throw_internal (x)
                  || can_nonlocal_goto (x)))
            {
              edge nfte, nehe;
              int flags;

              nfte = split_block (bb, x);
              flags = (eh_edge->flags
                       & (EDGE_EH | EDGE_ABNORMAL));
              if (CALL_P (x))
                flags |= EDGE_ABNORMAL_CALL;
              nehe = make_edge (nfte->src, eh_edge->dest,
                                flags);

              nehe->probability = eh_edge->probability;
              nfte->probability
                = REG_BR_PROB_BASE - nehe->probability;

              peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
              bb = nfte->src;
              eh_edge = nehe;
            }

      /* Converting possibly trapping insn to non-trapping is
         possible.  Zap dummy outgoing edges.  */
      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
    }

  /* Re-insert the ARGS_SIZE notes.  */
  if (as_note)
    fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));

  /* If we generated a jump instruction, it won't have
     JUMP_LABEL set.  Recompute after we're done.  */
  for (x = last; x != before_try; x = PREV_INSN (x))
    if (JUMP_P (x))
      {
        peep2_do_rebuild_jump_labels = true;
        break;
      }

  return last;
}
/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
{
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  x = last;
  do
    {
      if (INSN_P (x))
        {
          df_insn_rescan (x);
          if (peep2_current_count < MAX_INSNS_PER_PEEP2)
            {
              peep2_current_count++;
              if (--i < 0)
                i = MAX_INSNS_PER_PEEP2;
              peep2_insn_data[i].insn = x;
              df_simulate_one_insn_backwards (bb, x, &live);
              COPY_REG_SET (peep2_insn_data[i].live_before, &live);
            }
        }
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}
/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
   Return true if we added it, false otherwise.  The caller will try to match
   peepholes against the buffer if we return false; otherwise it will try to
   add more instructions to the buffer.  */

static bool
peep2_fill_buffer (basic_block bb, rtx insn, regset live)
{
  int pos;

  /* Once we have filled the maximum number of insns the buffer can hold,
     allow the caller to match the insns against peepholes.  We wait until
     the buffer is full in case the target has similar peepholes of different
     length; we always want to match the longest if possible.  */
  if (peep2_current_count == MAX_INSNS_PER_PEEP2)
    return false;

  /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
     any other pattern, lest it change the semantics of the frame info.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* Let the buffer drain first.  */
      if (peep2_current_count > 0)
        return false;
      /* Now the insn will be the only thing in the buffer.  */
    }

  pos = peep2_buf_position (peep2_current + peep2_current_count);
  peep2_insn_data[pos].insn = insn;
  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
  peep2_current_count++;

  df_simulate_one_insn_forwards (bb, insn, live);
  return true;
}
/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  search_ofs = 0;
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE (bb)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
        {
          rtx attempt, head;
          int match_len;

          if (!past_end && !NONDEBUG_INSN_P (insn))
            {
            next_insn:
              insn = NEXT_INSN (insn);
              if (insn == NEXT_INSN (BB_END (bb)))
                past_end = true;
              continue;
            }
          if (!past_end && peep2_fill_buffer (bb, insn, live))
            goto next_insn;

          /* If we did not fill an empty buffer, it signals the end of the
             block.  */
          if (peep2_current_count == 0)
            break;

          /* The buffer filled to the current maximum, so try to match.  */

          pos = peep2_buf_position (peep2_current + peep2_current_count);
          peep2_insn_data[pos].insn = PEEP2_EOB;
          COPY_REG_SET (peep2_insn_data[pos].live_before, live);

          /* Match the peephole.  */
          head = peep2_insn_data[peep2_current].insn;
          attempt = peephole2_insns (PATTERN (head), head, &match_len);
          if (attempt != NULL)
            {
              rtx last = peep2_attempt (bb, head, match_len, attempt);
              if (last)
                {
                  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
                  continue;
                }
            }

          /* No match: advance the buffer by one insn.  */
          peep2_current = peep2_buf_position (peep2_current + 1);
          peep2_current_count--;
        }
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
}
#endif /* HAVE_peephole2 */
/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;
  rtx out_pat, in_pat;
  rtx out_exp, in_exp;
  int i, j;

  in_set = single_set (in_insn);
  if (in_set)
    {
      if (!MEM_P (SET_DEST (in_set)))
        return false;

      out_set = single_set (out_insn);
      if (out_set)
        {
          if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
            return false;
        }
      else
        {
          out_pat = PATTERN (out_insn);

          if (GET_CODE (out_pat) != PARALLEL)
            return false;

          for (i = 0; i < XVECLEN (out_pat, 0); i++)
            {
              out_exp = XVECEXP (out_pat, 0, i);

              if (GET_CODE (out_exp) == CLOBBER)
                continue;

              gcc_assert (GET_CODE (out_exp) == SET);

              if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
                return false;
            }
        }
    }
  else
    {
      in_pat = PATTERN (in_insn);
      gcc_assert (GET_CODE (in_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (in_pat, 0); i++)
        {
          in_exp = XVECEXP (in_pat, 0, i);

          if (GET_CODE (in_exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (in_exp) == SET);

          if (!MEM_P (SET_DEST (in_exp)))
            continue;

          out_set = single_set (out_insn);
          if (out_set)
            {
              if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
                return false;
            }
          else
            {
              out_pat = PATTERN (out_insn);
              gcc_assert (GET_CODE (out_pat) == PARALLEL);

              for (j = 0; j < XVECLEN (out_pat, 0); j++)
                {
                  out_exp = XVECEXP (out_pat, 0, j);

                  if (GET_CODE (out_exp) == CLOBBER)
                    continue;

                  gcc_assert (GET_CODE (out_exp) == SET);

                  if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
                    return false;
                }
            }
        }
    }

  return true;
}
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
          || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
        return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
        {
          rtx exp = XVECEXP (out_pat, 0, i);

          if (GET_CODE (exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (exp) == SET);

          /* Note: in this branch single_set failed, so OUT_SET is null;
             each SET of the PARALLEL must be checked instead.  */
          if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
              || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
            return false;
        }
    }

  return true;
}
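
/* Editor's note: an illustrative sketch, not part of the original file.
   Besides their use from define_bypass, a target's scheduling hooks can
   call these predicates directly, e.g. to shave a cycle off a dependence
   that is only on store data or on an if-then-else condition.  The helper
   name and the one-cycle adjustment are hypothetical.  */
#if 0
static int
adjust_bypass_latency (rtx out_insn, rtx in_insn, int latency)
{
  if (latency > 1
      && (store_data_bypass_p (out_insn, in_insn)
          || if_test_bypass_p (out_insn, in_insn)))
    return latency - 1;
  return latency;
}
#endif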
static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}

static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}
namespace {

const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
};

class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  bool gate () { return gate_handle_peephole2 (); }
  unsigned int execute () { return rest_of_handle_peephole2 (); }

}; // class pass_peephole2

} // anon namespace

rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}
static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns ();
  return 0;
}

namespace {

const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  unsigned int execute () { return rest_of_handle_split_all_insns (); }

}; // class pass_split_all_insns

} // anon namespace

rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}
static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}

namespace {

const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return rest_of_handle_split_after_reload (); }

}; // class pass_split_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}
static bool
gate_handle_split_before_regstack (void)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}

namespace {

const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_handle_split_before_regstack (); }
  unsigned int execute () {
    return rest_of_handle_split_before_regstack ();
  }

}; // class pass_split_before_regstack

} // anon namespace

rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}
static bool
gate_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

namespace {

const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_flow, /* todo_flags_finish */
};

class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_handle_split_before_sched2 (); }
  unsigned int execute () { return rest_of_handle_split_before_sched2 (); }

}; // class pass_split_before_sched2

} // anon namespace

rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}
/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if HAVE_ATTR_length && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

namespace {

const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_rtl_sharing, /* todo_flags_finish */
};

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  bool gate () { return gate_do_final_split (); }
  unsigned int execute () { return split_all_insns_noflow (); }

}; // class pass_split_for_shorten_branches

} // anon namespace

rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}