/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "insn-attr.h"
#include "addresses.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "reload.h"
#include "tree-pass.h"
#include "function-abi.h"
#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);
struct target_recog default_target_recog;

struct target_recog *this_target_recog = &default_target_recog;
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.cc and expmed.cc (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.cc and final.cc and reload.cc.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

bool volatile_ok;

struct recog_data_d recog_data;
/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
                                      * MAX_RECOG_ALTERNATIVES];
/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;
/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.cc.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = false;
}

void
init_recog (void)
{
  volatile_ok = true;
}
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

bool
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return false;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return false;
  if (noperands == 0)
    return true;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return false;
    }

  return true;
}
/* Static data for the next two routines.  */

struct change_t
{
  rtx object;
  int old_code;
  int old_len;
  bool unshare;
  rtx *loc;
  rtx old;
};

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
static int temporarily_undone_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If NEW_LEN is >= 0, XVECLEN (NEW_RTX, 0)
   will also be changed to NEW_LEN, which is no greater than the current
   XVECLEN.  If OBJECT is zero, no validation is done, the change is
   simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
                   bool unshare, int new_len = -1)
{
  gcc_assert (temporarily_undone_changes == 0);
  rtx old = *loc;

  /* Single-element parallels aren't valid and won't match anything.
     Replace them with the single element.  */
  if (new_len == 1 && GET_CODE (new_rtx) == PARALLEL)
    {
      new_rtx = XVECEXP (new_rtx, 0, 0);
      new_len = -1;
    }

  if ((old == new_rtx || rtx_equal_p (old, new_rtx))
      && (new_len < 0 || XVECLEN (new_rtx, 0) == new_len))
    return true;

  gcc_assert ((in_group != 0 || num_changes == 0)
              && (new_len < 0 || new_rtx == *loc));

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].old_len = (new_len >= 0 ? XVECLEN (new_rtx, 0) : -1);
  changes[num_changes].unshare = unshare;

  if (new_len >= 0)
    XVECLEN (new_rtx, 0) = new_len;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return true;
  else
    return apply_change_group ();
}
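
/* Illustrative sketch (editorial, not part of the original source): a
   caller that wants several related edits to succeed or fail atomically
   issues them with IN_GROUP nonzero and then validates the whole group:

     validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
     validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
     if (!apply_change_group ())
       ;  (both edits have been rolled back at this point)

   A call with IN_GROUP == 0 validates and applies, or rolls back, the
   single change immediately.  NEW_SRC and NEW_DEST are placeholders for
   whatever replacement rtxes the caller has built.  */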
/* Wrapper for validate_change_1, with the UNSHARE argument defaulting
   to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1, with the UNSHARE argument defaulting
   to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
/* Change XVECLEN (*LOC, 0) to NEW_LEN.  OBJECT, IN_GROUP and the return
   value are as for validate_change_1.  */

bool
validate_change_xveclen (rtx object, rtx *loc, int new_len, bool in_group)
{
  return validate_change_1 (object, loc, *loc, in_group, false, new_len);
}
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */

bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* Check if REG_INC argument in *DATA overlaps a stored REG.  */

static void
check_invalid_inc_dec (rtx reg, const_rtx, void *data)
{
  rtx *pinc = (rtx *) data;
  if (*pinc == NULL_RTX || MEM_P (reg))
    return;
  if (reg_overlap_mentioned_p (reg, *pinc))
    *pinc = NULL_RTX;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers that have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

bool
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return true;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return true;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
        return true;
    }

  /* Punt if REG_INC argument overlaps some stored REG.  */
  for (rtx link = FIND_REG_INC_NOTE (insn, NULL_RTX);
       link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      {
        rtx reg = XEXP (link, 0);
        note_stores (insn, check_invalid_inc_dec, &reg);
        if (reg == NULL_RTX)
          return true;
      }

  INSN_CODE (insn) = icode;
  return false;
}
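
/* Illustrative example (editorial, not part of the original source): on a
   target whose add pattern is

     (parallel [(set ...) (clobber (reg:CC ...))])

   recog can match a bare (set (reg:SI 100) (plus:SI (reg:SI 101)
   (reg:SI 102))) only by requesting NUM_CLOBBERS; insn_invalid_p then
   rebuilds the pattern as a PARALLEL with the flags clobber appended
   before accepting the insn.  The register numbers are hypothetical.  */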
/* Return number of changes made and not validated yet.  */

int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

bool
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
               REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && register_asm_p (changes[i].old))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  gcc_assert (temporarily_undone_changes == 0);
  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (as_a <rtx_insn *> (last_object));
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
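
/* Editorial sketch (not part of the original source): passes that manage
   several nested groups can use the lower-level pieces directly, along
   the lines of

     int start = num_validated_changes ();
     ... issue validate_change calls with IN_GROUP nonzero ...
     if (verify_changes (start))
       confirm_change_group ();
     else
       cancel_changes (start);

   assuming no earlier changes are pending; apply_change_group does
   exactly this for the whole list, starting at change 0.  */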
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  gcc_assert (temporarily_undone_changes == 0);
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      if (changes[i].old_len >= 0)
        XVECLEN (*changes[i].loc, 0) = changes[i].old_len;
      else
        *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
/* Swap the status of change NUM from being applied to not being applied,
   or vice versa.  */

static void
swap_change (int num)
{
  if (changes[num].old_len >= 0)
    std::swap (XVECLEN (*changes[num].loc, 0), changes[num].old_len);
  else
    std::swap (*changes[num].loc, changes[num].old);
  if (changes[num].object && !MEM_P (changes[num].object))
    std::swap (INSN_CODE (changes[num].object), changes[num].old_code);
}

/* Temporarily undo all the changes numbered NUM and up, with a view
   to reapplying them later.  The next call to the changes machinery
   must be:

      redo_changes (NUM)

   otherwise things will end up in an invalid state.  */

void
temporarily_undo_changes (int num)
{
  gcc_assert (temporarily_undone_changes == 0 && num <= num_changes);
  for (int i = num_changes - 1; i >= num; i--)
    swap_change (i);
  temporarily_undone_changes = num_changes - num;
}

/* Redo the changes that were temporarily undone by:

      temporarily_undo_changes (NUM).  */

void
redo_changes (int num)
{
  gcc_assert (temporarily_undone_changes == num_changes - num);
  for (int i = num; i < num_changes; ++i)
    swap_change (i);
  temporarily_undone_changes = 0;
}
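
/* Illustrative sketch (editorial, not part of the original source): the
   undo/redo pair lets a caller peek at the unmodified insn in the middle
   of building a group:

     temporarily_undo_changes (0);
     ... inspect the original PATTERN (insn) ...
     redo_changes (0);

   As documented above, no other changes machinery may be used between
   the two calls.  */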
/* Reduce conditional compilation elsewhere.  */
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
                          machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;
  scalar_int_mode is_mode;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
        new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
                                            op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
                                             XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
                                                 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }

  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          int pos = INTVAL (XEXP (x, 2));
          machine_mode new_mode = is_mode;
          if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
            new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
          else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
            new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
          scalar_int_mode wanted_mode = (new_mode == VOIDmode
                                         ? word_mode
                                         : as_a <scalar_int_mode> (new_mode));

          /* If we have a narrower mode, we can do something.  */
          if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

bool
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

bool
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

bool
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */

bool
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
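
/* Illustrative example (editorial, not part of the original source):
   replacing a pseudo by a constant everywhere in an insn, keeping the
   insn only if it still matches:

     if (validate_replace_rtx (regno_reg_rtx[100], GEN_INT (4), insn))
       ... insn now uses (const_int 4) and has been re-recognized ...

   regno_reg_rtx[100] is just a placeholder for the FROM rtx; on failure
   the insn is left exactly as it was.  */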
/* Function called by note_uses to replace used subexpressions.  */

struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx_insn *insn;		/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
/* Try to process the address of memory expression MEM.  Return true on
   success; leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_mem_1 (rtx mem)
{
  auto old_num_changes = num_validated_changes ();
  mem_depth += 1;
  bool res = apply_to_rvalue_1 (&XEXP (mem, 0));
  mem_depth -= 1;
  if (!res)
    return false;

  if (old_num_changes != num_validated_changes ()
      && should_check_mems
      && !check_mem (old_num_changes, mem))
    return false;

  return true;
}
/* Try to process the rvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_rvalue_1 (rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  machine_mode mode = GET_MODE (x);

  auto old_num_changes = num_validated_changes ();
  if (from && GET_CODE (x) == GET_CODE (from) && rtx_equal_p (x, from))
    {
      /* Don't replace register asms in asm statements; we mustn't
         change the user's register allocation.  */
      if (REG_P (x)
          && HARD_REGISTER_P (x)
          && register_asm_p (x)
          && asm_noperands (PATTERN (insn)) > 0)
        return false;

      if (should_unshare)
        validate_unshare_change (insn, loc, to, 1);
      else
        validate_change (insn, loc, to, 1);
      if (mem_depth && !REG_P (to) && !CONSTANT_P (to))
        {
          /* We're substituting into an address, but TO will have the
             form expected outside an address.  Canonicalize it if
             necessary.  */
          insn_propagation subprop (insn);
          subprop.mem_depth += 1;
          if (!subprop.apply_to_rvalue (loc))
            gcc_unreachable ();
          if (should_unshare
              && num_validated_changes () != old_num_changes + 1)
            {
              /* TO is owned by someone else, so create a copy and
                 return TO to its original form.  */
              rtx to = copy_rtx (*loc);
              cancel_changes (old_num_changes);
              validate_change (insn, loc, to, 1);
            }
        }
      num_replacements += 1;
      should_unshare = true;
      result_flags |= UNSIMPLIFIED;
      return true;
    }

  /* Recursively apply the substitution and see if we can simplify
     the result.  This specifically shouldn't use simplify_gen_* for
     speculative simplifications, since we want to avoid generating new
     expressions where possible.  */
  auto old_result_flags = result_flags;
  rtx newx = NULL_RTX;
  bool recurse_p = false;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      {
        machine_mode op0_mode = GET_MODE (XEXP (x, 0));
        if (!apply_to_rvalue_1 (&XEXP (x, 0)))
          return false;
        if (from && old_num_changes == num_validated_changes ())
          return true;

        newx = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
        break;
      }

    case RTX_BIN_ARITH:
    case RTX_COMM_ARITH:
      {
        if (!apply_to_rvalue_1 (&XEXP (x, 0))
            || !apply_to_rvalue_1 (&XEXP (x, 1)))
          return false;
        if (from && old_num_changes == num_validated_changes ())
          return true;

        if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
            && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
          newx = simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
        else
          newx = simplify_binary_operation (code, mode,
                                            XEXP (x, 0), XEXP (x, 1));
        break;
      }

    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      {
        machine_mode op_mode = (GET_MODE (XEXP (x, 0)) != VOIDmode
                                ? GET_MODE (XEXP (x, 0))
                                : GET_MODE (XEXP (x, 1)));
        if (!apply_to_rvalue_1 (&XEXP (x, 0))
            || !apply_to_rvalue_1 (&XEXP (x, 1)))
          return false;
        if (from && old_num_changes == num_validated_changes ())
          return true;

        newx = simplify_relational_operation (code, mode, op_mode,
                                              XEXP (x, 0), XEXP (x, 1));
        break;
      }

    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      {
        machine_mode op0_mode = GET_MODE (XEXP (x, 0));
        if (!apply_to_rvalue_1 (&XEXP (x, 0))
            || !apply_to_rvalue_1 (&XEXP (x, 1))
            || !apply_to_rvalue_1 (&XEXP (x, 2)))
          return false;
        if (from && old_num_changes == num_validated_changes ())
          return true;

        newx = simplify_ternary_operation (code, mode, op0_mode,
                                           XEXP (x, 0), XEXP (x, 1),
                                           XEXP (x, 2));
        break;
      }

    case RTX_EXTRA:
      if (code == SUBREG)
        {
          machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
          if (!apply_to_rvalue_1 (&SUBREG_REG (x)))
            return false;
          if (from && old_num_changes == num_validated_changes ())
            return true;

          rtx inner = SUBREG_REG (x);
          newx = simplify_subreg (mode, inner, inner_mode, SUBREG_BYTE (x));
          /* Reject the same cases that simplify_gen_subreg would.  */
          if (!newx
              && (GET_CODE (inner) == SUBREG
                  || GET_CODE (inner) == CONCAT
                  || GET_MODE (inner) == VOIDmode
                  || !validate_subreg (mode, inner_mode,
                                       inner, SUBREG_BYTE (x))))
            {
              failure_reason = "would create an invalid subreg";
              return false;
            }
          break;
        }
      recurse_p = true;
      break;

    case RTX_OBJ:
      if (code == LO_SUM)
        {
          if (!apply_to_rvalue_1 (&XEXP (x, 0))
              || !apply_to_rvalue_1 (&XEXP (x, 1)))
            return false;
          if (from && old_num_changes == num_validated_changes ())
            return true;

          /* (lo_sum (high x) y) -> y where x and y have the same base.  */
          rtx op0 = XEXP (x, 0);
          rtx op1 = XEXP (x, 1);
          if (GET_CODE (op0) == HIGH)
            {
              rtx base0, base1, offset0, offset1;
              split_const (XEXP (op0, 0), &base0, &offset0);
              split_const (op1, &base1, &offset1);
              if (rtx_equal_p (base0, base1))
                newx = op1;
            }
        }
      else if (code == REG)
        {
          if (from && REG_P (from) && reg_overlap_mentioned_p (x, from))
            {
              failure_reason = "inexact register overlap";
              return false;
            }
        }
      else if (code == MEM)
        return apply_to_mem_1 (x);
      else
        recurse_p = true;
      break;

    case RTX_AUTOINC:
      if (from && reg_overlap_mentioned_p (XEXP (x, 0), from))
        {
          failure_reason = "is subject to autoinc";
          return false;
        }
      recurse_p = true;
      break;

    case RTX_CONST_OBJ:
      break;

    default:
      recurse_p = true;
      break;
    }

  if (recurse_p)
    {
      const char *fmt = GET_RTX_FORMAT (code);
      for (int i = 0; fmt[i]; i++)
        switch (fmt[i])
          {
          case 'E':
            for (int j = 0; j < XVECLEN (x, i); j++)
              if (!apply_to_rvalue_1 (&XVECEXP (x, i, j)))
                return false;
            break;

          case 'e':
            if (XEXP (x, i) && !apply_to_rvalue_1 (&XEXP (x, i)))
              return false;
            break;
          }
    }
  else if (newx && !rtx_equal_p (x, newx))
    {
      /* All substitutions made by OLD_NUM_CHANGES onwards have been
         simplified.  */
      result_flags = ((result_flags & ~UNSIMPLIFIED)
                      | (old_result_flags & UNSIMPLIFIED));

      if (should_note_simplifications)
        note_simplification (old_num_changes, old_result_flags, x, newx);

      /* There's no longer any point unsharing the substitutions made
         for subexpressions, since we'll just copy this one instead.  */
      bool unshare = false;
      for (int i = old_num_changes; i < num_changes; ++i)
        {
          unshare |= changes[i].unshare;
          changes[i].unshare = false;
        }
      if (unshare)
        validate_unshare_change (insn, loc, newx, 1);
      else
        validate_change (insn, loc, newx, 1);
    }

  return true;
}
/* Try to process the lvalue expression at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_lvalue_1 (rtx dest)
{
  rtx old_dest = dest;
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    {
      if (GET_CODE (dest) == ZERO_EXTRACT
          && (!apply_to_rvalue_1 (&XEXP (dest, 1))
              || !apply_to_rvalue_1 (&XEXP (dest, 2))))
        return false;
      dest = XEXP (dest, 0);
    }

  if (MEM_P (dest))
    return apply_to_mem_1 (dest);

  /* Check whether the substitution is safe in the presence of this lvalue.  */
  if (!from
      || dest == old_dest
      || !REG_P (dest)
      || !reg_overlap_mentioned_p (dest, from))
    return true;

  if (SUBREG_P (old_dest)
      && SUBREG_REG (old_dest) == dest
      && !read_modify_subreg_p (old_dest))
    return true;

  failure_reason = "is part of a read-write destination";
  return false;
}
/* Try to process the instruction pattern at *LOC.  Return true on success;
   leave the caller to clean up on failure.  */

bool
insn_propagation::apply_to_pattern_1 (rtx *loc)
{
  rtx body = *loc;
  switch (GET_CODE (body))
    {
    case COND_EXEC:
      return (apply_to_rvalue_1 (&COND_EXEC_TEST (body))
              && apply_to_pattern_1 (&COND_EXEC_CODE (body)));

    case PARALLEL:
      {
        int last = XVECLEN (body, 0) - 1;
        for (int i = 0; i < last; ++i)
          if (!apply_to_pattern_1 (&XVECEXP (body, 0, i)))
            return false;
        return apply_to_pattern_1 (&XVECEXP (body, 0, last));
      }

    case ASM_OPERANDS:
      for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (body); i < len; ++i)
        if (!apply_to_rvalue_1 (&ASM_OPERANDS_INPUT (body, i)))
          return false;
      return true;

    case CLOBBER:
      return apply_to_lvalue_1 (XEXP (body, 0));

    case SET:
      return (apply_to_lvalue_1 (SET_DEST (body))
              && apply_to_rvalue_1 (&SET_SRC (body)));

    default:
      /* All the other possibilities never store and can use a normal
         rtx walk.  This includes:

         - USE
         - TRAP_IF
         - PREFETCH
         - UNSPEC
         - UNSPEC_VOLATILE.  */
      return apply_to_rvalue_1 (loc);
    }
}
/* Apply this insn_propagation object's simplification or substitution
   to the instruction pattern at LOC.  */

bool
insn_propagation::apply_to_pattern (rtx *loc)
{
  unsigned int num_changes = num_validated_changes ();
  bool res = apply_to_pattern_1 (loc);
  if (!res)
    cancel_changes (num_changes);
  return res;
}

/* Apply this insn_propagation object's simplification or substitution
   to the rvalue expression at LOC.  */

bool
insn_propagation::apply_to_rvalue (rtx *loc)
{
  unsigned int num_changes = num_validated_changes ();
  bool res = apply_to_rvalue_1 (loc);
  if (!res)
    cancel_changes (num_changes);
  return res;
}
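
/* Editorial sketch (not part of the original source): a typical client
   substitutes the definition of a register into a use and keeps the
   result only if the substitution succeeded:

     insn_propagation prop (use_insn, src_reg, def_src);
     if (prop.apply_to_pattern (&PATTERN (use_insn))
         && prop.num_replacements > 0)
       ... changes are now pending in the current change group ...

   use_insn, src_reg and def_src are placeholders for the caller's data;
   fwprop.cc contains real uses of this class.  */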
/* Check whether INSN matches a specific alternative of an .md pattern.  */

bool
valid_insn_p (rtx_insn *insn)
{
  recog_memoized (insn);
  if (INSN_CODE (insn) < 0)
    return false;
  extract_insn (insn);
  /* We don't know whether the insn will be in code that is optimized
     for size or speed, so consider all enabled alternatives.  */
  if (!constrain_operands (1, get_enabled_alternatives (insn)))
    return false;
  return true;
}
/* Return true if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return false;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return false;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && paradoxical_subreg_p (op))
        return false;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed
          && maybe_ne (SUBREG_BYTE (op), 0)
          && MEM_P (sub))
        return false;

      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return false;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && paradoxical_subreg_p (op))
        return false;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return false;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
         generate move insn with invalid addresses which is made valid
         and efficiently calculated by LRA through further numerous
         reloads.  */
      if (lra_in_progress
          || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return true;
    }

  return false;
}
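
/* Illustrative examples (editorial, not part of the original source):
   for MODE == SImode, (reg:SI 100), (mem:SI (reg:SI 101)) with a valid
   address, and (const_int 42) are all accepted by general_operand,
   while a volatile MEM is rejected unless volatile_ok is set.  The
   register numbers are hypothetical pseudos.  */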
/* Return true if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
address_operand (rtx op, machine_mode mode)
{
  /* Wrong mode for an address expr.  */
  if (GET_MODE (op) != VOIDmode
      && ! SCALAR_INT_MODE_P (GET_MODE (op)))
    return false;

  return memory_address_p (mode, op);
}
/* Return true if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
        return false;
    }
  else if (!REG_P (op))
    return false;
  return general_operand (op, mode);
}
/* Return true for a register in Pmode; ignore the tested mode.  */

bool
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return true if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

bool
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return false;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress
                  || (REGNO (op) < FIRST_PSEUDO_REGISTER
                      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}
/* Return true if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return false;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return false;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}
/* Return true if OP is an operand that is a CONST_INT of mode MODE.  */

bool
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return false;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return false;

  return true;
}
1653 /* Return true if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1656 const_scalar_int_operand (rtx op
, machine_mode mode
)
1658 if (!CONST_SCALAR_INT_P (op
))
1661 if (CONST_INT_P (op
))
1662 return const_int_operand (op
, mode
);
1664 if (mode
!= VOIDmode
)
1666 scalar_int_mode int_mode
= as_a
<scalar_int_mode
> (mode
);
1667 int prec
= GET_MODE_PRECISION (int_mode
);
1668 int bitsize
= GET_MODE_BITSIZE (int_mode
);
1670 if (CONST_WIDE_INT_NUNITS (op
) * HOST_BITS_PER_WIDE_INT
> bitsize
)
1673 if (prec
== bitsize
)
1677 /* Multiword partial int. */
1679 = CONST_WIDE_INT_ELT (op
, CONST_WIDE_INT_NUNITS (op
) - 1);
1680 return (sext_hwi (x
, prec
& (HOST_BITS_PER_WIDE_INT
- 1)) == x
);
1686 /* Return true if OP is an operand that is a constant integer or constant
1687 floating-point number of MODE. */
1690 const_double_operand (rtx op
, machine_mode mode
)
1692 return (GET_CODE (op
) == CONST_DOUBLE
)
1693 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
);
1696 /* Return true if OP is an operand that is a constant integer or constant
1697 floating-point number of MODE. */
1700 const_double_operand (rtx op
, machine_mode mode
)
1702 /* Don't accept CONST_INT or anything similar
1703 if the caller wants something floating. */
1704 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1705 && GET_MODE_CLASS (mode
) != MODE_INT
1706 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1709 return ((CONST_DOUBLE_P (op
) || CONST_INT_P (op
))
1710 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1711 || GET_MODE (op
) == VOIDmode
));
/* Return true if OP is a general operand that is not an immediate
   operand of mode MODE.  */

bool
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return true if OP is a register reference or
   immediate value of mode MODE.  */

bool
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}
/* Return true if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
push_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return false;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return false;
    }
  else
    {
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
          || (STACK_GROWS_DOWNWARD
              ? maybe_ne (offset, -rounded_size)
              : maybe_ne (offset, rounded_size)))
        return false;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
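
/* Illustrative examples (editorial, not part of the original source):
   with STACK_PUSH_CODE == PRE_DEC and no padding, push_operand accepts

     (mem:SI (pre_dec:P (reg sp)))

   and, when the push size is rounded up, the PRE_MODIFY form

     (mem:SI (pre_modify (reg sp) (plus (reg sp) (const_int -8))))

   where "sp" stands for stack_pointer_rtx and the -8 is target
   dependent.  */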
/* Return true if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return false;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return false;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return true if ADDR is a valid memory address
   for mode MODE in address space AS.  */

bool
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return false;

 win:
  return true;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return true if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

bool
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return true if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

bool
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      if (mode != VOIDmode && GET_MODE (op) != mode)
        return false;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */
      poly_int64 offset;
      rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
      return (known_eq (offset + SUBREG_BYTE (op), 0)
              && general_operand (addr, Pmode));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return true if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

bool
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}
/* Return true if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

bool
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return 0.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    {
      if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
          && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
        {
          /* body is [(asm_input ...) (clobber (reg ...))...].  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
          return 0;
        }
      return -1;
    }

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
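
/* Illustrative example (editorial, not part of the original source): for

     asm ("foo %0, %1, %2" : "=r" (a), "=r" (b) : "r" (c));

   the body is a PARALLEL of two SETs (plus any clobbers), and
   asm_noperands returns 3: one input plus two outputs, no labels.  */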
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Write the location info into LOC.
   Return the assembler-template.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return the basic assembly string.

   If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;		/* Past last SET */
                gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        else if (GET_CODE (asmop) == ASM_INPUT)
          {
            if (loc)
              *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
            return XSTR (asmop, 0);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
                         unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
        p += 1;
        /* A letter followed by a digit indicates an operand number.  */
        if (ISALPHA (p[0]) && ISDIGIT (p[1]))
          p += 1;
        if (ISDIGIT (*p))
          {
            char *endptr;
            unsigned long opnum = strtoul (p, &endptr, 10);
            if (endptr != p && opnum < noperands)
              used[opnum] = true;
            p = endptr;
          }
        else
          p += 1;
        break;

      default:
        p++;
        break;
      }
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
  bool incdec_ok = false;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

          /* The rest of the compiler assumes that reloading the address
             of a MEM into a register will make it fit an 'o' constraint.
             That is, if it sees a MEM operand for an 'o' constraint,
             it assumes that (mem (base-reg)) will fit.

             That assumption fails on targets that don't have offsettable
             addresses at all.  We therefore need to treat 'o' asm
             constraints as a special case and only accept operands that
             are already offsettable, thus proving that at least one
             offsettable address exists.  */
        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        case '<':
        case '>':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */
          incdec_ok = true;
          /* FALLTHRU */
        default:
          cn = lookup_constraint (constraint);
          rtx mem = NULL;
          switch (get_constraint_type (cn))
            {
            case CT_REGISTER:
              if (!result
                  && reg_class_for_constraint (cn) != NO_REGS
                  && GET_MODE (op) != BLKmode
                  && register_operand (op, VOIDmode))
                result = 1;
              break;

            case CT_CONST_INT:
              if (!result
                  && CONST_INT_P (op)
                  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
                result = 1;
              break;

            case CT_MEMORY:
            case CT_RELAXED_MEMORY:
              mem = op;
              /* Fall through.  */
            case CT_SPECIAL_MEMORY:
              /* Every memory operand can be reloaded to fit.  */
              if (!mem)
                mem = extract_mem_from_operand (op);
              result = result || memory_operand (mem, VOIDmode);
              break;

            case CT_ADDRESS:
              /* Every address operand can be reloaded to fit.  */
              result = result || address_operand (op, VOIDmode);
              break;

            case CT_FIXED_FORM:
              result = result || constraint_satisfied_p (op, cn);
              break;
            }
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint && *constraint != ',');
      if (len)
        return 0;
    }

  /* For operands without < or > constraints reject side-effects.  */
  if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
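/* Example (illustrative only): for an address of the form
   (plus (reg R) (const_int 4)), find_constant_term_loc returns
   &XEXP (*p, 1), the location holding (const_int 4); for a bare
   (reg R) it returns a null pointer, since there is no constant term.  */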
/* Return true if OP is a memory reference whose address contains
   no side effects and remains valid after the addition of a positive
   integer less than the size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

bool
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

bool
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}
/* Return true if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.cc.  */

bool
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  bool (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
	     : memory_address_addr_space_p);
  poly_int64 mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return true;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return false;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (known_eq (mode_sz, 0))
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      bool good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return false;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
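/* Worked example (illustrative; whether it holds depends on the target):
   for a 4-byte SImode reference at (plus (reg BASE) (const_int 8)), the
   constant term is temporarily rewritten to 8 + 4 - 1 = 11 and the target
   is asked whether (plus (reg BASE) (const_int 11)) is still a valid
   QImode address.  If it is, every byte of the SImode access is
   individually addressable, which is what "offsettable" means here.  */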
/* Return true if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}
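/* Example (illustrative): (post_inc (reg SP)) is mode-dependent because
   the increment equals the size of the accessed mode, so the same address
   rtx means different things in QImode and SImode.  By contrast,
   (plus (reg B) (const_int 4)) is mode-independent unless the target's
   mode_dependent_address_p hook says otherwise.  */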
/* Return true if boolean attribute ATTR is supported.  */

static bool
have_bool_attr (bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return HAVE_ATTR_enabled;
    case BA_PREFERRED_FOR_SIZE:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
    case BA_PREFERRED_FOR_SPEED:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
    }
  gcc_unreachable ();
}
/* Return the value of ATTR for instruction INSN.  */

static bool
get_bool_attr (rtx_insn *insn, bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return get_attr_enabled (insn);
    case BA_PREFERRED_FOR_SIZE:
      return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
    case BA_PREFERRED_FOR_SPEED:
      return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
    }
  gcc_unreachable ();
}
/* Like get_bool_attr_mask, but don't use the cache.  */

static alternative_mask
get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
{
  /* Temporarily install enough information for get_attr_<foo> to assume
     that the insn operands are already cached.  As above, the attribute
     mustn't depend on the values of operands, so we don't provide their
     real values here.  */
  rtx_insn *old_insn = recog_data.insn;
  int old_alternative = which_alternative;

  recog_data.insn = insn;
  alternative_mask mask = ALL_ALTERNATIVES;
  int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
  for (int i = 0; i < n_alternatives; i++)
    {
      which_alternative = i;
      if (!get_bool_attr (insn, attr))
	mask &= ~ALTERNATIVE_BIT (i);
    }

  recog_data.insn = old_insn;
  which_alternative = old_alternative;
  return mask;
}
/* Return the mask of operand alternatives that are allowed for INSN
   by boolean attribute ATTR.  This mask depends only on INSN and on
   the current target; it does not depend on things like the values of
   operands.  */

static alternative_mask
get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
{
  /* Quick exit for asms and for targets that don't use these attributes.  */
  int code = INSN_CODE (insn);
  if (code < 0 || !have_bool_attr (attr))
    return ALL_ALTERNATIVES;

  /* Calling get_attr_<foo> can be expensive, so cache the mask
     for speed.  */
  if (!this_target_recog->x_bool_attr_masks[code][attr])
    this_target_recog->x_bool_attr_masks[code][attr]
      = get_bool_attr_mask_uncached (insn, attr);
  return this_target_recog->x_bool_attr_masks[code][attr];
}
/* Return the set of alternatives of INSN that are allowed by the current
   target.  */

alternative_mask
get_enabled_alternatives (rtx_insn *insn)
{
  return get_bool_attr_mask (insn, BA_ENABLED);
}

/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the current size/speed optimization
   choice.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn)
{
  if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}

/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the size/speed optimization choice
   associated with BB.  Passing a separate BB is useful if INSN has not
   been emitted yet or if we are considering moving it to a different
   block.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn, basic_block bb)
{
  if (optimize_bb_for_speed_p (bb))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}
/* Assert that the cached boolean attributes for INSN are still accurate.
   The backend is required to define these attributes in a way that only
   depends on the current target (rather than operands, compiler phase,
   etc.).  */

bool
check_bool_attrs (rtx_insn *insn)
{
  int code = INSN_CODE (insn);
  if (code >= 0)
    for (int i = 0; i <= BA_LAST; ++i)
      {
	enum bool_attr attr = (enum bool_attr) i;
	if (this_target_recog->x_bool_attr_masks[code][attr])
	  gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
		      == get_bool_attr_mask_uncached (insn, attr));
      }
  return true;
}
/* Like extract_insn, but remember which insn was extracted and don't
   extract again when called repeatedly for the same insn, on the
   expectation that recog_data still contains valid information.  This is
   used primarily by the gen_attr infrastructure, which tends to extract
   the same insn over and over.  */

void
extract_insn_cached (rtx_insn *insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
/* Do an uncached extract_insn, constrain_operands, and complain about
   failures.  This should be used when extracting a pre-existing
   constrained instruction if the caller wants to know which alternative
   was chosen.  */

void
extract_constrain_insn (rtx_insn *insn)
{
  extract_insn (insn);
  if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}

/* Do a cached extract_insn, constrain_operands, and complain about
   failures.  Used by insn_attrtab.  */

void
extract_constrain_insn_cached (rtx_insn *insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed,
			      get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands on INSN and complain about failures.  */

int
constrain_operands_cached (rtx_insn *insn, int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict, get_enabled_alternatives (insn));
  else
    return 1;
}
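/* Typical usage sketch (illustrative, not taken from this file): a pass
   that needs the chosen alternative for an already-recognized insn does

     extract_constrain_insn_cached (insn);
     gcc_assert (which_alternative >= 0);

   after which recog_data describes the operands and which_alternative
   records which constraint alternative matched.  */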
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx_insn *insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
    case DEBUG_MARKER:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
	  if (noperands > 0)
	    {
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  recog_data.is_asm = true;
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  recog_data.insn = NULL;
  which_alternative = -1;
}
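/* Usage sketch (illustrative): after

     extract_insn (insn);

   recog_data.operand[i], recog_data.operand_loc[i] and
   recog_data.constraints[i] are valid for 0 <= i < recog_data.n_operands,
   and recog_data.operand_type[i] distinguishes inputs (OP_IN),
   outputs (OP_OUT) and read-write operands (OP_INOUT).  */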
/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
   operands, N_ALTERNATIVES alternatives and constraint strings
   CONSTRAINTS.  OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
   and CONSTRAINTS has N_OPERANDS entries.  OPLOC should be passed in
   if the insn is an asm statement and preprocessing should take the
   asm operands into account, e.g. to determine whether they could be
   addresses in constraints that require addresses; it should then
   point to an array of pointers to each operand.  */

void
preprocess_constraints (int n_operands, int n_alternatives,
			const char **constraints,
			operand_alternative *op_alt_base,
			rtx **oploc)
{
  for (int i = 0; i < n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = constraints[i];

      op_alt = op_alt_base;

      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
	{
	  op_alt[i].cl = NO_REGS;
	  op_alt[i].constraint = p;
	  op_alt[i].matches = -1;
	  op_alt[i].matched = -1;

	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[i].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p;
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '?':
		  op_alt[i].reject += 6;
		  break;
		case '!':
		  op_alt[i].reject += 600;
		  break;
		case '&':
		  op_alt[i].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    char *end;
		    op_alt[i].matches = strtoul (p, &end, 10);
		    op_alt[op_alt[i].matches].matched = i;
		    p = end;
		  }
		  continue;

		case 'X':
		  op_alt[i].anything_ok = 1;
		  break;

		case 'g':
		  op_alt[i].cl =
		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
		  break;

		default:
		  {
		    enum constraint_num cn = lookup_constraint (p);
		    enum reg_class cl;
		    switch (get_constraint_type (cn))
		      {
		      case CT_REGISTER:
			cl = reg_class_for_constraint (cn);
			if (cl != NO_REGS)
			  op_alt[i].cl
			    = reg_class_subunion[op_alt[i].cl][cl];
			break;

		      case CT_CONST_INT:
			break;

		      case CT_MEMORY:
		      case CT_SPECIAL_MEMORY:
		      case CT_RELAXED_MEMORY:
			op_alt[i].memory_ok = 1;
			break;

		      case CT_ADDRESS:
			if (oploc && !address_operand (*oploc[i], VOIDmode))
			  break;

			op_alt[i].is_address = 1;
			op_alt[i].cl
			  = (reg_class_subunion
			     [(int) op_alt[i].cl]
			     [(int) base_reg_class (VOIDmode,
						    ADDR_SPACE_GENERIC,
						    ADDRESS, SCRATCH)]);
			break;

		      case CT_FIXED_FORM:
			break;
		      }
		    break;
		  }
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
/* Return an array of operand_alternative structures for instruction
   ICODE.  */

const operand_alternative *
preprocess_insn_constraints (unsigned int icode)
{
  gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
  if (this_target_recog->x_op_alt[icode])
    return this_target_recog->x_op_alt[icode];

  int n_operands = insn_data[icode].n_operands;
  if (n_operands == 0)
    return 0;
  /* Always provide at least one alternative so that which_op_alt ()
     works correctly.  If the instruction has 0 alternatives (i.e. all
     constraint strings are empty) then each operand in this alternative
     will have anything_ok set.  */
  int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
  int n_entries = n_operands * n_alternatives;

  operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
  const char **constraints = XALLOCAVEC (const char *, n_operands);

  for (int i = 0; i < n_operands; ++i)
    constraints[i] = insn_data[icode].operand[i].constraint;
  preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
			  NULL);

  this_target_recog->x_op_alt[icode] = op_alt;
  return op_alt;
}
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

void
preprocess_constraints (rtx_insn *insn)
{
  int icode = INSN_CODE (insn);
  if (icode >= 0)
    recog_op_alt = preprocess_insn_constraints (icode);
  else
    {
      int n_operands = recog_data.n_operands;
      int n_alternatives = recog_data.n_alternatives;
      int n_entries = n_operands * n_alternatives;
      memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
      preprocess_constraints (n_operands, n_alternatives,
			      recog_data.constraints, asm_op_alt,
			      NULL);
      recog_op_alt = asm_op_alt;
    }
}
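/* Indexing sketch (illustrative): after extract_insn followed by
   preprocess_constraints, the entry for operand OP in alternative A is

     const operand_alternative *entry
       = &recog_op_alt[A * recog_data.n_operands + OP];

   matching the layout documented for recog_op_alt near the top of this
   file.  */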
/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they match any of the alternatives in ALTERNATIVES.

   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */
struct funny_match
{
  int this_op, other;
};

int
constrain_operands (int strict, alternative_mask alternatives)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    constraints[c] = recog_data.constraints[c];

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      if (!TEST_BIT (alternatives, which_alternative))
	{
	  int i;

	  for (i = 0; i < recog_data.n_operands; i++)
	    constraints[i] = skip_alternative (constraints[i]);

	  which_alternative++;
	  continue;
	}

      for (opno = 0; opno < recog_data.n_operands; opno++)
	matching_operands[opno] = -1;

      for (opno = 0; opno < recog_data.n_operands; opno++)
	{
	  rtx op = recog_data.operand[opno];
	  machine_mode mode = GET_MODE (op);
	  const char *p = constraints[opno];
	  int offset = 0;
	  int win = 0;
	  int val;
	  int len;

	  earlyclobber[opno] = 0;

	  /* A unary operator may be accepted by the predicate, but it
	     is irrelevant for matching constraints.  */
	  /* For special_memory_operand, there could be a memory operand
	     inside, and it would cause a mismatch for
	     constraint_satisfied_p.  */
	  if (UNARY_P (op) && op == extract_mem_from_operand (op))
	    op = XEXP (op, 0);

	  if (GET_CODE (op) == SUBREG)
	    {
	      if (REG_P (SUBREG_REG (op))
		  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
		offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
					      GET_MODE (SUBREG_REG (op)),
					      SUBREG_BYTE (op),
					      GET_MODE (op));
	      op = SUBREG_REG (op);
	    }

	  /* An empty constraint or empty alternative
	     allows anything which matched the pattern.  */
	  if (*p == 0 || *p == ',')
	    win = 1;

	  do
	    switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
	      {
	      case '\0':
		len = 0;
		break;
	      case ',':
		c = '\0';
		break;

	      case '#':
		/* Ignore rest of this alternative as far as
		   constraint checking is concerned.  */
		do
		  p++;
		while (*p && *p != ',');
		len = 0;
		break;

	      case '&':
		earlyclobber[opno] = 1;
		if (seen_earlyclobber_at < 0)
		  seen_earlyclobber_at = opno;
		break;

	      case '0': case '1': case '2': case '3': case '4':
	      case '5': case '6': case '7': case '8': case '9':
		{
		  /* This operand must be the same as a previous one.
		     This kind of constraint is used for instructions such
		     as add when they take only two operands.

		     Note that the lower-numbered operand is passed first.

		     If we are not testing strictly, assume that this
		     constraint will be satisfied.  */

		  char *end;
		  int match;

		  match = strtoul (p, &end, 10);
		  p = end;

		  if (strict < 0)
		    val = 1;
		  else
		    {
		      rtx op1 = recog_data.operand[match];
		      rtx op2 = recog_data.operand[opno];

		      /* A unary operator may be accepted by the predicate,
			 but it is irrelevant for matching constraints.  */
		      if (UNARY_P (op1))
			op1 = XEXP (op1, 0);
		      if (UNARY_P (op2))
			op2 = XEXP (op2, 0);

		      val = operands_match_p (op1, op2);
		    }

		  matching_operands[opno] = match;
		  matching_operands[match] = opno;

		  if (val != 0)
		    win = 1;

		  /* If output is *x and input is *--x, arrange later
		     to change the output to *--x as well, since the
		     output op is the one that will be printed.  */
		  if (val == 2 && strict > 0)
		    {
		      funny_match[funny_match_index].this_op = opno;
		      funny_match[funny_match_index++].other = match;
		    }
		}
		len = 0;
		break;

	      case 'p':
		/* p is used for address_operands.  When we are called by
		   gen_reload, no one will have checked that the address is
		   strictly valid, i.e., that all pseudos requiring hard regs
		   have gotten them.  We also want to make sure we have a
		   valid mode.  */
		if ((GET_MODE (op) == VOIDmode
		     || SCALAR_INT_MODE_P (GET_MODE (op)))
		    && (strict <= 0
			|| (strict_memory_address_p
			     (recog_data.operand_mode[opno], op))))
		  win = 1;
		break;

		/* No need to check general_operand again;
		   it was done in insn-recog.cc.  Well, except that reload
		   doesn't check the validity of its replacements, but
		   that should only matter when there's a bug.  */
	      case 'g':
		/* Anything goes unless it is a REG and really has a hard reg
		   but the hard reg is not in the class GENERAL_REGS.  */
		if (REG_P (op))
		  {
		    if (strict < 0
			|| GENERAL_REGS == ALL_REGS
			|| (reload_in_progress
			    && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			|| reg_fits_class_p (op, GENERAL_REGS, offset, mode))
		      win = 1;
		  }
		else if (strict < 0 || general_operand (op, mode))
		  win = 1;
		break;

	      default:
		{
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl = reg_class_for_constraint (cn);
		  if (cl != NO_REGS)
		    {
		      if (strict < 0
			  || (strict == 0
			      && REG_P (op)
			      && REGNO (op) >= FIRST_PSEUDO_REGISTER)
			  || (strict == 0 && GET_CODE (op) == SCRATCH)
			  || (REG_P (op)
			      && reg_fits_class_p (op, cl, offset, mode)))
			win = 1;
		    }

		  else if (constraint_satisfied_p (op, cn))
		    win = 1;

		  else if (insn_extra_memory_constraint (cn)
			   /* Every memory operand can be reloaded to fit.  */
			   && ((strict < 0 && MEM_P (op))
			       /* Before reload, accept what reload can turn
				  into a mem.  */
			       || (strict < 0 && CONSTANT_P (op))
			       /* Before reload, accept a pseudo or hard
				  register, since LRA can turn it into
				  a mem.  */
			       || (strict < 0 && targetm.lra_p ()
				   && REG_P (op))
			       /* During reload, accept a pseudo.  */
			       || (reload_in_progress && REG_P (op)
				   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
		    win = 1;
		  else if (insn_extra_address_constraint (cn)
			   /* Every address operand can be reloaded to fit.  */
			   && strict < 0)
		    win = 1;
		  /* Cater to architectures like IA-64 that define extra memory
		     constraints without using define_memory_constraint.  */
		  else if (reload_in_progress
			   && REG_P (op)
			   && REGNO (op) >= FIRST_PSEUDO_REGISTER
			   && reg_renumber[REGNO (op)] < 0
			   && reg_equiv_mem (REGNO (op)) != 0
			   && constraint_satisfied_p
			      (reg_equiv_mem (REGNO (op)), cn))
		    win = 1;
		  break;
		}
	      }
	  while (p += len, c);

	  constraints[opno] = p;
	  /* If this operand did not win somehow,
	     this alternative loses.  */
	  if (! win)
	    lose = 1;
	}
      /* This alternative won; the operands are ok.
	 Change whichever operands this alternative says to change.  */
      if (! lose)
	{
	  int opno, eopno;

	  /* See if any earlyclobber operand conflicts with some other
	     operand.  */

	  if (strict > 0 && seen_earlyclobber_at >= 0)
	    for (eopno = seen_earlyclobber_at;
		 eopno < recog_data.n_operands;
		 eopno++)
	      /* Ignore earlyclobber operands now in memory,
		 because we would often report failure when we have
		 two memory operands, one of which was formerly a REG.  */
	      if (earlyclobber[eopno]
		  && REG_P (recog_data.operand[eopno]))
		for (opno = 0; opno < recog_data.n_operands; opno++)
		  if ((MEM_P (recog_data.operand[opno])
		       || recog_data.operand_type[opno] != OP_OUT)
		      && opno != eopno
		      /* Ignore things like match_operator operands.  */
		      && *recog_data.constraints[opno] != 0
		      && ! (matching_operands[opno] == eopno
			    && operands_match_p (recog_data.operand[opno],
						 recog_data.operand[eopno]))
		      && ! safe_from_earlyclobber (recog_data.operand[opno],
						   recog_data.operand[eopno]))
		    lose = 1;

	  if (! lose)
	    {
	      while (--funny_match_index >= 0)
		{
		  recog_data.operand[funny_match[funny_match_index].other]
		    = recog_data.operand[funny_match[funny_match_index]
					 .this_op];
		}

	      /* For operands without < or > constraints reject
		 side-effects.  */
	      if (AUTO_INC_DEC && recog_data.is_asm)
		{
		  for (opno = 0; opno < recog_data.n_operands; opno++)
		    if (MEM_P (recog_data.operand[opno]))
		      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
			{
			case PRE_INC:
			case POST_INC:
			case PRE_DEC:
			case POST_DEC:
			case PRE_MODIFY:
			case POST_MODIFY:
			  if (strchr (recog_data.constraints[opno],
				      '<') == NULL
			      && strchr (recog_data.constraints[opno],
					 '>') == NULL)
			    return 0;
			  break;
			default:
			  break;
			}
		}

	      return 1;
	    }
	}

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1, alternatives);
  else
    return 0;
}
/* Return true iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
		  machine_mode mode)
{
  unsigned int regno = REGNO (operand);

  if (cl == NO_REGS)
    return false;

  /* Regno must not be a pseudo register.  Offset may be negative.  */
  return (HARD_REGISTER_NUM_P (regno)
	  && HARD_REGISTER_NUM_P (regno + offset)
	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
				regno + offset));
}
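/* Example (illustrative; the register numbering is target-specific): on a
   target where hard register 0 belongs to GENERAL_REGS,
   reg_fits_class_p ((reg:SI 0), GENERAL_REGS, 0, SImode) returns true,
   while any pseudo register fails the HARD_REGISTER_NUM_P test and
   therefore returns false.  */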
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx_insn *
split_insn (rtx_insn *insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx_insn *first = PREV_INSN (insn);
  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
	{
	  note = find_reg_equal_equiv_note (insn);
	  if (note && CONSTANT_P (XEXP (note, 0)))
	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
	  else if (CONSTANT_P (SET_SRC (insn_set)))
	    set_unique_reg_note (last, REG_EQUAL,
				 copy_rtx (SET_SRC (insn_set)));
	}
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }

  return last;
}
/* Split all insns in the function.  */

void
split_all_insns (void)
{
  bool changed;
  bool need_cfg_cleanup = false;
  basic_block bb;

  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));

	  /* If INSN has a REG_EH_REGION note and we split INSN, the
	     resulting split may not have/need REG_EH_REGION notes.

	     If that happens and INSN was the last reference to the
	     given EH region, then the EH region will become unreachable.
	     We cannot leave the unreachable blocks in the CFG as that
	     will trigger a checking failure.

	     So track if INSN has a REG_EH_REGION note.  If so and we
	     split INSN, then trigger a CFG cleanup.  */
	  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    delete_insn_and_edges (insn);
		  if (note)
		    need_cfg_cleanup = true;
		}
	      else
		{
		  if (split_insn (insn))
		    {
		      bitmap_set_bit (blocks, bb->index);
		      changed = true;
		      if (note)
			need_cfg_cleanup = true;
		    }
		}
	    }
	}
    }

  default_rtl_profile ();
  if (changed)
    {
      find_many_sub_basic_blocks (blocks);

      /* Splitting could drop an REG_EH_REGION if it potentially
	 trapped in its original form, but does not in its split
	 form.  Consider a FLOAT_TRUNCATE which splits into a memory
	 store/load pair and -fnon-call-exceptions.  */
      if (need_cfg_cleanup)
	cleanup_cfg (0);
    }

  checking_verify_flow_info ();
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx_insn *next, *insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles LIBCALL blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
  return 0;
}
struct peep2_insn_data
{
  rtx_insn *insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A marker indicating the last insn of the block.  The live_before regset
   for this element is correct, indicating DF_LIVE_OUT for the block.  */
#define PEEP2_EOB invalid_insn_rtx
/* Wrap N to fit into the peep2_insn_data buffer.  */

static int
peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx_insn *
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n = peep2_buf_position (peep2_current + n);

  return peep2_insn_data[n].insn;
}
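/* Example (illustrative): within a define_peephole2 condition or
   preparation statement, peep2_next_insn (0) is the first insn of the
   matched window, peep2_next_insn (1) the following non-note insn, and
   so on, up to peep2_current_count insns.  */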
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  unsigned int end_regno = END_REGNO (reg);
  for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
      return 0;
  return 1;
}
/* Regno offset to be used in the register search.  */
static int search_ofs;

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  machine_mode mode, HARD_REG_SET *reg_set)
{
  enum reg_class cl;
  HARD_REG_SET live;
  df_ref def;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);

      /* Don't use registers set or clobbered by the insn.  */
      FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
	SET_HARD_REG_BIT (live, DF_REF_REGNO (def));

      from = peep2_buf_position (from + 1);
    }

  cl = reg_class_for_constraint (lookup_constraint (class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Can it support the mode we need?  */
      if (!targetm.hard_regno_mode_ok (regno, mode))
	continue;

      success = 1;
      for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
	{
	  /* Don't allocate fixed registers.  */
	  if (fixed_regs[regno + j])
	    {
	      success = 0;
	      break;
	    }
	  /* Don't allocate global registers.  */
	  if (global_regs[regno + j])
	    {
	      success = 0;
	      break;
	    }
	  /* Make sure the register is of the right class.  */
	  if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
	    {
	      success = 0;
	      break;
	    }
	  /* And that we don't create an extra save/restore.  */
	  if (! crtl->abi->clobbers_full_reg_p (regno + j)
	      && ! df_regs_ever_live_p (regno + j))
	    {
	      success = 0;
	      break;
	    }

	  if (! targetm.hard_regno_scratch_ok (regno + j))
	    {
	      success = 0;
	      break;
	    }

	  /* And we don't clobber traceback for noreturn functions.  */
	  if ((regno + j == FRAME_POINTER_REGNUM
	       || regno + j == HARD_FRAME_POINTER_REGNUM)
	      && (! reload_completed || frame_pointer_needed))
	    {
	      success = 0;
	      break;
	    }

	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}

      if (success)
	{
	  add_to_hard_reg_set (reg_set, mode, regno);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
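/* Usage sketch (illustrative, hypothetical): a peephole2 condition can
   ask for a scratch register that stays free across the matched window,
   e.g.

     HARD_REG_SET regset;
     CLEAR_HARD_REG_SET (regset);
     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &regset);

   and reject the transformation when NULL_RTX is returned.  */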
/* Forget all currently tracked instructions, only remember current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last holds invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}
/* Copies frame related info of an insn (OLD_INSN) to the single
   insn (NEW_INSN) that was obtained by splitting OLD_INSN.  */

static void
copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn)
{
  bool any_note = false;
  rtx note;

  if (!RTX_FRAME_RELATED_P (old_insn))
    return;

  RTX_FRAME_RELATED_P (new_insn) = 1;

  /* Allow the backend to fill in a note during the split.  */
  for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
    switch (REG_NOTE_KIND (note))
      {
      case REG_FRAME_RELATED_EXPR:
      case REG_CFA_DEF_CFA:
      case REG_CFA_ADJUST_CFA:
      case REG_CFA_OFFSET:
      case REG_CFA_REGISTER:
      case REG_CFA_EXPRESSION:
      case REG_CFA_RESTORE:
      case REG_CFA_SET_VDRAP:
	any_note = true;
	break;
      default:
	break;
      }

  /* If the backend didn't supply a note, copy one over.  */
  if (!any_note)
    for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
      switch (REG_NOTE_KIND (note))
	{
	case REG_FRAME_RELATED_EXPR:
	case REG_CFA_DEF_CFA:
	case REG_CFA_ADJUST_CFA:
	case REG_CFA_OFFSET:
	case REG_CFA_REGISTER:
	case REG_CFA_EXPRESSION:
	case REG_CFA_RESTORE:
	case REG_CFA_SET_VDRAP:
	  add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	  any_note = true;
	  break;
	default:
	  break;
	}

  /* If there still isn't a note, make sure the unwind info sees the
     same expression as before the split.  */
  if (!any_note)
    {
      rtx old_set, new_set;

      /* The old insn had better have been simple, or annotated.  */
      old_set = single_set (old_insn);
      gcc_assert (old_set != NULL);

      new_set = single_set (new_insn);
      if (!new_set || !rtx_equal_p (new_set, old_set))
	add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
    }

  /* Copy prologue/epilogue status.  This is required in order to keep
     proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
  maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
}
/* While scanning basic block BB, we found a match of length MATCH_LEN,
   starting at INSN.  Perform the replacement, removing the old insns and
   replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
   if the replacement is rejected.  */

static rtx_insn *
peep2_attempt (basic_block bb, rtx_insn *insn, int match_len,
	       rtx_insn *attempt)
{
  int i;
  rtx_insn *last, *before_try, *x;
  rtx eh_note, as_note;
  rtx note;
  rtx_insn *old_insn;
  rtx_insn *new_insn;
  bool was_call = false;

  /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
     match more than one insn, or to be split into more than one insn.  */
  old_insn = peep2_insn_data[peep2_current].insn;
  if (RTX_FRAME_RELATED_P (old_insn))
    {
      if (match_len != 0)
	return NULL;

      /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
	 may be in the stream for the purpose of register allocation.  */
      if (active_insn_p (attempt))
	new_insn = attempt;
      else
	new_insn = next_active_insn (attempt);
      if (next_active_insn (new_insn))
	return NULL;

      /* We have a 1-1 replacement.  Copy over any frame-related info.  */
      copy_frame_info_to_split_insn (old_insn, new_insn);
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
     cfg-related call notes.  */
  for (i = 0; i <= match_len; ++i)
    {
      int j;

      j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;
      if (!CALL_P (old_insn))
	continue;
      was_call = true;

      new_insn = attempt;
      while (new_insn != NULL_RTX)
	{
	  if (CALL_P (new_insn))
	    break;
	  new_insn = NEXT_INSN (new_insn);
	}

      gcc_assert (new_insn != NULL_RTX);

      CALL_INSN_FUNCTION_USAGE (new_insn)
	= CALL_INSN_FUNCTION_USAGE (old_insn);
      SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);

      for (note = REG_NOTES (old_insn);
	   note;
	   note = XEXP (note, 1))
	switch (REG_NOTE_KIND (note))
	  {
	  case REG_NORETURN:
	  case REG_SETJMP:
	  case REG_TM:
	  case REG_CALL_NOCF_CHECK:
	    add_reg_note (new_insn, REG_NOTE_KIND (note),
			  XEXP (note, 0));
	    break;
	  default:
	    /* Discard all other reg notes.  */
	    break;
	  }

      /* Croak if there is another call in the sequence.  */
      while (++i <= match_len)
	{
	  j = peep2_buf_position (peep2_current + i);
	  old_insn = peep2_insn_data[j].insn;
	  gcc_assert (!CALL_P (old_insn));
	}
      break;
    }

  /* If we matched any instruction that had a REG_ARGS_SIZE, then
     move those notes over to the new sequence.  */
  as_note = NULL;
  for (i = match_len; i >= 0; --i)
    {
      int j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;

      as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
      if (as_note)
	break;
    }

  i = peep2_buf_position (peep2_current + match_len);
  eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);

  /* Replace the old sequence with the new.  */
  rtx_insn *peepinsn = peep2_insn_data[i].insn;
  last = emit_insn_after_setloc (attempt,
				 peep2_insn_data[i].insn,
				 INSN_LOCATION (peepinsn));
  if (JUMP_P (peepinsn) && JUMP_P (last))
    CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
  before_try = PREV_INSN (insn);
  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

  /* Re-insert the EH_REGION notes.  */
  if (eh_note || (was_call && nonlocal_goto_handler_labels))
    {
      edge eh_edge;
      edge_iterator ei;

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
	if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
	  break;

      if (eh_note)
	copy_reg_eh_region_note_backward (eh_note, last, before_try);

      if (eh_edge)
	for (x = last; x != before_try; x = PREV_INSN (x))
	  if (x != BB_END (bb)
	      && (can_throw_internal (x)
		  || can_nonlocal_goto (x)))
	    {
	      edge nfte, nehe;
	      int flags;

	      nfte = split_block (bb, x);
	      flags = (eh_edge->flags
		       & (EDGE_EH | EDGE_ABNORMAL));
	      if (CALL_P (x))
		flags |= EDGE_ABNORMAL_CALL;
	      nehe = make_edge (nfte->src, eh_edge->dest,
				flags);

	      nehe->probability = eh_edge->probability;
	      nfte->probability = nehe->probability.invert ();

	      peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
	      bb = nfte->src;
	      eh_edge = nehe;
	    }

      /* Converting possibly trapping insn to non-trapping is
	 possible.  Zap dummy outgoing edges.  */
      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
    }

  /* Re-insert the ARGS_SIZE notes.  */
  if (as_note)
    fixup_args_size_notes (before_try, last, get_args_size (as_note));

  /* Scan the new insns for embedded side effects and add appropriate
     REG_INC notes.  */
  if (AUTO_INC_DEC)
    for (x = last; x != before_try; x = PREV_INSN (x))
      if (NONDEBUG_INSN_P (x))
	add_auto_inc_notes (x, PATTERN (x));

  /* If we generated a jump instruction, it won't have
     JUMP_LABEL set.  Recompute after we're done.  */
  for (x = last; x != before_try; x = PREV_INSN (x))
    if (JUMP_P (x))
      {
	peep2_do_rebuild_jump_labels = true;
	break;
      }

  return last;
}
/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
		   rtx_insn *prev)
{
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx_insn *x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  x = last;
  do
    {
      if (INSN_P (x))
	{
	  df_insn_rescan (x);
	  if (peep2_current_count < MAX_INSNS_PER_PEEP2)
	    {
	      peep2_current_count++;
	      if (--i < 0)
		i = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[i].insn = x;
	      df_simulate_one_insn_backwards (bb, x, &live);
	      COPY_REG_SET (peep2_insn_data[i].live_before, &live);
	    }
	}
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}
/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
   Return true if we added it, false otherwise.  The caller will try to match
   peepholes against the buffer if we return false; otherwise it will try to
   add more instructions to the buffer.  */

static bool
peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
{
  int pos;

  /* Once we have filled the maximum number of insns the buffer can hold,
     allow the caller to match the insns against peepholes.  We wait until
     the buffer is full in case the target has similar peepholes of different
     length; we always want to match the longest if possible.  */
  if (peep2_current_count == MAX_INSNS_PER_PEEP2)
    return false;

  /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched
     with any other pattern, lest it change the semantics of the frame
     info.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* Let the buffer drain first.  */
      if (peep2_current_count > 0)
	return false;
      /* Now the insn will be the only thing in the buffer.  */
    }

  pos = peep2_buf_position (peep2_current + peep2_current_count);
  peep2_insn_data[pos].insn = insn;
  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
  peep2_current_count++;

  df_simulate_one_insn_forwards (bb, insn, live);
  return true;
}
/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx_insn *insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  search_ofs = 0;
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
	{
	  rtx_insn *attempt, *head;
	  int match_len;

	  if (!past_end && !NONDEBUG_INSN_P (insn))
	    {
	    next_insn:
	      insn = NEXT_INSN (insn);
	      if (insn == NEXT_INSN (BB_END (bb)))
		past_end = true;
	      continue;
	    }
	  if (!past_end && peep2_fill_buffer (bb, insn, live))
	    goto next_insn;

	  /* If we did not fill an empty buffer, it signals the end of the
	     block.  */
	  if (peep2_current_count == 0)
	    break;

	  /* The buffer filled to the current maximum, so try to match.  */

	  pos = peep2_buf_position (peep2_current + peep2_current_count);
	  peep2_insn_data[pos].insn = PEEP2_EOB;
	  COPY_REG_SET (peep2_insn_data[pos].live_before, live);

	  /* Match the peephole.  */
	  head = peep2_insn_data[peep2_current].insn;
	  attempt = peephole2_insns (PATTERN (head), head, &match_len);
	  if (attempt != NULL)
	    {
	      rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
	      if (last)
		{
		  peep2_update_life (bb, match_len, last,
				     PREV_INSN (attempt));
		  continue;
		}
	    }

	  /* No match: advance the buffer by one insn.  */
	  peep2_current = peep2_buf_position (peep2_current + 1);
	  peep2_current_count--;
	}
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
  if (peep2_do_cleanup_cfg)
    cleanup_cfg (CLEANUP_CFG_CHANGED);
}
/* Common predicates for use with define_bypass.  */

/* Helper function for store_data_bypass_p, handle just a single SET
   IN_SET.  */

static bool
store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
{
  if (!MEM_P (SET_DEST (in_set)))
    return false;

  rtx out_set = single_set (out_insn);
  if (out_set)
    return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));

  rtx out_pat = PATTERN (out_insn);
  if (GET_CODE (out_pat) != PARALLEL)
    return false;

  for (int i = 0; i < XVECLEN (out_pat, 0); i++)
    {
      rtx out_exp = XVECEXP (out_pat, 0, i);

      if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
	continue;

      gcc_assert (GET_CODE (out_exp) == SET);

      if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
	return false;
    }

  return true;
}

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */

bool
store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  rtx in_set = single_set (in_insn);
  if (in_set)
    return store_data_bypass_p_1 (out_insn, in_set);

  rtx in_pat = PATTERN (in_insn);
  if (GET_CODE (in_pat) != PARALLEL)
    return false;

  for (int i = 0; i < XVECLEN (in_pat, 0); i++)
    {
      rtx in_exp = XVECEXP (in_pat, 0, i);

      if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
	continue;

      gcc_assert (GET_CODE (in_exp) == SET);

      if (!store_data_bypass_p_1 (out_insn, in_exp))
	return false;
    }

  return true;
}
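/* Usage sketch (hypothetical md fragment; the insn reservation names are
   made up): a scheduler description can shorten the store-to-load latency
   when only the stored data, not the address, is fed by the producer:

     (define_bypass 1 "store_insn" "load_insn" "store_data_bypass_p")

   The guard returns true when OUT_INSN feeds only the data operand of
   the store IN_INSN.  */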
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

bool
if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  /* Test each SET's destination, not the null OUT_SET, in the
	     multiple-set case.  */
	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}
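/* Usage sketch (hypothetical md fragment, analogous to the previous one):

     (define_bypass 0 "setcc_insn" "cmov_insn" "if_test_bypass_p")

   applies the shorter latency only when OUT_INSN feeds the IF_THEN_ELSE
   condition of IN_INSN rather than one of its arms.  */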
static unsigned int
rest_of_handle_peephole2 (void)
{
  if (HAVE_peephole2)
    peephole2_optimize ();

  return 0;
}

namespace {

const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_peephole2 ();
    }

}; // class pass_peephole2

} // anon namespace

rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}
namespace {

const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  PROP_rtl_split_insns, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_all_insns

} // anon namespace

rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}
namespace {

const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* If optimizing, then go ahead and split insns now.  */
      return optimize > 0;
    }

  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}
static bool
enable_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return false;
#endif
}

namespace {

const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return enable_split_before_sched2 ();
    }

  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_sched2

} // anon namespace

rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}
namespace {

const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_regstack

bool
pass_split_before_regstack::gate (function *)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  Selective scheduling can result in
     further instructions that need splitting.  */
#ifdef INSN_SCHEDULING
  return !enable_split_before_sched2 () || flag_selective_scheduling2;
#else
  return !enable_split_before_sched2 ();
#endif
#else
  return false;
#endif
}

} // anon namespace

rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}
namespace {

const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* The placement of the splitting that we do for shorten_branches
	 depends on whether regstack is used by the target or not.  */
#if HAVE_ATTR_length && !defined (STACK_REGS)
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return split_all_insns_noflow ();
    }

}; // class pass_split_for_shorten_branches

} // anon namespace

rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}
/* (Re)initialize the target information after a change in target.  */

void
recog_init ()
{
  /* The information is zero-initialized, so we don't need to do anything
     first time round.  */
  if (!this_target_recog->x_initialized)
    {
      this_target_recog->x_initialized = true;
      return;
    }
  memset (this_target_recog->x_bool_attr_masks, 0,
	  sizeof (this_target_recog->x_bool_attr_masks));
  for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
    if (this_target_recog->x_op_alt[i])
      {
	free (this_target_recog->x_op_alt[i]);
	this_target_recog->x_op_alt[i] = 0;
      }
}