/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "addresses.h"
#include "cfgcleanup.h"
#include "tree-pass.h"
#include "function-abi.h"
#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);
struct target_recog default_target_recog;

struct target_recog *this_target_recog = &default_target_recog;
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

struct recog_data_d recog_data;
/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
                                      * MAX_RECOG_ALTERNATIVES];
/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */
int which_alternative;
/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

init_recog_no_volatile (void)
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return false;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return false;
  if (noperands == 0)
    return true;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return false;
    }

  return true;
}
/* Static data for the next two routines.  */

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
static int temporarily_undone_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If NEW_LEN is >= 0, XVECLEN (NEW_RTX, 0)
   will also be changed to NEW_LEN, which is no greater than the current
   XVECLEN.  If OBJECT is zero, no validation is done, the change is
   simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
                   bool unshare, int new_len = -1)
{
  gcc_assert (temporarily_undone_changes == 0);
  rtx old = *loc;

  /* Single-element parallels aren't valid and won't match anything.
     Replace them with the single element.  */
  if (new_len == 1 && GET_CODE (new_rtx) == PARALLEL)
    {
      new_rtx = XVECEXP (new_rtx, 0, 0);
      new_len = -1;
    }

  if ((old == new_rtx || rtx_equal_p (old, new_rtx))
      && (new_len < 0 || XVECLEN (new_rtx, 0) == new_len))
    return true;

  gcc_assert ((in_group != 0 || num_changes == 0)
              && (new_len < 0 || new_rtx == *loc));

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].old_len = (new_len >= 0 ? XVECLEN (new_rtx, 0) : -1);
  changes[num_changes].unshare = unshare;

  if (new_len >= 0)
    XVECLEN (new_rtx, 0) = new_len;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return true;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   to false.  */

validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   to true.  */

validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
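
/* Usage sketch (illustrative, not part of the original file): callers
   typically queue several changes with IN_GROUP = 1 and then let
   apply_change_group accept or reject them as a unit.  INSN, OP0 and OP1
   below are hypothetical:

     rtx x = SET_SRC (single_set (insn));
     validate_change (insn, &XEXP (x, 0), op1, 1);
     validate_change (insn, &XEXP (x, 1), op0, 1);
     int ok = apply_change_group ();

   If OK is nonzero both changes were confirmed (and the insn rescanned);
   otherwise both were backed out, so the caller never sees a
   half-rewritten insn.  */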
/* Change XVECLEN (*LOC, 0) to NEW_LEN.  OBJECT, IN_GROUP and the return
   value are as for validate_change_1.  */

validate_change_xveclen (rtx object, rtx *loc, int new_len, bool in_group)
{
  return validate_change_1 (object, loc, *loc, in_group, false, new_len);
}
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */

canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);

      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */

num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && register_asm_p (changes[i].old))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insn changed and clear num_changes.  */

confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  gcc_assert (temporarily_undone_changes == 0);
  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (as_a <rtx_insn *> (last_object));
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));

  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

cancel_changes (int num)
{
  int i;

  gcc_assert (temporarily_undone_changes == 0);

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      if (changes[i].old_len >= 0)
        XVECLEN (*changes[i].loc, 0) = changes[i].old_len;
      else
        *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
/* Swap the status of change NUM from being applied to not being applied,
   or vice versa.  */

swap_change (int num)
{
  if (changes[num].old_len >= 0)
    std::swap (XVECLEN (*changes[num].loc, 0), changes[num].old_len);
  else
    std::swap (*changes[num].loc, changes[num].old);
  if (changes[num].object && !MEM_P (changes[num].object))
    std::swap (INSN_CODE (changes[num].object), changes[num].old_code);
}
/* Temporarily undo all the changes numbered NUM and up, with a view
   to reapplying them later.  The next call to the changes machinery
   must be:

      redo_changes (NUM)

   otherwise things will end up in an invalid state.  */

temporarily_undo_changes (int num)
{
  gcc_assert (temporarily_undone_changes == 0 && num <= num_changes);
  for (int i = num_changes - 1; i >= num; i--)
    swap_change (i);
  temporarily_undone_changes = num_changes - num;
}

/* Redo the changes that were temporarily undone by:

      temporarily_undo_changes (NUM).  */

redo_changes (int num)
{
  gcc_assert (temporarily_undone_changes == num_changes - num);
  for (int i = num; i < num_changes; ++i)
    swap_change (i);
  temporarily_undone_changes = 0;
}
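
/* Usage sketch (illustrative, not part of the original file): a caller
   that has queued changes but wants to inspect the insn in its original
   form can bracket the inspection with the two routines above.  NUM is
   whatever num_validated_changes () returned before the changes of
   interest were queued:

     int num = num_validated_changes ();
     validate_change (insn, loc, new_rtx, 1);
     temporarily_undo_changes (num);
     (examine the insn as it was before the change)
     redo_changes (num);

   The assertions on temporarily_undone_changes enforce that undo and redo
   calls are strictly paired.  */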
628 /* Reduce conditional compilation elsewhere. */
629 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
633 simplify_while_replacing (rtx
*loc
, rtx to
, rtx_insn
*object
,
634 machine_mode op0_mode
)
637 enum rtx_code code
= GET_CODE (x
);
638 rtx new_rtx
= NULL_RTX
;
639 scalar_int_mode is_mode
;
641 if (SWAPPABLE_OPERANDS_P (x
)
642 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
644 validate_unshare_change (object
, loc
,
645 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x
) ? code
646 : swap_condition (code
),
647 GET_MODE (x
), XEXP (x
, 1),
653 /* Canonicalize arithmetics with all constant operands. */
654 switch (GET_RTX_CLASS (code
))
657 if (CONSTANT_P (XEXP (x
, 0)))
658 new_rtx
= simplify_unary_operation (code
, GET_MODE (x
), XEXP (x
, 0),
663 if (CONSTANT_P (XEXP (x
, 0)) && CONSTANT_P (XEXP (x
, 1)))
664 new_rtx
= simplify_binary_operation (code
, GET_MODE (x
), XEXP (x
, 0),
668 case RTX_COMM_COMPARE
:
669 if (CONSTANT_P (XEXP (x
, 0)) && CONSTANT_P (XEXP (x
, 1)))
670 new_rtx
= simplify_relational_operation (code
, GET_MODE (x
), op0_mode
,
671 XEXP (x
, 0), XEXP (x
, 1));
678 validate_change (object
, loc
, new_rtx
, 1);
685 /* If we have a PLUS whose second operand is now a CONST_INT, use
686 simplify_gen_binary to try to simplify it.
687 ??? We may want later to remove this, once simplification is
688 separated from this function. */
689 if (CONST_INT_P (XEXP (x
, 1)) && XEXP (x
, 1) == to
)
690 validate_change (object
, loc
,
692 (PLUS
, GET_MODE (x
), XEXP (x
, 0), XEXP (x
, 1)), 1);
695 if (CONST_SCALAR_INT_P (XEXP (x
, 1)))
696 validate_change (object
, loc
,
698 (PLUS
, GET_MODE (x
), XEXP (x
, 0),
699 simplify_gen_unary (NEG
,
700 GET_MODE (x
), XEXP (x
, 1),
705 if (GET_MODE (XEXP (x
, 0)) == VOIDmode
)
707 new_rtx
= simplify_gen_unary (code
, GET_MODE (x
), XEXP (x
, 0),
709 /* If any of the above failed, substitute in something that
710 we know won't be recognized. */
712 new_rtx
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
713 validate_change (object
, loc
, new_rtx
, 1);
717 /* All subregs possible to simplify should be simplified. */
718 new_rtx
= simplify_subreg (GET_MODE (x
), SUBREG_REG (x
), op0_mode
,
721 /* Subregs of VOIDmode operands are incorrect. */
722 if (!new_rtx
&& GET_MODE (SUBREG_REG (x
)) == VOIDmode
)
723 new_rtx
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
725 validate_change (object
, loc
, new_rtx
, 1);
729 /* If we are replacing a register with memory, try to change the memory
730 to be the mode required for memory in extract operations (this isn't
731 likely to be an insertion operation; if it was, nothing bad will
732 happen, we might just fail in some cases). */
734 if (MEM_P (XEXP (x
, 0))
735 && is_a
<scalar_int_mode
> (GET_MODE (XEXP (x
, 0)), &is_mode
)
736 && CONST_INT_P (XEXP (x
, 1))
737 && CONST_INT_P (XEXP (x
, 2))
738 && !mode_dependent_address_p (XEXP (XEXP (x
, 0), 0),
739 MEM_ADDR_SPACE (XEXP (x
, 0)))
740 && !MEM_VOLATILE_P (XEXP (x
, 0)))
742 int pos
= INTVAL (XEXP (x
, 2));
743 machine_mode new_mode
= is_mode
;
744 if (GET_CODE (x
) == ZERO_EXTRACT
&& targetm
.have_extzv ())
745 new_mode
= insn_data
[targetm
.code_for_extzv
].operand
[1].mode
;
746 else if (GET_CODE (x
) == SIGN_EXTRACT
&& targetm
.have_extv ())
747 new_mode
= insn_data
[targetm
.code_for_extv
].operand
[1].mode
;
748 scalar_int_mode wanted_mode
= (new_mode
== VOIDmode
750 : as_a
<scalar_int_mode
> (new_mode
));
752 /* If we have a narrower mode, we can do something. */
753 if (GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
755 int offset
= pos
/ BITS_PER_UNIT
;
758 /* If the bytes and bits are counted differently, we
759 must adjust the offset. */
760 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
762 (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
) -
765 gcc_assert (GET_MODE_PRECISION (wanted_mode
)
766 == GET_MODE_BITSIZE (wanted_mode
));
767 pos
%= GET_MODE_BITSIZE (wanted_mode
);
769 newmem
= adjust_address_nv (XEXP (x
, 0), wanted_mode
, offset
);
771 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
772 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
783 /* Replace every occurrence of FROM in X with TO. Mark each change with
784 validate_change passing OBJECT. */
787 validate_replace_rtx_1 (rtx
*loc
, rtx from
, rtx to
, rtx_insn
*object
,
794 machine_mode op0_mode
= VOIDmode
;
795 int prev_changes
= num_changes
;
801 fmt
= GET_RTX_FORMAT (code
);
803 op0_mode
= GET_MODE (XEXP (x
, 0));
805 /* X matches FROM if it is the same rtx or they are both referring to the
806 same register in the same mode. Avoid calling rtx_equal_p unless the
807 operands look similar. */
810 || (REG_P (x
) && REG_P (from
)
811 && GET_MODE (x
) == GET_MODE (from
)
812 && REGNO (x
) == REGNO (from
))
813 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
814 && rtx_equal_p (x
, from
)))
816 validate_unshare_change (object
, loc
, to
, 1);
820 /* Call ourself recursively to perform the replacements.
821 We must not replace inside already replaced expression, otherwise we
822 get infinite recursion for replacements like (reg X)->(subreg (reg X))
823 so we must special case shared ASM_OPERANDS. */
825 if (GET_CODE (x
) == PARALLEL
)
827 for (j
= XVECLEN (x
, 0) - 1; j
>= 0; j
--)
829 if (j
&& GET_CODE (XVECEXP (x
, 0, j
)) == SET
830 && GET_CODE (SET_SRC (XVECEXP (x
, 0, j
))) == ASM_OPERANDS
)
832 /* Verify that operands are really shared. */
833 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x
, 0, 0)))
834 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
836 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x
, 0, j
)),
837 from
, to
, object
, simplify
);
840 validate_replace_rtx_1 (&XVECEXP (x
, 0, j
), from
, to
, object
,
845 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
848 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
, simplify
);
849 else if (fmt
[i
] == 'E')
850 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
851 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
,
855 /* If we didn't substitute, there is nothing more to do. */
856 if (num_changes
== prev_changes
)
859 /* ??? The regmove is no more, so is this aberration still necessary? */
860 /* Allow substituted expression to have different mode. This is used by
861 regmove to change mode of pseudo register. */
862 if (fmt
[0] == 'e' && GET_MODE (XEXP (x
, 0)) != VOIDmode
)
863 op0_mode
= GET_MODE (XEXP (x
, 0));
865 /* Do changes needed to keep rtx consistent. Don't do any other
866 simplifications, as it is not our job. */
868 simplify_while_replacing (loc
, to
, object
, op0_mode
);
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.

   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */

validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
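
/* Usage sketch (illustrative, not part of the original file): replacing
   one register with another throughout an insn.  REG_A, REG_B and INSN
   are hypothetical:

     int ok = validate_replace_rtx (reg_a, reg_b, insn);

   If OK is nonzero the insn was re-recognized with every REG_A replaced
   by REG_B; otherwise all substitutions were undone and the insn is
   untouched.  validate_replace_rtx_group is the variant to use when the
   replacements (including those in REG_EQUAL and REG_EQUIV notes) should
   stay queued so the caller can decide later with apply_change_group.  */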
929 /* Function called by note_uses to replace used subexpressions. */
930 struct validate_replace_src_data
932 rtx from
; /* Old RTX */
933 rtx to
; /* New RTX */
934 rtx_insn
*insn
; /* Insn in which substitution is occurring. */
938 validate_replace_src_1 (rtx
*x
, void *data
)
940 struct validate_replace_src_data
*d
941 = (struct validate_replace_src_data
*) data
;
943 validate_replace_rtx_1 (x
, d
->from
, d
->to
, d
->insn
, true);
946 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
950 validate_replace_src_group (rtx from
, rtx to
, rtx_insn
*insn
)
952 struct validate_replace_src_data d
;
957 note_uses (&PATTERN (insn
), validate_replace_src_1
, &d
);
960 /* Try simplify INSN.
961 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
962 pattern and return true if something was simplified. */
965 validate_simplify_insn (rtx_insn
*insn
)
971 pat
= PATTERN (insn
);
973 if (GET_CODE (pat
) == SET
)
975 newpat
= simplify_rtx (SET_SRC (pat
));
976 if (newpat
&& !rtx_equal_p (SET_SRC (pat
), newpat
))
977 validate_change (insn
, &SET_SRC (pat
), newpat
, 1);
978 newpat
= simplify_rtx (SET_DEST (pat
));
979 if (newpat
&& !rtx_equal_p (SET_DEST (pat
), newpat
))
980 validate_change (insn
, &SET_DEST (pat
), newpat
, 1);
982 else if (GET_CODE (pat
) == PARALLEL
)
983 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
985 rtx s
= XVECEXP (pat
, 0, i
);
987 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
)
989 newpat
= simplify_rtx (SET_SRC (s
));
990 if (newpat
&& !rtx_equal_p (SET_SRC (s
), newpat
))
991 validate_change (insn
, &SET_SRC (s
), newpat
, 1);
992 newpat
= simplify_rtx (SET_DEST (s
));
993 if (newpat
&& !rtx_equal_p (SET_DEST (s
), newpat
))
994 validate_change (insn
, &SET_DEST (s
), newpat
, 1);
997 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
1000 /* Try to process the address of memory expression MEM. Return true on
1001 success; leave the caller to clean up on failure. */
1004 insn_propagation::apply_to_mem_1 (rtx mem
)
1006 auto old_num_changes
= num_validated_changes ();
1008 bool res
= apply_to_rvalue_1 (&XEXP (mem
, 0));
1013 if (old_num_changes
!= num_validated_changes ()
1014 && should_check_mems
1015 && !check_mem (old_num_changes
, mem
))
1021 /* Try to process the rvalue expression at *LOC. Return true on success;
1022 leave the caller to clean up on failure. */
1025 insn_propagation::apply_to_rvalue_1 (rtx
*loc
)
1028 enum rtx_code code
= GET_CODE (x
);
1029 machine_mode mode
= GET_MODE (x
);
1031 auto old_num_changes
= num_validated_changes ();
1032 if (from
&& GET_CODE (x
) == GET_CODE (from
) && rtx_equal_p (x
, from
))
1034 /* Don't replace register asms in asm statements; we mustn't
1035 change the user's register allocation. */
1037 && HARD_REGISTER_P (x
)
1038 && register_asm_p (x
)
1039 && asm_noperands (PATTERN (insn
)) > 0)
1043 validate_unshare_change (insn
, loc
, to
, 1);
1045 validate_change (insn
, loc
, to
, 1);
1046 if (mem_depth
&& !REG_P (to
) && !CONSTANT_P (to
))
1048 /* We're substituting into an address, but TO will have the
1049 form expected outside an address. Canonicalize it if
1051 insn_propagation
subprop (insn
);
1052 subprop
.mem_depth
+= 1;
1053 if (!subprop
.apply_to_rvalue (loc
))
1056 && num_validated_changes () != old_num_changes
+ 1)
1058 /* TO is owned by someone else, so create a copy and
1059 return TO to its original form. */
1060 rtx to
= copy_rtx (*loc
);
1061 cancel_changes (old_num_changes
);
1062 validate_change (insn
, loc
, to
, 1);
1065 num_replacements
+= 1;
1066 should_unshare
= true;
1067 result_flags
|= UNSIMPLIFIED
;
1071 /* Recursively apply the substitution and see if we can simplify
1072 the result. This specifically shouldn't use simplify_gen_* for
1073 speculative simplifications, since we want to avoid generating new
1074 expressions where possible. */
1075 auto old_result_flags
= result_flags
;
1076 rtx newx
= NULL_RTX
;
1077 bool recurse_p
= false;
1078 switch (GET_RTX_CLASS (code
))
1082 machine_mode op0_mode
= GET_MODE (XEXP (x
, 0));
1083 if (!apply_to_rvalue_1 (&XEXP (x
, 0)))
1085 if (from
&& old_num_changes
== num_validated_changes ())
1088 newx
= simplify_unary_operation (code
, mode
, XEXP (x
, 0), op0_mode
);
1093 case RTX_COMM_ARITH
:
1095 if (!apply_to_rvalue_1 (&XEXP (x
, 0))
1096 || !apply_to_rvalue_1 (&XEXP (x
, 1)))
1098 if (from
&& old_num_changes
== num_validated_changes ())
1101 if (GET_RTX_CLASS (code
) == RTX_COMM_ARITH
1102 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
1103 newx
= simplify_gen_binary (code
, mode
, XEXP (x
, 1), XEXP (x
, 0));
1105 newx
= simplify_binary_operation (code
, mode
,
1106 XEXP (x
, 0), XEXP (x
, 1));
1111 case RTX_COMM_COMPARE
:
1113 machine_mode op_mode
= (GET_MODE (XEXP (x
, 0)) != VOIDmode
1114 ? GET_MODE (XEXP (x
, 0))
1115 : GET_MODE (XEXP (x
, 1)));
1116 if (!apply_to_rvalue_1 (&XEXP (x
, 0))
1117 || !apply_to_rvalue_1 (&XEXP (x
, 1)))
1119 if (from
&& old_num_changes
== num_validated_changes ())
1122 newx
= simplify_relational_operation (code
, mode
, op_mode
,
1123 XEXP (x
, 0), XEXP (x
, 1));
1128 case RTX_BITFIELD_OPS
:
1130 machine_mode op0_mode
= GET_MODE (XEXP (x
, 0));
1131 if (!apply_to_rvalue_1 (&XEXP (x
, 0))
1132 || !apply_to_rvalue_1 (&XEXP (x
, 1))
1133 || !apply_to_rvalue_1 (&XEXP (x
, 2)))
1135 if (from
&& old_num_changes
== num_validated_changes ())
1138 newx
= simplify_ternary_operation (code
, mode
, op0_mode
,
1139 XEXP (x
, 0), XEXP (x
, 1),
1147 machine_mode inner_mode
= GET_MODE (SUBREG_REG (x
));
1148 if (!apply_to_rvalue_1 (&SUBREG_REG (x
)))
1150 if (from
&& old_num_changes
== num_validated_changes ())
1153 rtx inner
= SUBREG_REG (x
);
1154 newx
= simplify_subreg (mode
, inner
, inner_mode
, SUBREG_BYTE (x
));
1155 /* Reject the same cases that simplify_gen_subreg would. */
1157 && (GET_CODE (inner
) == SUBREG
1158 || GET_CODE (inner
) == CONCAT
1159 || GET_MODE (inner
) == VOIDmode
1160 || !validate_subreg (mode
, inner_mode
,
1161 inner
, SUBREG_BYTE (x
))))
1163 failure_reason
= "would create an invalid subreg";
1175 if (!apply_to_rvalue_1 (&XEXP (x
, 0))
1176 || !apply_to_rvalue_1 (&XEXP (x
, 1)))
1178 if (from
&& old_num_changes
== num_validated_changes ())
1181 /* (lo_sum (high x) y) -> y where x and y have the same base. */
1182 rtx op0
= XEXP (x
, 0);
1183 rtx op1
= XEXP (x
, 1);
1184 if (GET_CODE (op0
) == HIGH
)
1186 rtx base0
, base1
, offset0
, offset1
;
1187 split_const (XEXP (op0
, 0), &base0
, &offset0
);
1188 split_const (op1
, &base1
, &offset1
);
1189 if (rtx_equal_p (base0
, base1
))
1193 else if (code
== REG
)
1195 if (from
&& REG_P (from
) && reg_overlap_mentioned_p (x
, from
))
1197 failure_reason
= "inexact register overlap";
1201 else if (code
== MEM
)
1202 return apply_to_mem_1 (x
);
1211 if (from
&& reg_overlap_mentioned_p (XEXP (x
, 0), from
))
1213 failure_reason
= "is subject to autoinc";
1226 const char *fmt
= GET_RTX_FORMAT (code
);
1227 for (int i
= 0; fmt
[i
]; i
++)
1231 for (int j
= 0; j
< XVECLEN (x
, i
); j
++)
1232 if (!apply_to_rvalue_1 (&XVECEXP (x
, i
, j
)))
1237 if (XEXP (x
, i
) && !apply_to_rvalue_1 (&XEXP (x
, i
)))
1242 else if (newx
&& !rtx_equal_p (x
, newx
))
1244 /* All substitutions made by OLD_NUM_CHANGES onwards have been
1246 result_flags
= ((result_flags
& ~UNSIMPLIFIED
)
1247 | (old_result_flags
& UNSIMPLIFIED
));
1249 if (should_note_simplifications
)
1250 note_simplification (old_num_changes
, old_result_flags
, x
, newx
);
1252 /* There's no longer any point unsharing the substitutions made
1253 for subexpressions, since we'll just copy this one instead. */
1254 bool unshare
= false;
1255 for (int i
= old_num_changes
; i
< num_changes
; ++i
)
1257 unshare
|= changes
[i
].unshare
;
1258 changes
[i
].unshare
= false;
1261 validate_unshare_change (insn
, loc
, newx
, 1);
1263 validate_change (insn
, loc
, newx
, 1);
1269 /* Try to process the lvalue expression at *LOC. Return true on success;
1270 leave the caller to clean up on failure. */
1273 insn_propagation::apply_to_lvalue_1 (rtx dest
)
1275 rtx old_dest
= dest
;
1276 while (GET_CODE (dest
) == SUBREG
1277 || GET_CODE (dest
) == ZERO_EXTRACT
1278 || GET_CODE (dest
) == STRICT_LOW_PART
)
1280 if (GET_CODE (dest
) == ZERO_EXTRACT
1281 && (!apply_to_rvalue_1 (&XEXP (dest
, 1))
1282 || !apply_to_rvalue_1 (&XEXP (dest
, 2))))
1284 dest
= XEXP (dest
, 0);
1288 return apply_to_mem_1 (dest
);
1290 /* Check whether the substitution is safe in the presence of this lvalue. */
1294 || !reg_overlap_mentioned_p (dest
, from
))
1297 if (SUBREG_P (old_dest
)
1298 && SUBREG_REG (old_dest
) == dest
1299 && !read_modify_subreg_p (old_dest
))
1302 failure_reason
= "is part of a read-write destination";
1306 /* Try to process the instruction pattern at *LOC. Return true on success;
1307 leave the caller to clean up on failure. */
1310 insn_propagation::apply_to_pattern_1 (rtx
*loc
)
1313 switch (GET_CODE (body
))
1316 return (apply_to_rvalue_1 (&COND_EXEC_TEST (body
))
1317 && apply_to_pattern_1 (&COND_EXEC_CODE (body
)));
1321 int last
= XVECLEN (body
, 0) - 1;
1322 for (int i
= 0; i
< last
; ++i
)
1323 if (!apply_to_pattern_1 (&XVECEXP (body
, 0, i
)))
1325 return apply_to_pattern_1 (&XVECEXP (body
, 0, last
));
1329 for (int i
= 0, len
= ASM_OPERANDS_INPUT_LENGTH (body
); i
< len
; ++i
)
1330 if (!apply_to_rvalue_1 (&ASM_OPERANDS_INPUT (body
, i
)))
1335 return apply_to_lvalue_1 (XEXP (body
, 0));
1338 return (apply_to_lvalue_1 (SET_DEST (body
))
1339 && apply_to_rvalue_1 (&SET_SRC (body
)));
1342 /* All the other possibilities never store and can use a normal
1343 rtx walk. This includes:
1349 - UNSPEC_VOLATILE. */
1350 return apply_to_rvalue_1 (loc
);
/* Apply this insn_propagation object's simplification or substitution
   to the instruction pattern at LOC.  */

insn_propagation::apply_to_pattern (rtx *loc)
{
  unsigned int num_changes = num_validated_changes ();
  bool res = apply_to_pattern_1 (loc);
  if (!res)
    cancel_changes (num_changes);
  return res;
}

/* Apply this insn_propagation object's simplification or substitution
   to the rvalue expression at LOC.  */

insn_propagation::apply_to_rvalue (rtx *loc)
{
  unsigned int num_changes = num_validated_changes ();
  bool res = apply_to_rvalue_1 (loc);
  if (!res)
    cancel_changes (num_changes);
  return res;
}
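
/* Usage sketch (illustrative, not part of the original file): propagating
   a known equivalence SRC for register DEST into USE_INSN, where DEST,
   SRC and USE_INSN are hypothetical:

     insn_propagation prop (use_insn, dest, src);
     bool ok = (prop.apply_to_pattern (&PATTERN (use_insn))
                && apply_change_group ());

   On success USE_INSN now uses SRC (possibly simplified) instead of DEST;
   on failure apply_to_pattern has already cancelled its own changes, so
   the caller only has to deal with the unchanged insn.  */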
/* Check whether INSN matches a specific alternative of an .md pattern.  */

valid_insn_p (rtx_insn *insn)
{
  recog_memoized (insn);
  if (INSN_CODE (insn) < 0)
    return false;
  extract_insn (insn);
  /* We don't know whether the insn will be in code that is optimized
     for size or speed, so consider all enabled alternatives.  */
  if (!constrain_operands (1, get_enabled_alternatives (insn)))
    return false;
  return true;
}
1396 /* Return true if OP is a valid general operand for machine mode MODE.
1397 This is either a register reference, a memory reference,
1398 or a constant. In the case of a memory reference, the address
1399 is checked for general validity for the target machine.
1401 Register and memory references must have mode MODE in order to be valid,
1402 but some constants have no machine mode and are valid for any mode.
1404 If MODE is VOIDmode, OP is checked for validity for whatever mode
1407 The main use of this function is as a predicate in match_operand
1408 expressions in the machine description. */
1411 general_operand (rtx op
, machine_mode mode
)
1413 enum rtx_code code
= GET_CODE (op
);
1415 if (mode
== VOIDmode
)
1416 mode
= GET_MODE (op
);
1418 /* Don't accept CONST_INT or anything similar
1419 if the caller wants something floating. */
1420 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1421 && GET_MODE_CLASS (mode
) != MODE_INT
1422 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1425 if (CONST_INT_P (op
)
1427 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1430 if (CONSTANT_P (op
))
1431 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1432 || mode
== VOIDmode
)
1433 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1434 && targetm
.legitimate_constant_p (mode
== VOIDmode
1438 /* Except for certain constants with VOIDmode, already checked for,
1439 OP's mode must match MODE if MODE specifies a mode. */
1441 if (GET_MODE (op
) != mode
)
1446 rtx sub
= SUBREG_REG (op
);
1448 #ifdef INSN_SCHEDULING
1449 /* On machines that have insn scheduling, we want all memory
1450 reference to be explicit, so outlaw paradoxical SUBREGs.
1451 However, we must allow them after reload so that they can
1452 get cleaned up by cleanup_subreg_operands. */
1453 if (!reload_completed
&& MEM_P (sub
)
1454 && paradoxical_subreg_p (op
))
1457 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1458 may result in incorrect reference. We should simplify all valid
1459 subregs of MEM anyway. But allow this after reload because we
1460 might be called from cleanup_subreg_operands.
1462 ??? This is a kludge. */
1463 if (!reload_completed
1464 && maybe_ne (SUBREG_BYTE (op
), 0)
1469 && REGNO (sub
) < FIRST_PSEUDO_REGISTER
1470 && !REG_CAN_CHANGE_MODE_P (REGNO (sub
), GET_MODE (sub
), mode
)
1471 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_INT
1472 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_FLOAT
1473 /* LRA can generate some invalid SUBREGS just for matched
1474 operand reload presentation. LRA needs to treat them as
1476 && ! LRA_SUBREG_P (op
))
1479 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1480 create such rtl, and we must reject it. */
1481 if (SCALAR_FLOAT_MODE_P (GET_MODE (op
))
1482 /* LRA can use subreg to store a floating point value in an
1483 integer mode. Although the floating point and the
1484 integer modes need the same number of hard registers, the
1485 size of floating point mode can be less than the integer
1487 && ! lra_in_progress
1488 && paradoxical_subreg_p (op
))
1492 code
= GET_CODE (op
);
1496 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1497 || in_hard_reg_set_p (operand_reg_set
, GET_MODE (op
), REGNO (op
)));
1501 rtx y
= XEXP (op
, 0);
1503 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
1506 /* Use the mem's mode, since it will be reloaded thus. LRA can
1507 generate move insn with invalid addresses which is made valid
1508 and efficiently calculated by LRA through further numerous
1511 || memory_address_addr_space_p (GET_MODE (op
), y
, MEM_ADDR_SPACE (op
)))
1518 /* Return true if OP is a valid memory address for a memory reference
1521 The main use of this function is as a predicate in match_operand
1522 expressions in the machine description. */
1525 address_operand (rtx op
, machine_mode mode
)
1527 /* Wrong mode for an address expr. */
1528 if (GET_MODE (op
) != VOIDmode
1529 && ! SCALAR_INT_MODE_P (GET_MODE (op
)))
1532 return memory_address_p (mode
, op
);
1535 /* Return true if OP is a register reference of mode MODE.
1536 If MODE is VOIDmode, accept a register in any mode.
1538 The main use of this function is as a predicate in match_operand
1539 expressions in the machine description. */
1542 register_operand (rtx op
, machine_mode mode
)
1544 if (GET_CODE (op
) == SUBREG
)
1546 rtx sub
= SUBREG_REG (op
);
1548 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1549 because it is guaranteed to be reloaded into one.
1550 Just make sure the MEM is valid in itself.
1551 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1552 but currently it does result from (SUBREG (REG)...) where the
1553 reg went on the stack.) */
1554 if (!REG_P (sub
) && (reload_completed
|| !MEM_P (sub
)))
1557 else if (!REG_P (op
))
1559 return general_operand (op
, mode
);
1562 /* Return true for a register in Pmode; ignore the tested mode. */
1565 pmode_register_operand (rtx op
, machine_mode mode ATTRIBUTE_UNUSED
)
1567 return register_operand (op
, Pmode
);
1570 /* Return true if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1571 or a hard register. */
1574 scratch_operand (rtx op
, machine_mode mode
)
1576 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1579 return (GET_CODE (op
) == SCRATCH
1582 || (REGNO (op
) < FIRST_PSEUDO_REGISTER
1583 && REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
))));
1586 /* Return true if OP is a valid immediate operand for mode MODE.
1588 The main use of this function is as a predicate in match_operand
1589 expressions in the machine description. */
1592 immediate_operand (rtx op
, machine_mode mode
)
1594 /* Don't accept CONST_INT or anything similar
1595 if the caller wants something floating. */
1596 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1597 && GET_MODE_CLASS (mode
) != MODE_INT
1598 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1601 if (CONST_INT_P (op
)
1603 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1606 return (CONSTANT_P (op
)
1607 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
1608 || GET_MODE (op
) == VOIDmode
)
1609 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1610 && targetm
.legitimate_constant_p (mode
== VOIDmode
1615 /* Return true if OP is an operand that is a CONST_INT of mode MODE. */
1618 const_int_operand (rtx op
, machine_mode mode
)
1620 if (!CONST_INT_P (op
))
1623 if (mode
!= VOIDmode
1624 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1630 #if TARGET_SUPPORTS_WIDE_INT
1631 /* Return true if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1634 const_scalar_int_operand (rtx op
, machine_mode mode
)
1636 if (!CONST_SCALAR_INT_P (op
))
1639 if (CONST_INT_P (op
))
1640 return const_int_operand (op
, mode
);
1642 if (mode
!= VOIDmode
)
1644 scalar_int_mode int_mode
= as_a
<scalar_int_mode
> (mode
);
1645 int prec
= GET_MODE_PRECISION (int_mode
);
1646 int bitsize
= GET_MODE_BITSIZE (int_mode
);
1648 if (CONST_WIDE_INT_NUNITS (op
) * HOST_BITS_PER_WIDE_INT
> bitsize
)
1651 if (prec
== bitsize
)
1655 /* Multiword partial int. */
1657 = CONST_WIDE_INT_ELT (op
, CONST_WIDE_INT_NUNITS (op
) - 1);
1658 return (sext_hwi (x
, prec
& (HOST_BITS_PER_WIDE_INT
- 1)) == x
);
1664 /* Return true if OP is an operand that is a constant integer or constant
1665 floating-point number of MODE. */
1668 const_double_operand (rtx op
, machine_mode mode
)
1670 return (GET_CODE (op
) == CONST_DOUBLE
)
1671 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
);
1674 /* Return true if OP is an operand that is a constant integer or constant
1675 floating-point number of MODE. */
1678 const_double_operand (rtx op
, machine_mode mode
)
1680 /* Don't accept CONST_INT or anything similar
1681 if the caller wants something floating. */
1682 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1683 && GET_MODE_CLASS (mode
) != MODE_INT
1684 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1687 return ((CONST_DOUBLE_P (op
) || CONST_INT_P (op
))
1688 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1689 || GET_MODE (op
) == VOIDmode
));
1692 /* Return true if OP is a general operand that is not an immediate
1693 operand of mode MODE. */
1696 nonimmediate_operand (rtx op
, machine_mode mode
)
1698 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
1701 /* Return true if OP is a register reference or
1702 immediate value of mode MODE. */
1705 nonmemory_operand (rtx op
, machine_mode mode
)
1707 if (CONSTANT_P (op
))
1708 return immediate_operand (op
, mode
);
1709 return register_operand (op
, mode
);
/* Return true if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

push_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return false;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return false;
    }
  else
    {
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
          || (STACK_GROWS_DOWNWARD
              ? maybe_ne (offset, -rounded_size)
              : maybe_ne (offset, rounded_size)))
        return false;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
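
/* Illustrative note (not part of the original file): on a target whose
   stack grows downward and which defines no PUSH_ROUNDING adjustment,
   STACK_PUSH_CODE is PRE_DEC by default, so push_operand accepts an
   operand of the form

     (mem:SI (pre_dec (reg sp)))

   while a plain (mem:SI (reg sp)) is rejected because the address is not
   a push of the stack pointer.  */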
1756 /* Return true if OP is a valid operand that stands for popping a
1757 value of mode MODE off the stack.
1759 The main use of this function is as a predicate in match_operand
1760 expressions in the machine description. */
1763 pop_operand (rtx op
, machine_mode mode
)
1768 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1773 if (GET_CODE (op
) != STACK_POP_CODE
)
1776 return XEXP (op
, 0) == stack_pointer_rtx
;
1779 /* Return true if ADDR is a valid memory address
1780 for mode MODE in address space AS. */
1783 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED
,
1784 rtx addr
, addr_space_t as
)
1786 #ifdef GO_IF_LEGITIMATE_ADDRESS
1787 gcc_assert (ADDR_SPACE_GENERIC_P (as
));
1788 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1794 return targetm
.addr_space
.legitimate_address_p (mode
, addr
, 0, as
);
1798 /* Return true if OP is a valid memory reference with mode MODE,
1799 including a valid address.
1801 The main use of this function is as a predicate in match_operand
1802 expressions in the machine description. */
1805 memory_operand (rtx op
, machine_mode mode
)
1809 if (! reload_completed
)
1810 /* Note that no SUBREG is a memory operand before end of reload pass,
1811 because (SUBREG (MEM...)) forces reloading into a register. */
1812 return MEM_P (op
) && general_operand (op
, mode
);
1814 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1818 if (GET_CODE (inner
) == SUBREG
)
1819 inner
= SUBREG_REG (inner
);
1821 return (MEM_P (inner
) && general_operand (op
, mode
));
1824 /* Return true if OP is a valid indirect memory reference with mode MODE;
1825 that is, a memory reference whose address is a general_operand. */
1828 indirect_operand (rtx op
, machine_mode mode
)
1830 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1831 if (! reload_completed
1832 && GET_CODE (op
) == SUBREG
&& MEM_P (SUBREG_REG (op
)))
1834 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1837 /* The only way that we can have a general_operand as the resulting
1838 address is if OFFSET is zero and the address already is an operand
1839 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1842 rtx addr
= strip_offset (XEXP (SUBREG_REG (op
), 0), &offset
);
1843 return (known_eq (offset
+ SUBREG_BYTE (op
), 0)
1844 && general_operand (addr
, Pmode
));
1848 && memory_operand (op
, mode
)
1849 && general_operand (XEXP (op
, 0), Pmode
));
1852 /* Return true if this is an ordered comparison operator (not including
1853 ORDERED and UNORDERED). */
1856 ordered_comparison_operator (rtx op
, machine_mode mode
)
1858 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1860 switch (GET_CODE (op
))
1878 /* Return true if this is a comparison operator. This allows the use of
1879 MATCH_OPERATOR to recognize all the branch insns. */
1882 comparison_operator (rtx op
, machine_mode mode
)
1884 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1885 && COMPARISON_P (op
));
1888 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1891 extract_asm_operands (rtx body
)
1894 switch (GET_CODE (body
))
1900 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1901 tmp
= SET_SRC (body
);
1902 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1907 tmp
= XVECEXP (body
, 0, 0);
1908 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1910 if (GET_CODE (tmp
) == SET
)
1912 tmp
= SET_SRC (tmp
);
1913 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1924 /* If BODY is an insn body that uses ASM_OPERANDS,
1925 return the number of operands (both input and output) in the insn.
1926 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1928 Otherwise return -1. */
1931 asm_noperands (const_rtx body
)
1933 rtx asm_op
= extract_asm_operands (CONST_CAST_RTX (body
));
1938 if (GET_CODE (body
) == PARALLEL
&& XVECLEN (body
, 0) >= 2
1939 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_INPUT
)
1941 /* body is [(asm_input ...) (clobber (reg ...))...]. */
1942 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1943 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1950 if (GET_CODE (body
) == SET
)
1952 else if (GET_CODE (body
) == PARALLEL
)
1954 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
)
1956 /* Multiple output operands, or 1 output plus some clobbers:
1958 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1959 /* Count backwards through CLOBBERs to determine number of SETs. */
1960 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1962 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1964 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1968 /* N_SETS is now number of output operands. */
1971 /* Verify that all the SETs we have
1972 came from a single original asm_operands insn
1973 (so that invalid combinations are blocked). */
1974 for (i
= 0; i
< n_sets
; i
++)
1976 rtx elt
= XVECEXP (body
, 0, i
);
1977 if (GET_CODE (elt
) != SET
)
1979 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1981 /* If these ASM_OPERANDS rtx's came from different original insns
1982 then they aren't allowed together. */
1983 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1984 != ASM_OPERANDS_INPUT_VEC (asm_op
))
1990 /* 0 outputs, but some clobbers:
1991 body is [(asm_operands ...) (clobber (reg ...))...]. */
1992 /* Make sure all the other parallel things really are clobbers. */
1993 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1994 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1999 return (ASM_OPERANDS_INPUT_LENGTH (asm_op
)
2000 + ASM_OPERANDS_LABEL_LENGTH (asm_op
) + n_sets
);
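
/* Worked example (illustrative, not part of the original file): for an
   asm statement such as

     asm ("foo %0,%1,%2" : "=r" (x), "=r" (y) : "r" (z));

   the body is a PARALLEL of two SETs whose sources share one ASM_OPERANDS,
   so asm_noperands returns 3: two outputs plus one input, with no goto
   labels.  */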
2003 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
2004 copy its operands (both input and output) into the vector OPERANDS,
2005 the locations of the operands within the insn into the vector OPERAND_LOCS,
2006 and the constraints for the operands into CONSTRAINTS.
2007 Write the modes of the operands into MODES.
2008 Write the location info into LOC.
2009 Return the assembler-template.
2010 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
2011 return the basic assembly string.
2013 If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
2014 we don't store that info. */
2017 decode_asm_operands (rtx body
, rtx
*operands
, rtx
**operand_locs
,
2018 const char **constraints
, machine_mode
*modes
,
2021 int nbase
= 0, n
, i
;
2024 switch (GET_CODE (body
))
2027 /* Zero output asm: BODY is (asm_operands ...). */
2032 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
2033 asmop
= SET_SRC (body
);
2035 /* The output is in the SET.
2036 Its constraint is in the ASM_OPERANDS itself. */
2038 operands
[0] = SET_DEST (body
);
2040 operand_locs
[0] = &SET_DEST (body
);
2042 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
2044 modes
[0] = GET_MODE (SET_DEST (body
));
2050 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
2052 asmop
= XVECEXP (body
, 0, 0);
2053 if (GET_CODE (asmop
) == SET
)
2055 asmop
= SET_SRC (asmop
);
2057 /* At least one output, plus some CLOBBERs. The outputs are in
2058 the SETs. Their constraints are in the ASM_OPERANDS itself. */
2059 for (i
= 0; i
< nparallel
; i
++)
2061 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
2062 break; /* Past last SET */
2063 gcc_assert (GET_CODE (XVECEXP (body
, 0, i
)) == SET
);
2065 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
2067 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
2069 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
2071 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
2075 else if (GET_CODE (asmop
) == ASM_INPUT
)
2078 *loc
= ASM_INPUT_SOURCE_LOCATION (asmop
);
2079 return XSTR (asmop
, 0);
2088 n
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
2089 for (i
= 0; i
< n
; i
++)
2092 operand_locs
[nbase
+ i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
2094 operands
[nbase
+ i
] = ASM_OPERANDS_INPUT (asmop
, i
);
2096 constraints
[nbase
+ i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
2098 modes
[nbase
+ i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
2102 n
= ASM_OPERANDS_LABEL_LENGTH (asmop
);
2103 for (i
= 0; i
< n
; i
++)
2106 operand_locs
[nbase
+ i
] = &ASM_OPERANDS_LABEL (asmop
, i
);
2108 operands
[nbase
+ i
] = ASM_OPERANDS_LABEL (asmop
, i
);
2110 constraints
[nbase
+ i
] = "";
2112 modes
[nbase
+ i
] = Pmode
;
2116 *loc
= ASM_OPERANDS_SOURCE_LOCATION (asmop
);
2118 return ASM_OPERANDS_TEMPLATE (asmop
);
2121 /* Parse inline assembly string STRING and determine which operands are
2122 referenced by % markers. For the first NOPERANDS operands, set USED[I]
2123 to true if operand I is referenced.
2125 This is intended to distinguish barrier-like asms such as:
2127 asm ("" : "=m" (...));
2129 from real references such as:
2131 asm ("sw\t$0, %0" : "=m" (...)); */
2134 get_referenced_operands (const char *string
, bool *used
,
2135 unsigned int noperands
)
2137 memset (used
, 0, sizeof (bool) * noperands
);
2138 const char *p
= string
;
2144 /* A letter followed by a digit indicates an operand number. */
2145 if (ISALPHA (p
[0]) && ISDIGIT (p
[1]))
2150 unsigned long opnum
= strtoul (p
, &endptr
, 10);
2151 if (endptr
!= p
&& opnum
< noperands
)
2165 /* Check if an asm_operand matches its constraints.
2166 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
2169 asm_operand_ok (rtx op
, const char *constraint
, const char **constraints
)
2172 bool incdec_ok
= false;
2174 /* Use constrain_operands after reload. */
2175 gcc_assert (!reload_completed
);
2177 /* Empty constraint string is the same as "X,...,X", i.e. X for as
2178 many alternatives as required to match the other operands. */
2179 if (*constraint
== '\0')
2184 enum constraint_num cn
;
2185 char c
= *constraint
;
2193 case '0': case '1': case '2': case '3': case '4':
2194 case '5': case '6': case '7': case '8': case '9':
2195 /* If caller provided constraints pointer, look up
2196 the matching constraint. Otherwise, our caller should have
2197 given us the proper matching constraint, but we can't
2198 actually fail the check if they didn't. Indicate that
2199 results are inconclusive. */
2203 unsigned long match
;
2205 match
= strtoul (constraint
, &end
, 10);
2207 result
= asm_operand_ok (op
, constraints
[match
], NULL
);
2208 constraint
= (const char *) end
;
2214 while (ISDIGIT (*constraint
));
2220 /* The rest of the compiler assumes that reloading the address
2221 of a MEM into a register will make it fit an 'o' constraint.
2222 That is, if it sees a MEM operand for an 'o' constraint,
2223 it assumes that (mem (base-reg)) will fit.
2225 That assumption fails on targets that don't have offsettable
2226 addresses at all. We therefore need to treat 'o' asm
2227 constraints as a special case and only accept operands that
2228 are already offsettable, thus proving that at least one
2229 offsettable address exists. */
2230 case 'o': /* offsettable */
2231 if (offsettable_nonstrict_memref_p (op
))
2236 if (general_operand (op
, VOIDmode
))
2242 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
2243 to exist, excepting those that expand_call created. Further,
2244 on some machines which do not have generalized auto inc/dec,
2245 an inc/dec is not a memory_operand.
2247 Match any memory and hope things are resolved after reload. */
2251 cn
= lookup_constraint (constraint
);
2253 switch (get_constraint_type (cn
))
2257 && reg_class_for_constraint (cn
) != NO_REGS
2258 && GET_MODE (op
) != BLKmode
2259 && register_operand (op
, VOIDmode
))
2266 && insn_const_int_ok_for_constraint (INTVAL (op
), cn
))
2271 case CT_RELAXED_MEMORY
:
2274 case CT_SPECIAL_MEMORY
:
2275 /* Every memory operand can be reloaded to fit. */
2277 mem
= extract_mem_from_operand (op
);
2278 result
= result
|| memory_operand (mem
, VOIDmode
);
2282 /* Every address operand can be reloaded to fit. */
2283 result
= result
|| address_operand (op
, VOIDmode
);
2287 result
= result
|| constraint_satisfied_p (op
, cn
);
2292 len
= CONSTRAINT_LEN (c
, constraint
);
2295 while (--len
&& *constraint
&& *constraint
!= ',');
2300 /* For operands without < or > constraints reject side-effects. */
2301 if (AUTO_INC_DEC
&& !incdec_ok
&& result
&& MEM_P (op
))
2302 switch (GET_CODE (XEXP (op
, 0)))
2318 /* Given an rtx *P, if it is a sum containing an integer constant term,
2319 return the location (type rtx *) of the pointer to that constant term.
2320 Otherwise, return a null pointer. */
2323 find_constant_term_loc (rtx *p)
2326 enum rtx_code code = GET_CODE (*p);
2328 /* If *P IS such a constant term, P is its location. */
2330 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
2334 /* Otherwise, if not a sum, it has no constant term. */
2336 if (GET_CODE (*p) != PLUS)
2339 /* If one of the summands is constant, return its location. */
2341 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2342 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2345 /* Otherwise, check each summand for containing a constant term. */
2347 if (XEXP (*p, 0) != 0)
2349 tem = find_constant_term_loc (&XEXP (*p, 0));
2354 if (XEXP (*p, 1) != 0)
2356 tem = find_constant_term_loc (&XEXP (*p, 1));
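/* Usage sketch (illustrative, not from GCC): this is essentially what
offsettable_address_addr_space_p does below -- locate the constant term of a
PLUS address and temporarily bump it. REG here stands for any existing
register rtx; the gen_rtx_* constructors and GEN_INT are real GCC helpers. */
#if 0
static void
example_find_constant_term (rtx reg)
{
  rtx addr = gen_rtx_PLUS (Pmode, reg, GEN_INT (4));
  rtx *term = find_constant_term_loc (&addr);
  if (term != 0)
    /* *TERM is the location of the (const_int 4); swap in (const_int 7).  */
    *term = GEN_INT (7);
}
#endif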
2364 /* Return true if OP is a memory reference whose address contains
2365 no side effects and remains valid after the addition of a positive
2366 integer less than the size of the object being referenced.
2368 We assume that the original address is valid and do not check it.
2370 This uses strict_memory_address_p as a subroutine, so
2371 don't use it before reload. */
2374 offsettable_memref_p (rtx op)
2376 return ((MEM_P (op))
2377 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
2378 MEM_ADDR_SPACE (op)));
2381 /* Similar, but don't require a strictly valid mem ref:
2382 consider pseudo-regs valid as index or base regs. */
2385 offsettable_nonstrict_memref_p (rtx op)
2387 return ((MEM_P (op))
2388 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
2389 MEM_ADDR_SPACE (op)));
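/* Illustrative sketch (not from GCC sources): on a target with reg+offset
addressing, a MEM such as (mem:SI (plus (reg) (const_int 8))) is normally
offsettable, whereas (mem:SI (post_inc (reg))) never is, because its address
has a side effect. BASE_REG stands for any existing register rtx; whether
the first check succeeds still depends on the target's addressing modes. */
#if 0
static void
example_offsettable (rtx base_reg)
{
  rtx m1 = gen_rtx_MEM (SImode, plus_constant (Pmode, base_reg, 8));
  rtx m2 = gen_rtx_MEM (SImode, gen_rtx_POST_INC (Pmode, base_reg));
  bool ok1 = offsettable_nonstrict_memref_p (m1);	/* usually true */
  bool ok2 = offsettable_nonstrict_memref_p (m2);	/* always false */
}
#endif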
2392 /* Return true if Y is a memory address which contains no side effects
2393 and would remain valid for address space AS after the addition of
2394 a positive integer less than the size of that mode.
2396 We assume that the original address is valid and do not check it.
2397 We do check that it is valid for narrower modes.
2399 If STRICTP is nonzero, we require a strictly valid address,
2400 for the sake of use in reload.c. */
2403 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
2406 enum rtx_code ycode = GET_CODE (y);
2410 bool (*addressp) (machine_mode, rtx, addr_space_t) =
2411 (strictp ? strict_memory_address_addr_space_p
2412 : memory_address_addr_space_p);
2413 poly_int64 mode_sz = GET_MODE_SIZE (mode);
2415 if (CONSTANT_ADDRESS_P (y))
2418 /* Adjusting an offsettable address involves changing to a narrower mode.
2419 Make sure that's OK. */
2421 if (mode_dependent_address_p (y, as))
2424 machine_mode address_mode = GET_MODE (y);
2425 if (address_mode == VOIDmode)
2426 address_mode = targetm.addr_space.address_mode (as);
2427 #ifdef POINTERS_EXTEND_UNSIGNED
2428 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2431 /* ??? How much offset does an offsettable BLKmode reference need?
2432 Clearly that depends on the situation in which it's being used.
2433 However, the current situation in which we test 0xffffffff is
2434 less than ideal. Caveat user. */
2435 if (known_eq (mode_sz, 0))
2436 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2438 /* If the expression contains a constant term,
2439 see if it remains valid when max possible offset is added. */
2441 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2446 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2447 /* Use QImode because an odd displacement may be automatically invalid
2448 for any wider mode. But it should be valid for a single byte. */
2449 good = (*addressp) (QImode, y, as);
2451 /* In any case, restore old contents of memory. */
2456 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2459 /* The offset added here is chosen as the maximum offset that
2460 any instruction could need to add when operating on something
2461 of the specified mode. We assume that if Y and Y+c are
2462 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2463 go inside a LO_SUM here, so we do so as well. */
2464 if (GET_CODE (y) == LO_SUM
2466 && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
2467 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2468 plus_constant (address_mode, XEXP (y, 1),
2470 #ifdef POINTERS_EXTEND_UNSIGNED
2471 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2472 else if (POINTERS_EXTEND_UNSIGNED > 0
2473 && GET_CODE (y) == ZERO_EXTEND
2474 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2475 z = gen_rtx_ZERO_EXTEND (address_mode,
2476 plus_constant (pointer_mode, XEXP (y, 0),
2480 z = plus_constant (address_mode, y, mode_sz - 1);
2482 /* Use QImode because an odd displacement may be automatically invalid
2483 for any wider mode. But it should be valid for a single byte. */
2484 return (*addressp) (QImode, z, as);
2487 /* Return true if ADDR is an address-expression whose effect depends
2488 on the mode of the memory reference it is used in.
2490 ADDRSPACE is the address space associated with the address.
2492 Autoincrement addressing is a typical example of mode-dependence
2493 because the amount of the increment depends on the mode. */
2496 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2498 /* Auto-increment addressing with anything other than post_modify
2499 or pre_modify always introduces a mode dependency. Catch such
2500 cases now instead of deferring to the target. */
2501 if (GET_CODE (addr) == PRE_INC
2502 || GET_CODE (addr) == POST_INC
2503 || GET_CODE (addr) == PRE_DEC
2504 || GET_CODE (addr) == POST_DEC)
2507 return targetm.mode_dependent_address_p (addr, addrspace);
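/* Worked example (illustrative, not from GCC sources): a plain auto-increment
address is always mode-dependent because the step it applies equals the size
of the access -- (post_inc (reg sp)) advances the pointer by 4 inside a
(mem:SI ...) but by 8 inside a (mem:DI ...). The assertion below just
restates the check made at the top of this function. */
#if 0
static void
example_mode_dependent_address (void)
{
  rtx addr = gen_rtx_POST_INC (Pmode, stack_pointer_rtx);
  gcc_assert (mode_dependent_address_p (addr, ADDR_SPACE_GENERIC));
}
#endif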
2510 /* Return true if boolean attribute ATTR is supported. */
2513 have_bool_attr (bool_attr attr)
2518 return HAVE_ATTR_enabled;
2519 case BA_PREFERRED_FOR_SIZE:
2520 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2521 case BA_PREFERRED_FOR_SPEED:
2522 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2527 /* Return the value of ATTR for instruction INSN. */
2530 get_bool_attr (rtx_insn *insn, bool_attr attr)
2535 return get_attr_enabled (insn);
2536 case BA_PREFERRED_FOR_SIZE:
2537 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2538 case BA_PREFERRED_FOR_SPEED:
2539 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2544 /* Like get_bool_attr_mask, but don't use the cache. */
2546 static alternative_mask
2547 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2549 /* Temporarily install enough information for get_attr_<foo> to assume
2550 that the insn operands are already cached. As above, the attribute
2551 mustn't depend on the values of operands, so we don't provide their
2552 real values here. */
2553 rtx_insn *old_insn = recog_data.insn;
2554 int old_alternative = which_alternative;
2556 recog_data.insn = insn;
2557 alternative_mask mask = ALL_ALTERNATIVES;
2558 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2559 for (int i = 0; i < n_alternatives; i++)
2561 which_alternative = i;
2562 if (!get_bool_attr (insn, attr))
2563 mask &= ~ALTERNATIVE_BIT (i);
2566 recog_data.insn = old_insn;
2567 which_alternative = old_alternative;
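/* Worked example (illustrative): for an insn with four alternatives whose
"enabled" attribute evaluates to 0 only for alternative 2, the loop above
leaves the mask with bit 2 cleared, i.e. 0b1011 in the low four bits. The
TEST_BIT/ALTERNATIVE_BIT macros are the ones already used in this file. */
#if 0
static void
example_alternative_mask (void)
{
  alternative_mask mask = ALL_ALTERNATIVES;
  mask &= ~ALTERNATIVE_BIT (2);	/* alternative 2 is disabled */
  gcc_assert (!TEST_BIT (mask, 2) && TEST_BIT (mask, 0));
}
#endif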
2571 /* Return the mask of operand alternatives that are allowed for INSN
2572 by boolean attribute ATTR. This mask depends only on INSN and on
2573 the current target; it does not depend on things like the values of
2576 static alternative_mask
2577 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2579 /* Quick exit for asms and for targets that don't use these attributes. */
2580 int code = INSN_CODE (insn);
2581 if (code < 0 || !have_bool_attr (attr))
2582 return ALL_ALTERNATIVES;
2584 /* Calling get_attr_<foo> can be expensive, so cache the mask
2586 if (!this_target_recog->x_bool_attr_masks[code][attr])
2587 this_target_recog->x_bool_attr_masks[code][attr]
2588 = get_bool_attr_mask_uncached (insn, attr);
2589 return this_target_recog->x_bool_attr_masks[code][attr];
2592 /* Return the set of alternatives of INSN that are allowed by the current
2596 get_enabled_alternatives (rtx_insn *insn)
2598 return get_bool_attr_mask (insn, BA_ENABLED);
2601 /* Return the set of alternatives of INSN that are allowed by the current
2602 target and are preferred for the current size/speed optimization
2606 get_preferred_alternatives (rtx_insn *insn)
2608 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2609 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2611 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2614 /* Return the set of alternatives of INSN that are allowed by the current
2615 target and are preferred for the size/speed optimization choice
2616 associated with BB. Passing a separate BB is useful if INSN has not
2617 been emitted yet or if we are considering moving it to a different
2621 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2623 if (optimize_bb_for_speed_p (bb))
2624 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2626 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2629 /* Assert that the cached boolean attributes for INSN are still accurate.
2630 The backend is required to define these attributes in a way that only
2631 depends on the current target (rather than operands, compiler phase,
2635 check_bool_attrs (rtx_insn *insn)
2637 int code = INSN_CODE (insn);
2639 for (int i = 0; i <= BA_LAST; ++i)
2641 enum bool_attr attr = (enum bool_attr) i;
2642 if (this_target_recog->x_bool_attr_masks[code][attr])
2643 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2644 == get_bool_attr_mask_uncached (insn, attr));
2649 /* Like extract_insn, but save the extracted insn and don't extract it again
2650 when called again for the same insn, expecting that recog_data still
2651 contains valid information. This is used primarily by the gen_attr
2652 infrastructure, which often extracts the same insn again and again. */
2654 extract_insn_cached (rtx_insn *insn)
2656 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2658 extract_insn (insn);
2659 recog_data.insn = insn;
2662 /* Do uncached extract_insn, constrain_operands and complain about failures.
2663 This should be used when extracting a pre-existing constrained instruction
2664 if the caller wants to know which alternative was chosen. */
2666 extract_constrain_insn (rtx_insn *insn)
2668 extract_insn (insn);
2669 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2670 fatal_insn_not_found (insn);
2673 /* Do cached extract_insn, constrain_operands and complain about failures.
2674 Used by insn_attrtab. */
2676 extract_constrain_insn_cached (rtx_insn *insn)
2678 extract_insn_cached (insn);
2679 if (which_alternative == -1
2680 && !constrain_operands (reload_completed,
2681 get_enabled_alternatives (insn)))
2682 fatal_insn_not_found (insn);
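/* Usage sketch (illustrative): attribute code typically goes through the
cached entry points, so repeated queries about the same insn do not redo the
extraction or the constraint matching. */
#if 0
static void
example_cached_extraction (rtx_insn *insn)
{
  extract_constrain_insn_cached (insn);
  /* recog_data now describes INSN and which_alternative identifies the
     matched constraint alternative, so operands can be inspected cheaply.  */
  rtx op0 = recog_data.operand[0];
  int alt = which_alternative;
}
#endif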
2685 /* Do cached constrain_operands on INSN and complain about failures. */
2687 constrain_operands_cached (rtx_insn *insn, int strict)
2689 if (which_alternative == -1)
2690 return constrain_operands (strict, get_enabled_alternatives (insn));
2695 /* Analyze INSN and fill in recog_data. */
2698 extract_insn (rtx_insn
*insn
)
2703 rtx body
= PATTERN (insn
);
2705 recog_data
.n_operands
= 0;
2706 recog_data
.n_alternatives
= 0;
2707 recog_data
.n_dups
= 0;
2708 recog_data
.is_asm
= false;
2710 switch (GET_CODE (body
))
2722 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
2727 if ((GET_CODE (XVECEXP (body
, 0, 0)) == SET
2728 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
2729 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
2730 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_INPUT
)
2736 recog_data
.n_operands
= noperands
= asm_noperands (body
);
2739 /* This insn is an `asm' with operands. */
2741 /* expand_asm_operands makes sure there aren't too many operands. */
2742 gcc_assert (noperands
<= MAX_RECOG_OPERANDS
);
2744 /* Now get the operand values and constraints out of the insn. */
2745 decode_asm_operands (body
, recog_data
.operand
,
2746 recog_data
.operand_loc
,
2747 recog_data
.constraints
,
2748 recog_data
.operand_mode
, NULL
);
2749 memset (recog_data
.is_operator
, 0, sizeof recog_data
.is_operator
);
2752 const char *p
= recog_data
.constraints
[0];
2753 recog_data
.n_alternatives
= 1;
2755 recog_data
.n_alternatives
+= (*p
++ == ',');
2757 recog_data
.is_asm
= true;
2760 fatal_insn_not_found (insn
);
2764 /* Ordinary insn: recognize it, get the operands via insn_extract
2765 and get the constraints. */
2767 icode
= recog_memoized (insn
);
2769 fatal_insn_not_found (insn
);
2771 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
2772 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
2773 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
2775 insn_extract (insn
);
2777 for (i
= 0; i
< noperands
; i
++)
2779 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2780 recog_data
.is_operator
[i
] = insn_data
[icode
].operand
[i
].is_operator
;
2781 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
2782 /* VOIDmode match_operands gets mode from their real operand. */
2783 if (recog_data
.operand_mode
[i
] == VOIDmode
)
2784 recog_data
.operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2787 for (i
= 0; i
< noperands
; i
++)
2788 recog_data
.operand_type
[i
]
2789 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
2790 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
2793 gcc_assert (recog_data
.n_alternatives
<= MAX_RECOG_ALTERNATIVES
);
2795 recog_data
.insn
= NULL
;
2796 which_alternative
= -1;
2799 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2800 operands, N_ALTERNATIVES alternatives and constraint strings
2801 CONSTRAINTS. OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2802 and CONSTRAINTS has N_OPERANDS entries. OPLOC should be passed in
2803 if the insn is an asm statement and preprocessing should take the
2804 asm operands into account, e.g. to determine whether they could be
2805 addresses in constraints that require addresses; it should then
2806 point to an array of pointers to each operand. */
2809 preprocess_constraints (int n_operands
, int n_alternatives
,
2810 const char **constraints
,
2811 operand_alternative
*op_alt_base
,
2814 for (int i
= 0; i
< n_operands
; i
++)
2817 struct operand_alternative
*op_alt
;
2818 const char *p
= constraints
[i
];
2820 op_alt
= op_alt_base
;
2822 for (j
= 0; j
< n_alternatives
; j
++, op_alt
+= n_operands
)
2824 op_alt
[i
].cl
= NO_REGS
;
2825 op_alt
[i
].constraint
= p
;
2826 op_alt
[i
].matches
= -1;
2827 op_alt
[i
].matched
= -1;
2829 if (*p
== '\0' || *p
== ',')
2831 op_alt
[i
].anything_ok
= 1;
2841 while (c
!= ',' && c
!= '\0');
2842 if (c
== ',' || c
== '\0')
2851 op_alt
[i
].reject
+= 6;
2854 op_alt
[i
].reject
+= 600;
2857 op_alt
[i
].earlyclobber
= 1;
2860 case '0': case '1': case '2': case '3': case '4':
2861 case '5': case '6': case '7': case '8': case '9':
2864 op_alt
[i
].matches
= strtoul (p
, &end
, 10);
2865 op_alt
[op_alt
[i
].matches
].matched
= i
;
2871 op_alt
[i
].anything_ok
= 1;
2876 reg_class_subunion
[(int) op_alt
[i
].cl
][(int) GENERAL_REGS
];
2880 enum constraint_num cn
= lookup_constraint (p
);
2882 switch (get_constraint_type (cn
))
2885 cl
= reg_class_for_constraint (cn
);
2887 op_alt
[i
].cl
= reg_class_subunion
[op_alt
[i
].cl
][cl
];
2894 case CT_SPECIAL_MEMORY
:
2895 case CT_RELAXED_MEMORY
:
2896 op_alt
[i
].memory_ok
= 1;
2900 if (oploc
&& !address_operand (*oploc
[i
], VOIDmode
))
2903 op_alt
[i
].is_address
= 1;
2905 = (reg_class_subunion
2906 [(int) op_alt
[i
].cl
]
2907 [(int) base_reg_class (VOIDmode
, ADDR_SPACE_GENERIC
,
2908 ADDRESS
, SCRATCH
)]);
2916 p
+= CONSTRAINT_LEN (c
, p
);
2922 /* Return an array of operand_alternative structures for
2923 instruction ICODE. */
2925 const operand_alternative
*
2926 preprocess_insn_constraints (unsigned int icode
)
2928 gcc_checking_assert (IN_RANGE (icode
, 0, NUM_INSN_CODES
- 1));
2929 if (this_target_recog
->x_op_alt
[icode
])
2930 return this_target_recog
->x_op_alt
[icode
];
2932 int n_operands
= insn_data
[icode
].n_operands
;
2933 if (n_operands
== 0)
2935 /* Always provide at least one alternative so that which_op_alt ()
2936 works correctly. If the instruction has 0 alternatives (i.e. all
2937 constraint strings are empty) then each operand in this alternative
2938 will have anything_ok set. */
2939 int n_alternatives
= MAX (insn_data
[icode
].n_alternatives
, 1);
2940 int n_entries
= n_operands
* n_alternatives
;
2942 operand_alternative
*op_alt
= XCNEWVEC (operand_alternative
, n_entries
);
2943 const char **constraints
= XALLOCAVEC (const char *, n_operands
);
2945 for (int i
= 0; i
< n_operands
; ++i
)
2946 constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2947 preprocess_constraints (n_operands
, n_alternatives
, constraints
, op_alt
,
2950 this_target_recog
->x_op_alt
[icode
] = op_alt
;
2954 /* After calling extract_insn, you can use this function to extract some
2955 information from the constraint strings into a more usable form.
2956 The collected data is stored in recog_op_alt. */
2959 preprocess_constraints (rtx_insn *insn)
2961 int icode = INSN_CODE (insn);
2963 recog_op_alt = preprocess_insn_constraints (icode);
2966 int n_operands = recog_data.n_operands;
2967 int n_alternatives = recog_data.n_alternatives;
2968 int n_entries = n_operands * n_alternatives;
2969 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2970 preprocess_constraints (n_operands, n_alternatives,
2971 recog_data.constraints, asm_op_alt,
2973 recog_op_alt = asm_op_alt;
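/* Indexing sketch (illustrative): after the call above, the entry for operand
OP under alternative ALT lives at recog_op_alt[ALT * recog_data.n_operands + OP];
which_op_alt () wraps this arithmetic for the currently matched alternative.
The names ALT1 and CL_OF_OP0 below exist only for the example. */
#if 0
static void
example_recog_op_alt (rtx_insn *insn)
{
  preprocess_constraints (insn);
  const operand_alternative *alt1
    = &recog_op_alt[1 * recog_data.n_operands];
  enum reg_class cl_of_op0 = alt1[0].cl;	/* class of operand 0 in alternative 1 */
}
#endif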
2977 /* Check the operands of an insn against the insn's operand constraints
2978 and return 1 if they match any of the alternatives in ALTERNATIVES.
2980 The information about the insn's operands, constraints, operand modes
2981 etc. is obtained from the global variables set up by extract_insn.
2983 WHICH_ALTERNATIVE is set to a number which indicates which
2984 alternative of constraints was matched: 0 for the first alternative,
2985 1 for the next, etc.
2987 In addition, when two operands are required to match
2988 and it happens that the output operand is (reg) while the
2989 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2990 make the output operand look like the input.
2991 This is because the output operand is the one the template will print.
2993 This is used in final, just before printing the assembler code and by
2994 the routines that determine an insn's attribute.
2996 If STRICT is a positive nonzero value, it means that we have been
2997 called after reload has been completed. In that case, we must
2998 do all checks strictly. If it is zero, it means that we have been called
2999 before reload has completed. In that case, we first try to see if we can
3000 find an alternative that matches strictly. If not, we try again, this
3001 time assuming that reload will fix up the insn. This provides a "best
3002 guess" for the alternative and is used to compute attributes of insns prior
3003 to reload. A negative value of STRICT is used for this internal call. */
3011 constrain_operands (int strict
, alternative_mask alternatives
)
3013 const char *constraints
[MAX_RECOG_OPERANDS
];
3014 int matching_operands
[MAX_RECOG_OPERANDS
];
3015 int earlyclobber
[MAX_RECOG_OPERANDS
];
3018 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
3019 int funny_match_index
;
3021 which_alternative
= 0;
3022 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
3025 for (c
= 0; c
< recog_data
.n_operands
; c
++)
3026 constraints
[c
] = recog_data
.constraints
[c
];
3030 int seen_earlyclobber_at
= -1;
3033 funny_match_index
= 0;
3035 if (!TEST_BIT (alternatives
, which_alternative
))
3039 for (i
= 0; i
< recog_data
.n_operands
; i
++)
3040 constraints
[i
] = skip_alternative (constraints
[i
]);
3042 which_alternative
++;
3046 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
3047 matching_operands
[opno
] = -1;
3049 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
3051 rtx op
= recog_data
.operand
[opno
];
3052 machine_mode mode
= GET_MODE (op
);
3053 const char *p
= constraints
[opno
];
3059 earlyclobber
[opno
] = 0;
3061 /* A unary operator may be accepted by the predicate, but it
3062 is irrelevant for matching constraints. */
3063 /* For special_memory_operand, there could be a memory operand inside,
3064 and it would cause a mismatch for constraint_satisfied_p. */
3065 if (UNARY_P (op
) && op
== extract_mem_from_operand (op
))
3068 if (GET_CODE (op
) == SUBREG
)
3070 if (REG_P (SUBREG_REG (op
))
3071 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
3072 offset
= subreg_regno_offset (REGNO (SUBREG_REG (op
)),
3073 GET_MODE (SUBREG_REG (op
)),
3076 op
= SUBREG_REG (op
);
3079 /* An empty constraint or empty alternative
3080 allows anything which matched the pattern. */
3081 if (*p
== 0 || *p
== ',')
3085 switch (c
= *p
, len
= CONSTRAINT_LEN (c
, p
), c
)
3095 /* Ignore rest of this alternative as far as
3096 constraint checking is concerned. */
3099 while (*p
&& *p
!= ',');
3104 earlyclobber
[opno
] = 1;
3105 if (seen_earlyclobber_at
< 0)
3106 seen_earlyclobber_at
= opno
;
3109 case '0': case '1': case '2': case '3': case '4':
3110 case '5': case '6': case '7': case '8': case '9':
3112 /* This operand must be the same as a previous one.
3113 This kind of constraint is used for instructions such
3114 as add when they take only two operands.
3116 Note that the lower-numbered operand is passed first.
3118 If we are not testing strictly, assume that this
3119 constraint will be satisfied. */
3124 match
= strtoul (p
, &end
, 10);
3131 rtx op1
= recog_data
.operand
[match
];
3132 rtx op2
= recog_data
.operand
[opno
];
3134 /* A unary operator may be accepted by the predicate,
3135 but it is irrelevant for matching constraints. */
3137 op1
= XEXP (op1
, 0);
3139 op2
= XEXP (op2
, 0);
3141 val
= operands_match_p (op1
, op2
);
3144 matching_operands
[opno
] = match
;
3145 matching_operands
[match
] = opno
;
3150 /* If output is *x and input is *--x, arrange later
3151 to change the output to *--x as well, since the
3152 output op is the one that will be printed. */
3153 if (val
== 2 && strict
> 0)
3155 funny_match
[funny_match_index
].this_op
= opno
;
3156 funny_match
[funny_match_index
++].other
= match
;
3163 /* p is used for address_operands. When we are called by
3164 gen_reload, no one will have checked that the address is
3165 strictly valid, i.e., that all pseudos requiring hard regs
3166 have gotten them. We also want to make sure we have a
3168 if ((GET_MODE (op
) == VOIDmode
3169 || SCALAR_INT_MODE_P (GET_MODE (op
)))
3171 || (strict_memory_address_p
3172 (recog_data
.operand_mode
[opno
], op
))))
3176 /* No need to check general_operand again;
3177 it was done in insn-recog.c. Well, except that reload
3178 doesn't check the validity of its replacements, but
3179 that should only matter when there's a bug. */
3181 /* Anything goes unless it is a REG and really has a hard reg
3182 but the hard reg is not in the class GENERAL_REGS. */
3186 || GENERAL_REGS
== ALL_REGS
3187 || (reload_in_progress
3188 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
3189 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
3192 else if (strict
< 0 || general_operand (op
, mode
))
3198 enum constraint_num cn
= lookup_constraint (p
);
3199 enum reg_class cl
= reg_class_for_constraint (cn
);
3205 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
3206 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
3208 && reg_fits_class_p (op
, cl
, offset
, mode
)))
3212 else if (constraint_satisfied_p (op
, cn
))
3215 else if (insn_extra_memory_constraint (cn
)
3216 /* Every memory operand can be reloaded to fit. */
3217 && ((strict
< 0 && MEM_P (op
))
3218 /* Before reload, accept what reload can turn
3220 || (strict
< 0 && CONSTANT_P (op
))
3221 /* Before reload, accept a pseudo or hard register,
3222 since LRA can turn it into a mem. */
3223 || (strict
< 0 && targetm
.lra_p () && REG_P (op
))
3224 /* During reload, accept a pseudo */
3225 || (reload_in_progress
&& REG_P (op
)
3226 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)))
3228 else if (insn_extra_address_constraint (cn
)
3229 /* Every address operand can be reloaded to fit. */
3232 /* Cater to architectures like IA-64 that define extra memory
3233 constraints without using define_memory_constraint. */
3234 else if (reload_in_progress
3236 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
3237 && reg_renumber
[REGNO (op
)] < 0
3238 && reg_equiv_mem (REGNO (op
)) != 0
3239 && constraint_satisfied_p
3240 (reg_equiv_mem (REGNO (op
)), cn
))
3245 while (p
+= len
, c
);
3247 constraints
[opno
] = p
;
3248 /* If this operand did not win somehow,
3249 this alternative loses. */
3253 /* This alternative won; the operands are ok.
3254 Change whichever operands this alternative says to change. */
3259 /* See if any earlyclobber operand conflicts with some other
3262 if (strict
> 0 && seen_earlyclobber_at
>= 0)
3263 for (eopno
= seen_earlyclobber_at
;
3264 eopno
< recog_data
.n_operands
;
3266 /* Ignore earlyclobber operands now in memory,
3267 because we would often report failure when we have
3268 two memory operands, one of which was formerly a REG. */
3269 if (earlyclobber
[eopno
]
3270 && REG_P (recog_data
.operand
[eopno
]))
3271 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
3272 if ((MEM_P (recog_data
.operand
[opno
])
3273 || recog_data
.operand_type
[opno
] != OP_OUT
)
3275 /* Ignore things like match_operator operands. */
3276 && *recog_data
.constraints
[opno
] != 0
3277 && ! (matching_operands
[opno
] == eopno
3278 && operands_match_p (recog_data
.operand
[opno
],
3279 recog_data
.operand
[eopno
]))
3280 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
3281 recog_data
.operand
[eopno
]))
3286 while (--funny_match_index
>= 0)
3288 recog_data
.operand
[funny_match
[funny_match_index
].other
]
3289 = recog_data
.operand
[funny_match
[funny_match_index
].this_op
];
3292 /* For operands without < or > constraints reject side-effects. */
3293 if (AUTO_INC_DEC
&& recog_data
.is_asm
)
3295 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
3296 if (MEM_P (recog_data
.operand
[opno
]))
3297 switch (GET_CODE (XEXP (recog_data
.operand
[opno
], 0)))
3305 if (strchr (recog_data
.constraints
[opno
], '<') == NULL
3306 && strchr (recog_data
.constraints
[opno
], '>')
3319 which_alternative
++;
3321 while (which_alternative
< recog_data
.n_alternatives
);
3323 which_alternative
= -1;
3324 /* If we are about to reject this, but we are not to test strictly,
3325 try a very loose test. Only return failure if it fails also. */
3327 return constrain_operands (-1, alternatives);
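/* Usage sketch (illustrative): callers that need to know which alternative
matched inspect which_alternative after a successful call; per the comment
above the function, a positive STRICT is used once reload has completed. */
#if 0
static void
example_constrain_operands (rtx_insn *insn)
{
  extract_insn (insn);
  if (constrain_operands (1, get_enabled_alternatives (insn)))
    {
      int alt = which_alternative;	/* index of the matched alternative */
    }
  else
    fatal_insn_not_found (insn);
}
#endif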
3332 /* Return true iff OPERAND (assumed to be a REG rtx)
3333 is a hard reg in class CLASS when its regno is offset by OFFSET
3334 and changed to mode MODE.
3335 If REG occupies multiple hard regs, all of them must be in CLASS. */
3338 reg_fits_class_p (const_rtx operand
, reg_class_t cl
, int offset
,
3341 unsigned int regno
= REGNO (operand
);
3346 /* Regno must not be a pseudo register. Offset may be negative. */
3347 return (HARD_REGISTER_NUM_P (regno
)
3348 && HARD_REGISTER_NUM_P (regno
+ offset
)
3349 && in_hard_reg_set_p (reg_class_contents
[(int) cl
], mode
,
3353 /* Split single instruction. Helper function for split_all_insns and
3354 split_all_insns_noflow. Return last insn in the sequence if successful,
3355 or NULL if unsuccessful. */
3358 split_insn (rtx_insn
*insn
)
3360 /* Split insns here to get max fine-grain parallelism. */
3361 rtx_insn
*first
= PREV_INSN (insn
);
3362 rtx_insn
*last
= try_split (PATTERN (insn
), insn
, 1);
3363 rtx insn_set
, last_set
, note
;
3368 /* If the original instruction was a single set that was known to be
3369 equivalent to a constant, see if we can say the same about the last
3370 instruction in the split sequence. The two instructions must set
3371 the same destination. */
3372 insn_set
= single_set (insn
);
3375 last_set
= single_set (last
);
3376 if (last_set
&& rtx_equal_p (SET_DEST (last_set
), SET_DEST (insn_set
)))
3378 note
= find_reg_equal_equiv_note (insn
);
3379 if (note
&& CONSTANT_P (XEXP (note
, 0)))
3380 set_unique_reg_note (last
, REG_EQUAL
, XEXP (note
, 0));
3381 else if (CONSTANT_P (SET_SRC (insn_set
)))
3382 set_unique_reg_note (last
, REG_EQUAL
,
3383 copy_rtx (SET_SRC (insn_set
)));
3387 /* try_split returns the NOTE that INSN became. */
3388 SET_INSN_DELETED (insn
);
3390 /* ??? Coddle to md files that generate subregs in post-reload
3391 splitters instead of computing the proper hard register. */
3392 if (reload_completed
&& first
!= last
)
3394 first
= NEXT_INSN (first
);
3398 cleanup_subreg_operands (first
);
3401 first
= NEXT_INSN (first
);
3408 /* Split all insns in the function. If UPD_LIFE, update life info after. */
3411 split_all_insns (void)
3414 bool need_cfg_cleanup
= false;
3417 auto_sbitmap
blocks (last_basic_block_for_fn (cfun
));
3418 bitmap_clear (blocks
);
3421 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
3423 rtx_insn
*insn
, *next
;
3424 bool finish
= false;
3426 rtl_profile_for_bb (bb
);
3427 for (insn
= BB_HEAD (bb
); !finish
; insn
= next
)
3429 /* Can't use `next_real_insn' because that might go across
3430 CODE_LABELS and short-out basic blocks. */
3431 next
= NEXT_INSN (insn
);
3432 finish
= (insn
== BB_END (bb
));
3434 /* If INSN has a REG_EH_REGION note and we split INSN, the
3435 resulting split may not have/need REG_EH_REGION notes.
3437 If that happens and INSN was the last reference to the
3438 given EH region, then the EH region will become unreachable.
3439 We cannot leave the unreachable blocks in the CFG as that
3440 will trigger a checking failure.
3442 So track if INSN has a REG_EH_REGION note. If so and we
3443 split INSN, then trigger a CFG cleanup. */
3444 rtx note
= find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
);
3447 rtx set
= single_set (insn
);
3449 /* Don't split no-op move insns. These should silently
3450 disappear later in final. Splitting such insns would
3451 break the code that handles LIBCALL blocks. */
3452 if (set
&& set_noop_p (set
))
3454 /* Nops get in the way while scheduling, so delete them
3455 now if register allocation has already been done. It
3456 is too risky to try to do this before register
3457 allocation, and there are unlikely to be very many
3458 nops then anyways. */
3459 if (reload_completed
)
3460 delete_insn_and_edges (insn
);
3462 need_cfg_cleanup
= true;
3466 if (split_insn (insn
))
3468 bitmap_set_bit (blocks
, bb
->index
);
3471 need_cfg_cleanup
= true;
3478 default_rtl_profile ();
3481 find_many_sub_basic_blocks (blocks
);
3483 /* Splitting could drop an REG_EH_REGION if it potentially
3484 trapped in its original form, but does not in its split
3485 form. Consider a FLOAT_TRUNCATE which splits into a memory
3486 store/load pair and -fnon-call-exceptions. */
3487 if (need_cfg_cleanup
)
3491 checking_verify_flow_info ();
3494 /* Same as split_all_insns, but do not expect CFG to be available.
3495 Used by machine dependent reorg passes. */
3498 split_all_insns_noflow (void)
3500 rtx_insn
*next
, *insn
;
3502 for (insn
= get_insns (); insn
; insn
= next
)
3504 next
= NEXT_INSN (insn
);
3507 /* Don't split no-op move insns. These should silently
3508 disappear later in final. Splitting such insns would
3509 break the code that handles LIBCALL blocks. */
3510 rtx set
= single_set (insn
);
3511 if (set
&& set_noop_p (set
))
3513 /* Nops get in the way while scheduling, so delete them
3514 now if register allocation has already been done. It
3515 is too risky to try to do this before register
3516 allocation, and there are unlikely to be very many
3519 ??? Should we use delete_insn when the CFG isn't valid? */
3520 if (reload_completed
)
3521 delete_insn_and_edges (insn
);
3530 struct peep2_insn_data
3536 static struct peep2_insn_data peep2_insn_data
[MAX_INSNS_PER_PEEP2
+ 1];
3537 static int peep2_current
;
3539 static bool peep2_do_rebuild_jump_labels
;
3540 static bool peep2_do_cleanup_cfg
;
3542 /* The number of instructions available to match a peep2. */
3543 int peep2_current_count
;
3545 /* A marker indicating the last insn of the block. The live_before regset
3546 for this element is correct, indicating DF_LIVE_OUT for the block. */
3547 #define PEEP2_EOB invalid_insn_rtx
3549 /* Wrap N to fit into the peep2_insn_data buffer. */
3552 peep2_buf_position (int n)
3554 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3555 n -= MAX_INSNS_PER_PEEP2 + 1;
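/* Worked example (illustrative): the buffer has MAX_INSNS_PER_PEEP2 + 1 slots
(the macro is defined outside this file), so an index that runs off the end is
brought back by subtracting the slot count once. Assuming MAX_INSNS_PER_PEEP2
is 25, slot 24 plus an offset of 3 wraps around to slot 1. */
#if 0
static void
example_peep2_wrap (void)
{
  gcc_assert (peep2_buf_position (24 + 3) == 1);
}
#endif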
3559 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3560 does not exist. Used by the recognizer to find the next insn to match
3561 in a multi-insn pattern. */
3564 peep2_next_insn (int n)
3566 gcc_assert (n <= peep2_current_count);
3568 n = peep2_buf_position (peep2_current + n);
3570 return peep2_insn_data[n].insn;
3573 /* Return true if REGNO is dead before the Nth non-note insn
3577 peep2_regno_dead_p (int ofs, int regno)
3579 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3581 ofs = peep2_buf_position (peep2_current + ofs);
3583 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3585 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3588 /* Similarly for a REG. */
3591 peep2_reg_dead_p (int ofs, rtx reg)
3593 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3595 ofs = peep2_buf_position (peep2_current + ofs);
3597 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3599 unsigned int end_regno = END_REGNO (reg);
3600 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3601 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3606 /* Regno offset to be used in the register search. */
3607 static int search_ofs
;
3609 /* Try to find a hard register of mode MODE, matching the register class in
3610 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3611 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3612 in which case the only condition is that the register must be available
3613 before CURRENT_INSN.
3614 Registers that already have bits set in REG_SET will not be considered.
3616 If an appropriate register is available, it will be returned and the
3617 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3621 peep2_find_free_register (int from
, int to
, const char *class_str
,
3622 machine_mode mode
, HARD_REG_SET
*reg_set
)
3629 gcc_assert (from
< MAX_INSNS_PER_PEEP2
+ 1);
3630 gcc_assert (to
< MAX_INSNS_PER_PEEP2
+ 1);
3632 from
= peep2_buf_position (peep2_current
+ from
);
3633 to
= peep2_buf_position (peep2_current
+ to
);
3635 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
3636 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
3640 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
3642 /* Don't use registers set or clobbered by the insn. */
3643 FOR_EACH_INSN_DEF (def
, peep2_insn_data
[from
].insn
)
3644 SET_HARD_REG_BIT (live
, DF_REF_REGNO (def
));
3646 from
= peep2_buf_position (from
+ 1);
3649 cl
= reg_class_for_constraint (lookup_constraint (class_str
));
3651 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3653 int raw_regno
, regno
, success
, j
;
3655 /* Distribute the free registers as much as possible. */
3656 raw_regno
= search_ofs
+ i
;
3657 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
3658 raw_regno
-= FIRST_PSEUDO_REGISTER
;
3659 #ifdef REG_ALLOC_ORDER
3660 regno
= reg_alloc_order
[raw_regno
];
3665 /* Can it support the mode we need? */
3666 if (!targetm
.hard_regno_mode_ok (regno
, mode
))
3670 for (j
= 0; success
&& j
< hard_regno_nregs (regno
, mode
); j
++)
3672 /* Don't allocate fixed registers. */
3673 if (fixed_regs
[regno
+ j
])
3678 /* Don't allocate global registers. */
3679 if (global_regs
[regno
+ j
])
3684 /* Make sure the register is of the right class. */
3685 if (! TEST_HARD_REG_BIT (reg_class_contents
[cl
], regno
+ j
))
3690 /* And that we don't create an extra save/restore. */
3691 if (! crtl
->abi
->clobbers_full_reg_p (regno
+ j
)
3692 && ! df_regs_ever_live_p (regno
+ j
))
3698 if (! targetm
.hard_regno_scratch_ok (regno
+ j
))
3704 /* And we don't clobber traceback for noreturn functions. */
3705 if ((regno
+ j
== FRAME_POINTER_REGNUM
3706 || regno
+ j
== HARD_FRAME_POINTER_REGNUM
)
3707 && (! reload_completed
|| frame_pointer_needed
))
3713 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
3714 || TEST_HARD_REG_BIT (live
, regno
+ j
))
3723 add_to_hard_reg_set (reg_set
, mode
, regno
);
3725 /* Start the next search with the next register. */
3726 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
3728 search_ofs
= raw_regno
;
3730 return gen_rtx_REG (mode, regno);
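/* Usage sketch (illustrative): the real callers are define_peephole2 patterns
in target .md files, which ask for a scratch register that stays free across
the matched window. The constraint string and mode below are made up for the
example, and USED starts out empty so the first suitable register wins. */
#if 0
static void
example_find_free_register (void)
{
  HARD_REG_SET used;
  CLEAR_HARD_REG_SET (used);
  rtx scratch = peep2_find_free_register (0, 2, "r", SImode, &used);
  if (scratch == NULL_RTX)
    /* No register of class "r" is free from matched insn 0 through insn 2.  */
    ;
}
#endif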
3738 /* Forget all currently tracked instructions, only remember current
3742 peep2_reinit_state (regset live
)
3746 /* Indicate that all slots except the last holds invalid data. */
3747 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
; ++i
)
3748 peep2_insn_data
[i
].insn
= NULL
;
3749 peep2_current_count
= 0;
3751 /* Indicate that the last slot contains live_after data. */
3752 peep2_insn_data
[MAX_INSNS_PER_PEEP2
].insn
= PEEP2_EOB
;
3753 peep2_current
= MAX_INSNS_PER_PEEP2
;
3755 COPY_REG_SET (peep2_insn_data
[MAX_INSNS_PER_PEEP2
].live_before
, live
);
3758 /* Copies frame related info of an insn (OLD_INSN) to the single
3759 insn (NEW_INSN) that was obtained by splitting OLD_INSN. */
3762 copy_frame_info_to_split_insn (rtx_insn
*old_insn
, rtx_insn
*new_insn
)
3764 bool any_note
= false;
3767 if (!RTX_FRAME_RELATED_P (old_insn
))
3770 RTX_FRAME_RELATED_P (new_insn
) = 1;
3772 /* Allow the backend to fill in a note during the split. */
3773 for (note
= REG_NOTES (new_insn
); note
; note
= XEXP (note
, 1))
3774 switch (REG_NOTE_KIND (note
))
3776 case REG_FRAME_RELATED_EXPR
:
3777 case REG_CFA_DEF_CFA
:
3778 case REG_CFA_ADJUST_CFA
:
3779 case REG_CFA_OFFSET
:
3780 case REG_CFA_REGISTER
:
3781 case REG_CFA_EXPRESSION
:
3782 case REG_CFA_RESTORE
:
3783 case REG_CFA_SET_VDRAP
:
3790 /* If the backend didn't supply a note, copy one over. */
3792 for (note
= REG_NOTES (old_insn
); note
; note
= XEXP (note
, 1))
3793 switch (REG_NOTE_KIND (note
))
3795 case REG_FRAME_RELATED_EXPR
:
3796 case REG_CFA_DEF_CFA
:
3797 case REG_CFA_ADJUST_CFA
:
3798 case REG_CFA_OFFSET
:
3799 case REG_CFA_REGISTER
:
3800 case REG_CFA_EXPRESSION
:
3801 case REG_CFA_RESTORE
:
3802 case REG_CFA_SET_VDRAP
:
3803 add_reg_note (new_insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3810 /* If there still isn't a note, make sure the unwind info sees the
3811 same expression as before the split. */
3814 rtx old_set
, new_set
;
3816 /* The old insn had better have been simple, or annotated. */
3817 old_set
= single_set (old_insn
);
3818 gcc_assert (old_set
!= NULL
);
3820 new_set
= single_set (new_insn
);
3821 if (!new_set
|| !rtx_equal_p (new_set
, old_set
))
3822 add_reg_note (new_insn
, REG_FRAME_RELATED_EXPR
, old_set
);
3825 /* Copy prologue/epilogue status. This is required in order to keep
3826 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3827 maybe_copy_prologue_epilogue_insn (old_insn
, new_insn
);
3830 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3831 starting at INSN. Perform the replacement, removing the old insns and
3832 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3833 if the replacement is rejected. */
3836 peep2_attempt (basic_block bb
, rtx_insn
*insn
, int match_len
, rtx_insn
*attempt
)
3839 rtx_insn
*last
, *before_try
, *x
;
3840 rtx eh_note
, as_note
;
3843 bool was_call
= false;
3845 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3846 match more than one insn, or to be split into more than one insn. */
3847 old_insn
= peep2_insn_data
[peep2_current
].insn
;
3848 if (RTX_FRAME_RELATED_P (old_insn
))
3853 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3854 may be in the stream for the purpose of register allocation. */
3855 if (active_insn_p (attempt
))
3858 new_insn
= next_active_insn (attempt
);
3859 if (next_active_insn (new_insn
))
3862 /* We have a 1-1 replacement. Copy over any frame-related info. */
3863 copy_frame_info_to_split_insn (old_insn
, new_insn
);
3866 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3867 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3868 cfg-related call notes. */
3869 for (i
= 0; i
<= match_len
; ++i
)
3874 j
= peep2_buf_position (peep2_current
+ i
);
3875 old_insn
= peep2_insn_data
[j
].insn
;
3876 if (!CALL_P (old_insn
))
3881 while (new_insn
!= NULL_RTX
)
3883 if (CALL_P (new_insn
))
3885 new_insn
= NEXT_INSN (new_insn
);
3888 gcc_assert (new_insn
!= NULL_RTX
);
3890 CALL_INSN_FUNCTION_USAGE (new_insn
)
3891 = CALL_INSN_FUNCTION_USAGE (old_insn
);
3892 SIBLING_CALL_P (new_insn
) = SIBLING_CALL_P (old_insn
);
3894 for (note
= REG_NOTES (old_insn
);
3896 note
= XEXP (note
, 1))
3897 switch (REG_NOTE_KIND (note
))
3902 case REG_CALL_NOCF_CHECK
:
3903 add_reg_note (new_insn
, REG_NOTE_KIND (note
),
3907 /* Discard all other reg notes. */
3911 /* Croak if there is another call in the sequence. */
3912 while (++i
<= match_len
)
3914 j
= peep2_buf_position (peep2_current
+ i
);
3915 old_insn
= peep2_insn_data
[j
].insn
;
3916 gcc_assert (!CALL_P (old_insn
));
3921 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3922 move those notes over to the new sequence. */
3924 for (i
= match_len
; i
>= 0; --i
)
3926 int j
= peep2_buf_position (peep2_current
+ i
);
3927 old_insn
= peep2_insn_data
[j
].insn
;
3929 as_note
= find_reg_note (old_insn
, REG_ARGS_SIZE
, NULL
);
3934 i
= peep2_buf_position (peep2_current
+ match_len
);
3935 eh_note
= find_reg_note (peep2_insn_data
[i
].insn
, REG_EH_REGION
, NULL_RTX
);
3937 /* Replace the old sequence with the new. */
3938 rtx_insn
*peepinsn
= peep2_insn_data
[i
].insn
;
3939 last
= emit_insn_after_setloc (attempt
,
3940 peep2_insn_data
[i
].insn
,
3941 INSN_LOCATION (peepinsn
));
3942 if (JUMP_P (peepinsn
) && JUMP_P (last
))
3943 CROSSING_JUMP_P (last
) = CROSSING_JUMP_P (peepinsn
);
3944 before_try
= PREV_INSN (insn
);
3945 delete_insn_chain (insn
, peep2_insn_data
[i
].insn
, false);
3947 /* Re-insert the EH_REGION notes. */
3948 if (eh_note
|| (was_call
&& nonlocal_goto_handler_labels
))
3953 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
3954 if (eh_edge
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
))
3958 copy_reg_eh_region_note_backward (eh_note
, last
, before_try
);
3961 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
3962 if (x
!= BB_END (bb
)
3963 && (can_throw_internal (x
)
3964 || can_nonlocal_goto (x
)))
3969 nfte
= split_block (bb
, x
);
3970 flags
= (eh_edge
->flags
3971 & (EDGE_EH
| EDGE_ABNORMAL
));
3973 flags
|= EDGE_ABNORMAL_CALL
;
3974 nehe
= make_edge (nfte
->src
, eh_edge
->dest
,
3977 nehe
->probability
= eh_edge
->probability
;
3978 nfte
->probability
= nehe
->probability
.invert ();
3980 peep2_do_cleanup_cfg
|= purge_dead_edges (nfte
->dest
);
3985 /* Converting possibly trapping insn to non-trapping is
3986 possible. Zap dummy outgoing edges. */
3987 peep2_do_cleanup_cfg
|= purge_dead_edges (bb
);
3990 /* Re-insert the ARGS_SIZE notes. */
3992 fixup_args_size_notes (before_try
, last
, get_args_size (as_note
));
3994 /* Scan the new insns for embedded side effects and add appropriate
3997 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
3998 if (NONDEBUG_INSN_P (x
))
3999 add_auto_inc_notes (x
, PATTERN (x
));
4001 /* If we generated a jump instruction, it won't have
4002 JUMP_LABEL set. Recompute after we're done. */
4003 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
4006 peep2_do_rebuild_jump_labels
= true;
4013 /* After performing a replacement in basic block BB, fix up the life
4014 information in our buffer. LAST is the last of the insns that we
4015 emitted as a replacement. PREV is the insn before the start of
4016 the replacement. MATCH_LEN is the number of instructions that were
4017 matched, and which now need to be replaced in the buffer. */
4020 peep2_update_life (basic_block bb
, int match_len
, rtx_insn
*last
,
4023 int i
= peep2_buf_position (peep2_current
+ match_len
+ 1);
4027 INIT_REG_SET (&live
);
4028 COPY_REG_SET (&live
, peep2_insn_data
[i
].live_before
);
4030 gcc_assert (peep2_current_count
>= match_len
+ 1);
4031 peep2_current_count
-= match_len
+ 1;
4039 if (peep2_current_count
< MAX_INSNS_PER_PEEP2
)
4041 peep2_current_count
++;
4043 i
= MAX_INSNS_PER_PEEP2
;
4044 peep2_insn_data
[i
].insn
= x
;
4045 df_simulate_one_insn_backwards (bb
, x
, &live
);
4046 COPY_REG_SET (peep2_insn_data
[i
].live_before
, &live
);
4052 CLEAR_REG_SET (&live
);
4057 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
4058 Return true if we added it, false otherwise. The caller will try to match
4059 peepholes against the buffer if we return false; otherwise it will try to
4060 add more instructions to the buffer. */
4063 peep2_fill_buffer (basic_block bb
, rtx_insn
*insn
, regset live
)
4067 /* Once we have filled the maximum number of insns the buffer can hold,
4068 allow the caller to match the insns against peepholes. We wait until
4069 the buffer is full in case the target has similar peepholes of different
4070 length; we always want to match the longest if possible. */
4071 if (peep2_current_count
== MAX_INSNS_PER_PEEP2
)
4074 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
4075 any other pattern, lest it change the semantics of the frame info. */
4076 if (RTX_FRAME_RELATED_P (insn
))
4078 /* Let the buffer drain first. */
4079 if (peep2_current_count
> 0)
4081 /* Now the insn will be the only thing in the buffer. */
4084 pos
= peep2_buf_position (peep2_current
+ peep2_current_count
);
4085 peep2_insn_data
[pos
].insn
= insn
;
4086 COPY_REG_SET (peep2_insn_data
[pos
].live_before
, live
);
4087 peep2_current_count
++;
4089 df_simulate_one_insn_forwards (bb
, insn
, live
);
4093 /* Perform the peephole2 optimization pass. */
4096 peephole2_optimize (void)
4103 peep2_do_cleanup_cfg
= false;
4104 peep2_do_rebuild_jump_labels
= false;
4106 df_set_flags (DF_LR_RUN_DCE
);
4107 df_note_add_problem ();
4110 /* Initialize the regsets we're going to use. */
4111 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
4112 peep2_insn_data
[i
].live_before
= BITMAP_ALLOC (®_obstack
);
4114 live
= BITMAP_ALLOC (®_obstack
);
4116 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
4118 bool past_end
= false;
4121 rtl_profile_for_bb (bb
);
4123 /* Start up propagation. */
4124 bitmap_copy (live
, DF_LR_IN (bb
));
4125 df_simulate_initialize_forwards (bb
, live
);
4126 peep2_reinit_state (live
);
4128 insn
= BB_HEAD (bb
);
4131 rtx_insn
*attempt
, *head
;
4134 if (!past_end
&& !NONDEBUG_INSN_P (insn
))
4137 insn
= NEXT_INSN (insn
);
4138 if (insn
== NEXT_INSN (BB_END (bb
)))
4142 if (!past_end
&& peep2_fill_buffer (bb
, insn
, live
))
4145 /* If we did not fill an empty buffer, it signals the end of the
4147 if (peep2_current_count
== 0)
4150 /* The buffer filled to the current maximum, so try to match. */
4152 pos
= peep2_buf_position (peep2_current
+ peep2_current_count
);
4153 peep2_insn_data
[pos
].insn
= PEEP2_EOB
;
4154 COPY_REG_SET (peep2_insn_data
[pos
].live_before
, live
);
4156 /* Match the peephole. */
4157 head
= peep2_insn_data
[peep2_current
].insn
;
4158 attempt
= peephole2_insns (PATTERN (head
), head
, &match_len
);
4159 if (attempt
!= NULL
)
4161 rtx_insn
*last
= peep2_attempt (bb
, head
, match_len
, attempt
);
4164 peep2_update_life (bb
, match_len
, last
, PREV_INSN (attempt
));
4169 /* No match: advance the buffer by one insn. */
4170 peep2_current
= peep2_buf_position (peep2_current
+ 1);
4171 peep2_current_count
--;
4175 default_rtl_profile ();
4176 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
4177 BITMAP_FREE (peep2_insn_data
[i
].live_before
);
4179 if (peep2_do_rebuild_jump_labels
)
4180 rebuild_jump_labels (get_insns ());
4181 if (peep2_do_cleanup_cfg
)
4182 cleanup_cfg (CLEANUP_CFG_CHANGED
);
4185 /* Common predicates for use with define_bypass. */
4187 /* Helper function for store_data_bypass_p, handle just a single SET
4191 store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
4193 if (!MEM_P (SET_DEST (in_set)))
4196 rtx out_set = single_set (out_insn);
4198 return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
4200 rtx out_pat = PATTERN (out_insn);
4201 if (GET_CODE (out_pat) != PARALLEL)
4204 for (int i = 0; i < XVECLEN (out_pat, 0); i++)
4206 rtx out_exp = XVECEXP (out_pat, 0, i);
4208 if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
4211 gcc_assert (GET_CODE (out_exp) == SET);
4213 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
4220 /* True if the dependency between OUT_INSN and IN_INSN is on the store
4221 data not the address operand(s) of the store. IN_INSN and OUT_INSN
4222 must be either a single_set or a PARALLEL with SETs inside. */
4225 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4227 rtx in_set = single_set (in_insn);
4229 return store_data_bypass_p_1 (out_insn, in_set);
4231 rtx in_pat = PATTERN (in_insn);
4232 if (GET_CODE (in_pat) != PARALLEL)
4235 for (int i = 0; i < XVECLEN (in_pat, 0); i++)
4237 rtx in_exp = XVECEXP (in_pat, 0, i);
4239 if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
4242 gcc_assert (GET_CODE (in_exp) == SET);
4244 if (!store_data_bypass_p_1 (out_insn, in_exp))
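/* Worked example (illustrative): consider
     OUT: (set (reg A) (plus (reg B) (reg C)))
     IN:  (set (mem (reg A)) (reg D))
   Here A feeds the *address* of the store, so store_data_bypass_p returns
   false; if IN were (set (mem (reg X)) (reg A)) instead, A would feed the
   stored *data* and the predicate would return true. The registers below
   are hypothetical pseudos created only for the illustration. */
#if 0
static void
example_store_data_bypass (void)
{
  rtx a = gen_reg_rtx (SImode), b = gen_reg_rtx (SImode);
  rtx d = gen_reg_rtx (SImode), x = gen_reg_rtx (SImode);
  rtx_insn *out
    = make_insn_raw (gen_rtx_SET (a, gen_rtx_PLUS (SImode, b, b)));
  rtx_insn *in_addr
    = make_insn_raw (gen_rtx_SET (gen_rtx_MEM (SImode, a), d));
  rtx_insn *in_data
    = make_insn_raw (gen_rtx_SET (gen_rtx_MEM (SImode, x), a));
  gcc_assert (!store_data_bypass_p (out, in_addr));
  gcc_assert (store_data_bypass_p (out, in_data));
}
#endif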
4251 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
4252 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
4253 or multiple set; IN_INSN should be single_set for truth, but for convenience
4254 of insn categorization may be any JUMP or CALL insn. */
4257 if_test_bypass_p (rtx_insn
*out_insn
, rtx_insn
*in_insn
)
4259 rtx out_set
, in_set
;
4261 in_set
= single_set (in_insn
);
4264 gcc_assert (JUMP_P (in_insn
) || CALL_P (in_insn
));
4268 if (GET_CODE (SET_SRC (in_set
)) != IF_THEN_ELSE
)
4270 in_set
= SET_SRC (in_set
);
4272 out_set
= single_set (out_insn
);
4275 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
4276 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
4284 out_pat
= PATTERN (out_insn
);
4285 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
4287 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
4289 rtx exp
= XVECEXP (out_pat
, 0, i
);
4291 if (GET_CODE (exp
) == CLOBBER
)
4294 gcc_assert (GET_CODE (exp
) == SET
);
4296 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
4297 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
4306 rest_of_handle_peephole2 (void)
4309 peephole2_optimize ();
4316 const pass_data pass_data_peephole2
=
4318 RTL_PASS
, /* type */
4319 "peephole2", /* name */
4320 OPTGROUP_NONE
, /* optinfo_flags */
4321 TV_PEEPHOLE2
, /* tv_id */
4322 0, /* properties_required */
4323 0, /* properties_provided */
4324 0, /* properties_destroyed */
4325 0, /* todo_flags_start */
4326 TODO_df_finish
, /* todo_flags_finish */
4329 class pass_peephole2
: public rtl_opt_pass
4332 pass_peephole2 (gcc::context
*ctxt
)
4333 : rtl_opt_pass (pass_data_peephole2
, ctxt
)
4336 /* opt_pass methods: */
4337 /* The epiphany backend creates a second instance of this pass, so we need
4339 opt_pass
* clone () { return new pass_peephole2 (m_ctxt
); }
4340 virtual bool gate (function
*) { return (optimize
> 0 && flag_peephole2
); }
4341 virtual unsigned int execute (function
*)
4343 return rest_of_handle_peephole2 ();
4346 }; // class pass_peephole2
4351 make_pass_peephole2 (gcc::context
*ctxt
)
4353 return new pass_peephole2 (ctxt
);
4358 const pass_data pass_data_split_all_insns
=
4360 RTL_PASS
, /* type */
4361 "split1", /* name */
4362 OPTGROUP_NONE
, /* optinfo_flags */
4363 TV_NONE
, /* tv_id */
4364 0, /* properties_required */
4365 PROP_rtl_split_insns
, /* properties_provided */
4366 0, /* properties_destroyed */
4367 0, /* todo_flags_start */
4368 0, /* todo_flags_finish */
4371 class pass_split_all_insns
: public rtl_opt_pass
4374 pass_split_all_insns (gcc::context
*ctxt
)
4375 : rtl_opt_pass (pass_data_split_all_insns
, ctxt
)
4378 /* opt_pass methods: */
4379 /* The epiphany backend creates a second instance of this pass, so
4380 we need a clone method. */
4381 opt_pass
* clone () { return new pass_split_all_insns (m_ctxt
); }
4382 virtual unsigned int execute (function
*)
4388 }; // class pass_split_all_insns
4393 make_pass_split_all_insns (gcc::context
*ctxt
)
4395 return new pass_split_all_insns (ctxt
);
4400 const pass_data pass_data_split_after_reload
=
4402 RTL_PASS
, /* type */
4403 "split2", /* name */
4404 OPTGROUP_NONE
, /* optinfo_flags */
4405 TV_NONE
, /* tv_id */
4406 0, /* properties_required */
4407 0, /* properties_provided */
4408 0, /* properties_destroyed */
4409 0, /* todo_flags_start */
4410 0, /* todo_flags_finish */
4413 class pass_split_after_reload
: public rtl_opt_pass
4416 pass_split_after_reload (gcc::context
*ctxt
)
4417 : rtl_opt_pass (pass_data_split_after_reload
, ctxt
)
4420 /* opt_pass methods: */
4421 virtual bool gate (function
*)
4423 /* If optimizing, then go ahead and split insns now. */
4424 return optimize
> 0;
4427 virtual unsigned int execute (function
*)
4433 }; // class pass_split_after_reload
4438 make_pass_split_after_reload (gcc::context
*ctxt
)
4440 return new pass_split_after_reload (ctxt
);
4444 enable_split_before_sched2 (void)
4446 #ifdef INSN_SCHEDULING
4447 return optimize
> 0 && flag_schedule_insns_after_reload
;
4455 const pass_data pass_data_split_before_sched2
=
4457 RTL_PASS
, /* type */
4458 "split3", /* name */
4459 OPTGROUP_NONE
, /* optinfo_flags */
4460 TV_NONE
, /* tv_id */
4461 0, /* properties_required */
4462 0, /* properties_provided */
4463 0, /* properties_destroyed */
4464 0, /* todo_flags_start */
4465 0, /* todo_flags_finish */
4468 class pass_split_before_sched2
: public rtl_opt_pass
4471 pass_split_before_sched2 (gcc::context
*ctxt
)
4472 : rtl_opt_pass (pass_data_split_before_sched2
, ctxt
)
4475 /* opt_pass methods: */
4476 virtual bool gate (function
*)
4478 return enable_split_before_sched2 ();
4481 virtual unsigned int execute (function
*)
4487 }; // class pass_split_before_sched2
4492 make_pass_split_before_sched2 (gcc::context
*ctxt
)
4494 return new pass_split_before_sched2 (ctxt
);
4499 const pass_data pass_data_split_before_regstack
=
4501 RTL_PASS
, /* type */
4502 "split4", /* name */
4503 OPTGROUP_NONE
, /* optinfo_flags */
4504 TV_NONE
, /* tv_id */
4505 0, /* properties_required */
4506 0, /* properties_provided */
4507 0, /* properties_destroyed */
4508 0, /* todo_flags_start */
4509 0, /* todo_flags_finish */
4512 class pass_split_before_regstack
: public rtl_opt_pass
4515 pass_split_before_regstack (gcc::context
*ctxt
)
4516 : rtl_opt_pass (pass_data_split_before_regstack
, ctxt
)
4519 /* opt_pass methods: */
4520 virtual bool gate (function
*);
4521 virtual unsigned int execute (function
*)
4527 }; // class pass_split_before_regstack
4530 pass_split_before_regstack::gate (function
*)
4532 #if HAVE_ATTR_length && defined (STACK_REGS)
4533 /* If flow2 creates new instructions which need splitting
4534 and scheduling after reload is not done, they might not be
4535 split until final which doesn't allow splitting
4536 if HAVE_ATTR_length. Selective scheduling can result in
4537 further instructions that need splitting. */
4538 #ifdef INSN_SCHEDULING
4539 return !enable_split_before_sched2 () || flag_selective_scheduling2
;
4541 return !enable_split_before_sched2 ();
4551 make_pass_split_before_regstack (gcc::context
*ctxt
)
4553 return new pass_split_before_regstack (ctxt
);
4558 const pass_data pass_data_split_for_shorten_branches
=
4560 RTL_PASS
, /* type */
4561 "split5", /* name */
4562 OPTGROUP_NONE
, /* optinfo_flags */
4563 TV_NONE
, /* tv_id */
4564 0, /* properties_required */
4565 0, /* properties_provided */
4566 0, /* properties_destroyed */
4567 0, /* todo_flags_start */
4568 0, /* todo_flags_finish */
4571 class pass_split_for_shorten_branches
: public rtl_opt_pass
4574 pass_split_for_shorten_branches (gcc::context
*ctxt
)
4575 : rtl_opt_pass (pass_data_split_for_shorten_branches
, ctxt
)
4578 /* opt_pass methods: */
4579 virtual bool gate (function
*)
4581 /* The placement of the splitting that we do for shorten_branches
4582 depends on whether regstack is used by the target or not. */
4583 #if HAVE_ATTR_length && !defined (STACK_REGS)
4590 virtual unsigned int execute (function
*)
4592 return split_all_insns_noflow ();
4595 }; // class pass_split_for_shorten_branches
4600 make_pass_split_for_shorten_branches (gcc::context
*ctxt
)
4602 return new pass_split_for_shorten_branches (ctxt
);
4605 /* (Re)initialize the target information after a change in target. */
4610 /* The information is zero-initialized, so we don't need to do anything
4611 first time round. */
4612 if (!this_target_recog
->x_initialized
)
4614 this_target_recog
->x_initialized
= true;
4617 memset (this_target_recog
->x_bool_attr_masks
, 0,
4618 sizeof (this_target_recog
->x_bool_attr_masks
));
4619 for (unsigned int i
= 0; i
< NUM_INSN_CODES
; ++i
)
4620 if (this_target_recog
->x_op_alt
[i
])
4622 free (this_target_recog
->x_op_alt
[i
]);
4623 this_target_recog
->x_op_alt
[i
] = 0;