/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "rtl-error.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "addresses.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "tree-pass.h"
#include "insn-codes.h"
#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;

struct target_recog *this_target_recog = &default_target_recog;
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

init_recog_no_volatile (void)
{
  volatile_ok = 0;
}
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;

      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */
  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
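
/* Usage sketch (illustrative only, not part of the original source): a pass
   that wants to try several interdependent edits queues them with IN_GROUP
   nonzero and then validates the whole group at once, e.g.

       validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
       validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
       ok = apply_change_group ();

   where INSN, NEW_SRC and NEW_DEST are hypothetical.  If apply_change_group
   returns 0, every queued edit has been rolled back and INSN is unchanged.
   A single tentative edit can instead pass IN_GROUP as zero, in which case
   validate_change itself re-recognizes the result and either keeps or
   suppresses the change, as described above.  */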
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   to false.  */

validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   to true.  */

validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */

canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers which have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
		      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);

      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */

num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;
  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
		  REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
	 are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
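
/* Usage sketch (illustrative only, not part of the original source):
   num_validated_changes and cancel_changes together act as a checkpoint and
   rollback mechanism for the current change group, e.g.

       int checkpoint = num_validated_changes ();
       validate_change (insn, loc, new_rtx, 1);
       if (!verify_changes (checkpoint))
	 cancel_changes (checkpoint);

   where INSN, LOC and NEW_RTX are hypothetical.  Changes numbered below
   CHECKPOINT stay queued in the group and are unaffected by the rollback.  */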
/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
			  machine_mode op0_mode)
586 enum rtx_code code
= GET_CODE (x
);
587 rtx new_rtx
= NULL_RTX
;
589 if (SWAPPABLE_OPERANDS_P (x
)
590 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
592 validate_unshare_change (object
, loc
,
593 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x
) ? code
594 : swap_condition (code
),
595 GET_MODE (x
), XEXP (x
, 1),
601 /* Canonicalize arithmetics with all constant operands. */
602 switch (GET_RTX_CLASS (code
))
605 if (CONSTANT_P (XEXP (x
, 0)))
606 new_rtx
= simplify_unary_operation (code
, GET_MODE (x
), XEXP (x
, 0),
611 if (CONSTANT_P (XEXP (x
, 0)) && CONSTANT_P (XEXP (x
, 1)))
612 new_rtx
= simplify_binary_operation (code
, GET_MODE (x
), XEXP (x
, 0),
616 case RTX_COMM_COMPARE
:
617 if (CONSTANT_P (XEXP (x
, 0)) && CONSTANT_P (XEXP (x
, 1)))
618 new_rtx
= simplify_relational_operation (code
, GET_MODE (x
), op0_mode
,
619 XEXP (x
, 0), XEXP (x
, 1));
626 validate_change (object
, loc
, new_rtx
, 1);
633 /* If we have a PLUS whose second operand is now a CONST_INT, use
634 simplify_gen_binary to try to simplify it.
635 ??? We may want later to remove this, once simplification is
636 separated from this function. */
637 if (CONST_INT_P (XEXP (x
, 1)) && XEXP (x
, 1) == to
)
638 validate_change (object
, loc
,
640 (PLUS
, GET_MODE (x
), XEXP (x
, 0), XEXP (x
, 1)), 1);
643 if (CONST_SCALAR_INT_P (XEXP (x
, 1)))
644 validate_change (object
, loc
,
646 (PLUS
, GET_MODE (x
), XEXP (x
, 0),
647 simplify_gen_unary (NEG
,
648 GET_MODE (x
), XEXP (x
, 1),
653 if (GET_MODE (XEXP (x
, 0)) == VOIDmode
)
655 new_rtx
= simplify_gen_unary (code
, GET_MODE (x
), XEXP (x
, 0),
657 /* If any of the above failed, substitute in something that
658 we know won't be recognized. */
660 new_rtx
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
661 validate_change (object
, loc
, new_rtx
, 1);
665 /* All subregs possible to simplify should be simplified. */
666 new_rtx
= simplify_subreg (GET_MODE (x
), SUBREG_REG (x
), op0_mode
,
669 /* Subregs of VOIDmode operands are incorrect. */
670 if (!new_rtx
&& GET_MODE (SUBREG_REG (x
)) == VOIDmode
)
671 new_rtx
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
673 validate_change (object
, loc
, new_rtx
, 1);
677 /* If we are replacing a register with memory, try to change the memory
678 to be the mode required for memory in extract operations (this isn't
679 likely to be an insertion operation; if it was, nothing bad will
680 happen, we might just fail in some cases). */
682 if (MEM_P (XEXP (x
, 0))
683 && CONST_INT_P (XEXP (x
, 1))
684 && CONST_INT_P (XEXP (x
, 2))
685 && !mode_dependent_address_p (XEXP (XEXP (x
, 0), 0),
686 MEM_ADDR_SPACE (XEXP (x
, 0)))
687 && !MEM_VOLATILE_P (XEXP (x
, 0)))
689 machine_mode wanted_mode
= VOIDmode
;
690 machine_mode is_mode
= GET_MODE (XEXP (x
, 0));
691 int pos
= INTVAL (XEXP (x
, 2));
693 if (GET_CODE (x
) == ZERO_EXTRACT
&& HAVE_extzv
)
695 wanted_mode
= insn_data
[CODE_FOR_extzv
].operand
[1].mode
;
696 if (wanted_mode
== VOIDmode
)
697 wanted_mode
= word_mode
;
699 else if (GET_CODE (x
) == SIGN_EXTRACT
&& HAVE_extv
)
701 wanted_mode
= insn_data
[CODE_FOR_extv
].operand
[1].mode
;
702 if (wanted_mode
== VOIDmode
)
703 wanted_mode
= word_mode
;
706 /* If we have a narrower mode, we can do something. */
707 if (wanted_mode
!= VOIDmode
708 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
710 int offset
= pos
/ BITS_PER_UNIT
;
713 /* If the bytes and bits are counted differently, we
714 must adjust the offset. */
715 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
717 (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
) -
720 gcc_assert (GET_MODE_PRECISION (wanted_mode
)
721 == GET_MODE_BITSIZE (wanted_mode
));
722 pos
%= GET_MODE_BITSIZE (wanted_mode
);
724 newmem
= adjust_address_nv (XEXP (x
, 0), wanted_mode
, offset
);
726 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
727 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
756 fmt
= GET_RTX_FORMAT (code
);
758 op0_mode
= GET_MODE (XEXP (x
, 0));
760 /* X matches FROM if it is the same rtx or they are both referring to the
761 same register in the same mode. Avoid calling rtx_equal_p unless the
762 operands look similar. */
765 || (REG_P (x
) && REG_P (from
)
766 && GET_MODE (x
) == GET_MODE (from
)
767 && REGNO (x
) == REGNO (from
))
768 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
769 && rtx_equal_p (x
, from
)))
771 validate_unshare_change (object
, loc
, to
, 1);
775 /* Call ourself recursively to perform the replacements.
776 We must not replace inside already replaced expression, otherwise we
777 get infinite recursion for replacements like (reg X)->(subreg (reg X))
778 so we must special case shared ASM_OPERANDS. */
780 if (GET_CODE (x
) == PARALLEL
)
782 for (j
= XVECLEN (x
, 0) - 1; j
>= 0; j
--)
784 if (j
&& GET_CODE (XVECEXP (x
, 0, j
)) == SET
785 && GET_CODE (SET_SRC (XVECEXP (x
, 0, j
))) == ASM_OPERANDS
)
787 /* Verify that operands are really shared. */
788 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x
, 0, 0)))
789 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
791 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x
, 0, j
)),
792 from
, to
, object
, simplify
);
795 validate_replace_rtx_1 (&XVECEXP (x
, 0, j
), from
, to
, object
,
800 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
803 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
, simplify
);
804 else if (fmt
[i
] == 'E')
805 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
806 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
,
  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
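
/* Usage sketch (illustrative only, not part of the original source): a
   transformation that wants to rewrite every use of one rtx inside a single
   insn can simply call

       if (validate_replace_rtx (old_reg, new_reg, insn))
	 ...

   with hypothetical OLD_REG, NEW_REG and INSN.  The helper queues the edits
   through validate_change and then lets apply_change_group accept or roll
   back the whole set atomically, so a zero return leaves INSN untouched.  */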
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.

   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */

validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
				      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx_insn *insn;		/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
915 /* Try simplify INSN.
916 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
917 pattern and return true if something was simplified. */
920 validate_simplify_insn (rtx_insn
*insn
)
926 pat
= PATTERN (insn
);
928 if (GET_CODE (pat
) == SET
)
930 newpat
= simplify_rtx (SET_SRC (pat
));
931 if (newpat
&& !rtx_equal_p (SET_SRC (pat
), newpat
))
932 validate_change (insn
, &SET_SRC (pat
), newpat
, 1);
933 newpat
= simplify_rtx (SET_DEST (pat
));
934 if (newpat
&& !rtx_equal_p (SET_DEST (pat
), newpat
))
935 validate_change (insn
, &SET_DEST (pat
), newpat
, 1);
937 else if (GET_CODE (pat
) == PARALLEL
)
938 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
940 rtx s
= XVECEXP (pat
, 0, i
);
942 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
)
944 newpat
= simplify_rtx (SET_SRC (s
));
945 if (newpat
&& !rtx_equal_p (SET_SRC (s
), newpat
))
946 validate_change (insn
, &SET_SRC (s
), newpat
, 1);
947 newpat
= simplify_rtx (SET_DEST (s
));
948 if (newpat
&& !rtx_equal_p (SET_DEST (s
), newpat
))
949 validate_change (insn
, &SET_DEST (s
), newpat
, 1);
952 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

next_insn_tests_no_inequality (rtx_insn *insn)
{
  rtx_insn *next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */
987 general_operand (rtx op
, machine_mode mode
)
989 enum rtx_code code
= GET_CODE (op
);
991 if (mode
== VOIDmode
)
992 mode
= GET_MODE (op
);
994 /* Don't accept CONST_INT or anything similar
995 if the caller wants something floating. */
996 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
997 && GET_MODE_CLASS (mode
) != MODE_INT
998 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1001 if (CONST_INT_P (op
)
1003 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1006 if (CONSTANT_P (op
))
1007 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1008 || mode
== VOIDmode
)
1009 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1010 && targetm
.legitimate_constant_p (mode
== VOIDmode
1014 /* Except for certain constants with VOIDmode, already checked for,
1015 OP's mode must match MODE if MODE specifies a mode. */
1017 if (GET_MODE (op
) != mode
)
1022 rtx sub
= SUBREG_REG (op
);
1024 #ifdef INSN_SCHEDULING
1025 /* On machines that have insn scheduling, we want all memory
1026 reference to be explicit, so outlaw paradoxical SUBREGs.
1027 However, we must allow them after reload so that they can
1028 get cleaned up by cleanup_subreg_operands. */
1029 if (!reload_completed
&& MEM_P (sub
)
1030 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (sub
)))
1033 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1034 may result in incorrect reference. We should simplify all valid
1035 subregs of MEM anyway. But allow this after reload because we
1036 might be called from cleanup_subreg_operands.
1038 ??? This is a kludge. */
1039 if (!reload_completed
&& SUBREG_BYTE (op
) != 0
1043 #ifdef CANNOT_CHANGE_MODE_CLASS
1045 && REGNO (sub
) < FIRST_PSEUDO_REGISTER
1046 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub
), GET_MODE (sub
), mode
)
1047 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_INT
1048 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_FLOAT
1049 /* LRA can generate some invalid SUBREGS just for matched
1050 operand reload presentation. LRA needs to treat them as
1052 && ! LRA_SUBREG_P (op
))
1056 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1057 create such rtl, and we must reject it. */
1058 if (SCALAR_FLOAT_MODE_P (GET_MODE (op
))
1059 /* LRA can use subreg to store a floating point value in an
1060 integer mode. Although the floating point and the
1061 integer modes need the same number of hard registers, the
1062 size of floating point mode can be less than the integer
1064 && ! lra_in_progress
1065 && GET_MODE_SIZE (GET_MODE (op
)) > GET_MODE_SIZE (GET_MODE (sub
)))
1069 code
= GET_CODE (op
);
1073 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1074 || in_hard_reg_set_p (operand_reg_set
, GET_MODE (op
), REGNO (op
)));
1078 rtx y
= XEXP (op
, 0);
1080 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
1083 /* Use the mem's mode, since it will be reloaded thus. LRA can
1084 generate move insn with invalid addresses which is made valid
1085 and efficiently calculated by LRA through further numerous
1088 || memory_address_addr_space_p (GET_MODE (op
), y
, MEM_ADDR_SPACE (op
)))
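
/* Illustrative sketch (not part of the original source): target code builds
   its own predicates on top of the generic ones in this file.  A hypothetical
   reg_or_mem_operand predicate, of the kind normally generated from a
   define_predicate in the machine description, would amount to

       static int
       reg_or_mem_operand (rtx op, machine_mode mode)
       {
	 return register_operand (op, mode) || memory_operand (op, mode);
       }

   general_operand is the most permissive of the predicates defined here and,
   as its comment above says, is mainly used directly in match_operand
   expressions.  */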
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

address_operand (rtx op, machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
	return 0;
    }
  else if (!REG_P (op))
    return 0;

  return general_operand (op, mode);
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && (lra_in_progress
		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op) : mode, op));
}
1187 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE. */
1190 const_int_operand (rtx op
, machine_mode mode
)
1192 if (!CONST_INT_P (op
))
1195 if (mode
!= VOIDmode
1196 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1202 #if TARGET_SUPPORTS_WIDE_INT
1203 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1206 const_scalar_int_operand (rtx op
, machine_mode mode
)
1208 if (!CONST_SCALAR_INT_P (op
))
1211 if (CONST_INT_P (op
))
1212 return const_int_operand (op
, mode
);
1214 if (mode
!= VOIDmode
)
1216 int prec
= GET_MODE_PRECISION (mode
);
1217 int bitsize
= GET_MODE_BITSIZE (mode
);
1219 if (CONST_WIDE_INT_NUNITS (op
) * HOST_BITS_PER_WIDE_INT
> bitsize
)
1222 if (prec
== bitsize
)
1226 /* Multiword partial int. */
1228 = CONST_WIDE_INT_ELT (op
, CONST_WIDE_INT_NUNITS (op
) - 1);
1229 return (sext_hwi (x
, prec
& (HOST_BITS_PER_WIDE_INT
- 1)) == x
);
1235 /* Returns 1 if OP is an operand that is a constant integer or constant
1236 floating-point number of MODE. */
1239 const_double_operand (rtx op
, machine_mode mode
)
1241 return (GET_CODE (op
) == CONST_DOUBLE
)
1242 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
);
1245 /* Returns 1 if OP is an operand that is a constant integer or constant
1246 floating-point number of MODE. */
1249 const_double_operand (rtx op
, machine_mode mode
)
1251 /* Don't accept CONST_INT or anything similar
1252 if the caller wants something floating. */
1253 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1254 && GET_MODE_CLASS (mode
) != MODE_INT
1255 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1258 return ((CONST_DOUBLE_P (op
) || CONST_INT_P (op
))
1259 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1260 || GET_MODE (op
) == VOIDmode
));
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}
1282 /* Return 1 if OP is a valid operand that stands for pushing a
1283 value of mode MODE onto the stack.
1285 The main use of this function is as a predicate in match_operand
1286 expressions in the machine description. */
1289 push_operand (rtx op
, machine_mode mode
)
1291 unsigned int rounded_size
= GET_MODE_SIZE (mode
);
1293 #ifdef PUSH_ROUNDING
1294 rounded_size
= PUSH_ROUNDING (rounded_size
);
1300 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1305 if (rounded_size
== GET_MODE_SIZE (mode
))
1307 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1312 if (GET_CODE (op
) != PRE_MODIFY
1313 || GET_CODE (XEXP (op
, 1)) != PLUS
1314 || XEXP (XEXP (op
, 1), 0) != XEXP (op
, 0)
1315 || !CONST_INT_P (XEXP (XEXP (op
, 1), 1))
1316 || INTVAL (XEXP (XEXP (op
, 1), 1))
1317 != ((STACK_GROWS_DOWNWARD
? -1 : 1) * (int) rounded_size
))
1321 return XEXP (op
, 0) == stack_pointer_rtx
;
1324 /* Return 1 if OP is a valid operand that stands for popping a
1325 value of mode MODE off the stack.
1327 The main use of this function is as a predicate in match_operand
1328 expressions in the machine description. */
1331 pop_operand (rtx op
, machine_mode mode
)
1336 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1341 if (GET_CODE (op
) != STACK_POP_CODE
)
1344 return XEXP (op
, 0) == stack_pointer_rtx
;
1347 /* Return 1 if ADDR is a valid memory address
1348 for mode MODE in address space AS. */
1351 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED
,
1352 rtx addr
, addr_space_t as
)
1354 #ifdef GO_IF_LEGITIMATE_ADDRESS
1355 gcc_assert (ADDR_SPACE_GENERIC_P (as
));
1356 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1362 return targetm
.addr_space
.legitimate_address_p (mode
, addr
, 0, as
);
1366 /* Return 1 if OP is a valid memory reference with mode MODE,
1367 including a valid address.
1369 The main use of this function is as a predicate in match_operand
1370 expressions in the machine description. */
1373 memory_operand (rtx op
, machine_mode mode
)
1377 if (! reload_completed
)
1378 /* Note that no SUBREG is a memory operand before end of reload pass,
1379 because (SUBREG (MEM...)) forces reloading into a register. */
1380 return MEM_P (op
) && general_operand (op
, mode
);
1382 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1386 if (GET_CODE (inner
) == SUBREG
)
1387 inner
= SUBREG_REG (inner
);
1389 return (MEM_P (inner
) && general_operand (op
, mode
));
1392 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1393 that is, a memory reference whose address is a general_operand. */
1396 indirect_operand (rtx op
, machine_mode mode
)
1398 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1399 if (! reload_completed
1400 && GET_CODE (op
) == SUBREG
&& MEM_P (SUBREG_REG (op
)))
1402 int offset
= SUBREG_BYTE (op
);
1403 rtx inner
= SUBREG_REG (op
);
1405 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1408 /* The only way that we can have a general_operand as the resulting
1409 address is if OFFSET is zero and the address already is an operand
1410 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1413 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1414 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1415 && CONST_INT_P (XEXP (XEXP (inner
, 0), 1))
1416 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1417 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1421 && memory_operand (op
, mode
)
1422 && general_operand (XEXP (op
, 0), Pmode
));
1425 /* Return 1 if this is an ordered comparison operator (not including
1426 ORDERED and UNORDERED). */
1429 ordered_comparison_operator (rtx op
, machine_mode mode
)
1431 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1433 switch (GET_CODE (op
))
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}
1461 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1464 extract_asm_operands (rtx body
)
1467 switch (GET_CODE (body
))
1473 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1474 tmp
= SET_SRC (body
);
1475 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1480 tmp
= XVECEXP (body
, 0, 0);
1481 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1483 if (GET_CODE (tmp
) == SET
)
1485 tmp
= SET_SRC (tmp
);
1486 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1497 /* If BODY is an insn body that uses ASM_OPERANDS,
1498 return the number of operands (both input and output) in the insn.
1499 Otherwise return -1. */
1502 asm_noperands (const_rtx body
)
1504 rtx asm_op
= extract_asm_operands (CONST_CAST_RTX (body
));
1510 if (GET_CODE (body
) == SET
)
1512 else if (GET_CODE (body
) == PARALLEL
)
1515 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
)
1517 /* Multiple output operands, or 1 output plus some clobbers:
1519 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1520 /* Count backwards through CLOBBERs to determine number of SETs. */
1521 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1523 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1525 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1529 /* N_SETS is now number of output operands. */
1532 /* Verify that all the SETs we have
1533 came from a single original asm_operands insn
1534 (so that invalid combinations are blocked). */
1535 for (i
= 0; i
< n_sets
; i
++)
1537 rtx elt
= XVECEXP (body
, 0, i
);
1538 if (GET_CODE (elt
) != SET
)
1540 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1542 /* If these ASM_OPERANDS rtx's came from different original insns
1543 then they aren't allowed together. */
1544 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1545 != ASM_OPERANDS_INPUT_VEC (asm_op
))
1551 /* 0 outputs, but some clobbers:
1552 body is [(asm_operands ...) (clobber (reg ...))...]. */
1553 /* Make sure all the other parallel things really are clobbers. */
1554 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1555 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1560 return (ASM_OPERANDS_INPUT_LENGTH (asm_op
)
1561 + ASM_OPERANDS_LABEL_LENGTH (asm_op
) + n_sets
);
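
/* Worked example (illustrative only, not part of the original source): for a
   source-level statement such as

       asm ("add %0, %1, %2" : "=r" (x) : "r" (y), "r" (z));

   the body is (set (reg x) (asm_operands ...)) with one output, two inputs
   and no labels, so asm_noperands returns 3.  Clobbers never contribute to
   the count; only SETs, inputs and labels do, as the return expression above
   shows.  */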
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */
1575 decode_asm_operands (rtx body
, rtx
*operands
, rtx
**operand_locs
,
1576 const char **constraints
, machine_mode
*modes
,
1579 int nbase
= 0, n
, i
;
1582 switch (GET_CODE (body
))
1585 /* Zero output asm: BODY is (asm_operands ...). */
1590 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1591 asmop
= SET_SRC (body
);
1593 /* The output is in the SET.
1594 Its constraint is in the ASM_OPERANDS itself. */
1596 operands
[0] = SET_DEST (body
);
1598 operand_locs
[0] = &SET_DEST (body
);
1600 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1602 modes
[0] = GET_MODE (SET_DEST (body
));
1608 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1610 asmop
= XVECEXP (body
, 0, 0);
1611 if (GET_CODE (asmop
) == SET
)
1613 asmop
= SET_SRC (asmop
);
1615 /* At least one output, plus some CLOBBERs. The outputs are in
1616 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1617 for (i
= 0; i
< nparallel
; i
++)
1619 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1620 break; /* Past last SET */
1622 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1624 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1626 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1628 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1639 n
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1640 for (i
= 0; i
< n
; i
++)
1643 operand_locs
[nbase
+ i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1645 operands
[nbase
+ i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1647 constraints
[nbase
+ i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1649 modes
[nbase
+ i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1653 n
= ASM_OPERANDS_LABEL_LENGTH (asmop
);
1654 for (i
= 0; i
< n
; i
++)
1657 operand_locs
[nbase
+ i
] = &ASM_OPERANDS_LABEL (asmop
, i
);
1659 operands
[nbase
+ i
] = ASM_OPERANDS_LABEL (asmop
, i
);
1661 constraints
[nbase
+ i
] = "";
1663 modes
[nbase
+ i
] = Pmode
;
1667 *loc
= ASM_OPERANDS_SOURCE_LOCATION (asmop
);
1669 return ASM_OPERANDS_TEMPLATE (asmop
);
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */
1685 get_referenced_operands (const char *string
, bool *used
,
1686 unsigned int noperands
)
1688 memset (used
, 0, sizeof (bool) * noperands
);
1689 const char *p
= string
;
1695 /* A letter followed by a digit indicates an operand number. */
1696 if (ISALPHA (p
[0]) && ISDIGIT (p
[1]))
1701 unsigned long opnum
= strtoul (p
, &endptr
, 10);
1702 if (endptr
!= p
&& opnum
< noperands
)
1716 /* Check if an asm_operand matches its constraints.
1717 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1720 asm_operand_ok (rtx op
, const char *constraint
, const char **constraints
)
1724 bool incdec_ok
= false;
1727 /* Use constrain_operands after reload. */
1728 gcc_assert (!reload_completed
);
1730 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1731 many alternatives as required to match the other operands. */
1732 if (*constraint
== '\0')
1737 enum constraint_num cn
;
1738 char c
= *constraint
;
1746 case '0': case '1': case '2': case '3': case '4':
1747 case '5': case '6': case '7': case '8': case '9':
1748 /* If caller provided constraints pointer, look up
1749 the matching constraint. Otherwise, our caller should have
1750 given us the proper matching constraint, but we can't
1751 actually fail the check if they didn't. Indicate that
1752 results are inconclusive. */
1756 unsigned long match
;
1758 match
= strtoul (constraint
, &end
, 10);
1760 result
= asm_operand_ok (op
, constraints
[match
], NULL
);
1761 constraint
= (const char *) end
;
1767 while (ISDIGIT (*constraint
));
1773 /* The rest of the compiler assumes that reloading the address
1774 of a MEM into a register will make it fit an 'o' constraint.
1775 That is, if it sees a MEM operand for an 'o' constraint,
1776 it assumes that (mem (base-reg)) will fit.
1778 That assumption fails on targets that don't have offsettable
1779 addresses at all. We therefore need to treat 'o' asm
1780 constraints as a special case and only accept operands that
1781 are already offsettable, thus proving that at least one
1782 offsettable address exists. */
1783 case 'o': /* offsettable */
1784 if (offsettable_nonstrict_memref_p (op
))
1789 if (general_operand (op
, VOIDmode
))
1796 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1797 to exist, excepting those that expand_call created. Further,
1798 on some machines which do not have generalized auto inc/dec,
1799 an inc/dec is not a memory_operand.
1801 Match any memory and hope things are resolved after reload. */
1805 cn
= lookup_constraint (constraint
);
1806 switch (get_constraint_type (cn
))
1810 && reg_class_for_constraint (cn
) != NO_REGS
1811 && GET_MODE (op
) != BLKmode
1812 && register_operand (op
, VOIDmode
))
1819 && insn_const_int_ok_for_constraint (INTVAL (op
), cn
))
1824 /* Every memory operand can be reloaded to fit. */
1825 result
= result
|| memory_operand (op
, VOIDmode
);
1829 /* Every address operand can be reloaded to fit. */
1830 result
= result
|| address_operand (op
, VOIDmode
);
1834 result
= result
|| constraint_satisfied_p (op
, cn
);
1839 len
= CONSTRAINT_LEN (c
, constraint
);
1842 while (--len
&& *constraint
);
1848 /* For operands without < or > constraints reject side-effects. */
1849 if (!incdec_ok
&& result
&& MEM_P (op
))
1850 switch (GET_CODE (XEXP (op
, 0)))
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */
1872 find_constant_term_loc (rtx
*p
)
1875 enum rtx_code code
= GET_CODE (*p
);
1877 /* If *P IS such a constant term, P is its location. */
1879 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1883 /* Otherwise, if not a sum, it has no constant term. */
1885 if (GET_CODE (*p
) != PLUS
)
1888 /* If one of the summands is constant, return its location. */
1890 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1891 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1894 /* Otherwise, check each summand for containing a constant term. */
1896 if (XEXP (*p
, 0) != 0)
1898 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1903 if (XEXP (*p
, 1) != 0)
1905 tem
= find_constant_term_loc (&XEXP (*p
, 1));
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */
1954 offsettable_address_addr_space_p (int strictp
, machine_mode mode
, rtx y
,
1957 enum rtx_code ycode
= GET_CODE (y
);
1961 int (*addressp
) (machine_mode
, rtx
, addr_space_t
) =
1962 (strictp
? strict_memory_address_addr_space_p
1963 : memory_address_addr_space_p
);
1964 unsigned int mode_sz
= GET_MODE_SIZE (mode
);
1966 if (CONSTANT_ADDRESS_P (y
))
1969 /* Adjusting an offsettable address involves changing to a narrower mode.
1970 Make sure that's OK. */
1972 if (mode_dependent_address_p (y
, as
))
1975 machine_mode address_mode
= GET_MODE (y
);
1976 if (address_mode
== VOIDmode
)
1977 address_mode
= targetm
.addr_space
.address_mode (as
);
1978 #ifdef POINTERS_EXTEND_UNSIGNED
1979 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
1982 /* ??? How much offset does an offsettable BLKmode reference need?
1983 Clearly that depends on the situation in which it's being used.
1984 However, the current situation in which we test 0xffffffff is
1985 less than ideal. Caveat user. */
1987 mode_sz
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
1989 /* If the expression contains a constant term,
1990 see if it remains valid when max possible offset is added. */
1992 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
1997 *y2
= plus_constant (address_mode
, *y2
, mode_sz
- 1);
1998 /* Use QImode because an odd displacement may be automatically invalid
1999 for any wider mode. But it should be valid for a single byte. */
2000 good
= (*addressp
) (QImode
, y
, as
);
2002 /* In any case, restore old contents of memory. */
2007 if (GET_RTX_CLASS (ycode
) == RTX_AUTOINC
)
2010 /* The offset added here is chosen as the maximum offset that
2011 any instruction could need to add when operating on something
2012 of the specified mode. We assume that if Y and Y+c are
2013 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2014 go inside a LO_SUM here, so we do so as well. */
2015 if (GET_CODE (y
) == LO_SUM
2017 && mode_sz
<= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
)
2018 z
= gen_rtx_LO_SUM (address_mode
, XEXP (y
, 0),
2019 plus_constant (address_mode
, XEXP (y
, 1),
2021 #ifdef POINTERS_EXTEND_UNSIGNED
2022 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2023 else if (POINTERS_EXTEND_UNSIGNED
> 0
2024 && GET_CODE (y
) == ZERO_EXTEND
2025 && GET_MODE (XEXP (y
, 0)) == pointer_mode
)
2026 z
= gen_rtx_ZERO_EXTEND (address_mode
,
2027 plus_constant (pointer_mode
, XEXP (y
, 0),
2031 z
= plus_constant (address_mode
, y
, mode_sz
- 1);
2033 /* Use QImode because an odd displacement may be automatically invalid
2034 for any wider mode. But it should be valid for a single byte. */
2035 return (*addressp
) (QImode
, z
, as
);
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}
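
/* Example (illustrative only, not part of the original source): an address
   such as (post_inc:P (reg sp)) is mode-dependent because the implied
   increment equals the size of the access, so the same address rtx would
   mean different side effects for a QImode and an SImode reference.  A plain
   (plus (reg base) (const_int 4)) address, by contrast, means the same thing
   at every access mode the target accepts.  */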
2061 /* Return true if boolean attribute ATTR is supported. */
2064 have_bool_attr (bool_attr attr
)
2069 return HAVE_ATTR_enabled
;
2070 case BA_PREFERRED_FOR_SIZE
:
2071 return HAVE_ATTR_enabled
|| HAVE_ATTR_preferred_for_size
;
2072 case BA_PREFERRED_FOR_SPEED
:
2073 return HAVE_ATTR_enabled
|| HAVE_ATTR_preferred_for_speed
;
2078 /* Return the value of ATTR for instruction INSN. */
2081 get_bool_attr (rtx_insn
*insn
, bool_attr attr
)
2086 return get_attr_enabled (insn
);
2087 case BA_PREFERRED_FOR_SIZE
:
2088 return get_attr_enabled (insn
) && get_attr_preferred_for_size (insn
);
2089 case BA_PREFERRED_FOR_SPEED
:
2090 return get_attr_enabled (insn
) && get_attr_preferred_for_speed (insn
);
2095 /* Like get_bool_attr_mask, but don't use the cache. */
2097 static alternative_mask
2098 get_bool_attr_mask_uncached (rtx_insn
*insn
, bool_attr attr
)
2100 /* Temporarily install enough information for get_attr_<foo> to assume
2101 that the insn operands are already cached. As above, the attribute
2102 mustn't depend on the values of operands, so we don't provide their
2103 real values here. */
2104 rtx_insn
*old_insn
= recog_data
.insn
;
2105 int old_alternative
= which_alternative
;
2107 recog_data
.insn
= insn
;
2108 alternative_mask mask
= ALL_ALTERNATIVES
;
2109 int n_alternatives
= insn_data
[INSN_CODE (insn
)].n_alternatives
;
2110 for (int i
= 0; i
< n_alternatives
; i
++)
2112 which_alternative
= i
;
2113 if (!get_bool_attr (insn
, attr
))
2114 mask
&= ~ALTERNATIVE_BIT (i
);
2117 recog_data
.insn
= old_insn
;
2118 which_alternative
= old_alternative
;
2122 /* Return the mask of operand alternatives that are allowed for INSN
2123 by boolean attribute ATTR. This mask depends only on INSN and on
2124 the current target; it does not depend on things like the values of
2127 static alternative_mask
2128 get_bool_attr_mask (rtx_insn
*insn
, bool_attr attr
)
2130 /* Quick exit for asms and for targets that don't use these attributes. */
2131 int code
= INSN_CODE (insn
);
2132 if (code
< 0 || !have_bool_attr (attr
))
2133 return ALL_ALTERNATIVES
;
2135 /* Calling get_attr_<foo> can be expensive, so cache the mask
2137 if (!this_target_recog
->x_bool_attr_masks
[code
][attr
])
2138 this_target_recog
->x_bool_attr_masks
[code
][attr
]
2139 = get_bool_attr_mask_uncached (insn
, attr
);
2140 return this_target_recog
->x_bool_attr_masks
[code
][attr
];
/* Return the set of alternatives of INSN that are allowed by the current
   target.  */

get_enabled_alternatives (rtx_insn *insn)
{
  return get_bool_attr_mask (insn, BA_ENABLED);
}
2152 /* Return the set of alternatives of INSN that are allowed by the current
2153 target and are preferred for the current size/speed optimization
2157 get_preferred_alternatives (rtx_insn
*insn
)
2159 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn
)))
2160 return get_bool_attr_mask (insn
, BA_PREFERRED_FOR_SPEED
);
2162 return get_bool_attr_mask (insn
, BA_PREFERRED_FOR_SIZE
);
2165 /* Return the set of alternatives of INSN that are allowed by the current
2166 target and are preferred for the size/speed optimization choice
2167 associated with BB. Passing a separate BB is useful if INSN has not
2168 been emitted yet or if we are considering moving it to a different
2172 get_preferred_alternatives (rtx_insn
*insn
, basic_block bb
)
2174 if (optimize_bb_for_speed_p (bb
))
2175 return get_bool_attr_mask (insn
, BA_PREFERRED_FOR_SPEED
);
2177 return get_bool_attr_mask (insn
, BA_PREFERRED_FOR_SIZE
);
2180 /* Assert that the cached boolean attributes for INSN are still accurate.
2181 The backend is required to define these attributes in a way that only
2182 depends on the current target (rather than operands, compiler phase,
2186 check_bool_attrs (rtx_insn
*insn
)
2188 int code
= INSN_CODE (insn
);
2190 for (int i
= 0; i
<= BA_LAST
; ++i
)
2192 enum bool_attr attr
= (enum bool_attr
) i
;
2193 if (this_target_recog
->x_bool_attr_masks
[code
][attr
])
2194 gcc_assert (this_target_recog
->x_bool_attr_masks
[code
][attr
]
2195 == get_bool_attr_mask_uncached (insn
, attr
));
/* Like extract_insn, but save the insn extracted and don't extract again,
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */

extract_insn_cached (rtx_insn *insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
2213 /* Do uncached extract_insn, constrain_operands and complain about failures.
2214 This should be used when extracting a pre-existing constrained instruction
2215 if the caller wants to know which alternative was chosen. */
2217 extract_constrain_insn (rtx_insn
*insn
)
2219 extract_insn (insn
);
2220 if (!constrain_operands (reload_completed
, get_enabled_alternatives (insn
)))
2221 fatal_insn_not_found (insn
);
2224 /* Do cached extract_insn, constrain_operands and complain about failures.
2225 Used by insn_attrtab. */
2227 extract_constrain_insn_cached (rtx_insn
*insn
)
2229 extract_insn_cached (insn
);
2230 if (which_alternative
== -1
2231 && !constrain_operands (reload_completed
,
2232 get_enabled_alternatives (insn
)))
2233 fatal_insn_not_found (insn
);
2236 /* Do cached constrain_operands on INSN and complain about failures. */
2238 constrain_operands_cached (rtx_insn
*insn
, int strict
)
2240 if (which_alternative
== -1)
2241 return constrain_operands (strict
, get_enabled_alternatives (insn
));
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx_insn *insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  gcc_assert (noperands <= MAX_RECOG_OPERANDS);

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode, NULL);
	  memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
	  if (noperands > 0)
	    {
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  recog_data.is_asm = true;
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  recog_data.insn = NULL;
  which_alternative = -1;
}
/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
   N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
   OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
   has N_OPERANDS entries.  */

void
preprocess_constraints (int n_operands, int n_alternatives,
			const char **constraints,
			operand_alternative *op_alt_base)
{
  for (int i = 0; i < n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = constraints[i];

      op_alt = op_alt_base;

      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
	{
	  op_alt[i].cl = NO_REGS;
	  op_alt[i].constraint = p;
	  op_alt[i].matches = -1;
	  op_alt[i].matched = -1;

	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[i].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p;
	      if (c == '#')
		do
		  c = *++p;
		while (c != ',' && c != '\0');
	      if (c == ',' || c == '\0')
		{
		  p++;
		  break;
		}

	      switch (c)
		{
		case '?':
		  op_alt[i].reject += 6;
		  break;
		case '!':
		  op_alt[i].reject += 600;
		  break;
		case '&':
		  op_alt[i].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  {
		    char *end;
		    op_alt[i].matches = strtoul (p, &end, 10);
		    op_alt[op_alt[i].matches].matched = i;
		    p = end;
		  }
		  continue;

		case 'X':
		  op_alt[i].anything_ok = 1;
		  break;

		case 'g':
		  op_alt[i].cl =
		   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
		  break;

		default:
		  enum constraint_num cn = lookup_constraint (p);
		  enum reg_class cl;
		  switch (get_constraint_type (cn))
		    {
		    case CT_REGISTER:
		      cl = reg_class_for_constraint (cn);
		      if (cl != NO_REGS)
			op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
		      break;

		    case CT_CONST_INT:
		      break;

		    case CT_MEMORY:
		      op_alt[i].memory_ok = 1;
		      break;

		    case CT_ADDRESS:
		      op_alt[i].is_address = 1;
		      op_alt[i].cl
			= (reg_class_subunion
			   [(int) op_alt[i].cl]
			   [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
						  ADDRESS, SCRATCH)]);
		      break;

		    case CT_FIXED_FORM:
		      break;
		    }
		  break;
		}
	      p += CONSTRAINT_LEN (c, p);
	    }
	}
    }
}
/* Return the array of operand_alternative structures for
   instruction ICODE.  */

const operand_alternative *
preprocess_insn_constraints (int icode)
{
  gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
  if (this_target_recog->x_op_alt[icode])
    return this_target_recog->x_op_alt[icode];

  int n_operands = insn_data[icode].n_operands;
  if (n_operands == 0)
    return 0;
  /* Always provide at least one alternative so that which_op_alt ()
     works correctly.  If the instruction has 0 alternatives (i.e. all
     constraint strings are empty) then each operand in this alternative
     will have anything_ok set.  */
  int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
  int n_entries = n_operands * n_alternatives;

  operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
  const char **constraints = XALLOCAVEC (const char *, n_operands);

  for (int i = 0; i < n_operands; ++i)
    constraints[i] = insn_data[icode].operand[i].constraint;
  preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);

  this_target_recog->x_op_alt[icode] = op_alt;
  return op_alt;
}
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

void
preprocess_constraints (rtx_insn *insn)
{
  int icode = INSN_CODE (insn);
  if (icode >= 0)
    recog_op_alt = preprocess_insn_constraints (icode);
  else
    {
      int n_operands = recog_data.n_operands;
      int n_alternatives = recog_data.n_alternatives;
      int n_entries = n_operands * n_alternatives;
      memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
      preprocess_constraints (n_operands, n_alternatives,
			      recog_data.constraints, asm_op_alt);
      recog_op_alt = asm_op_alt;
    }
}
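/* Usage sketch (illustrative only): after extract_insn and
   preprocess_constraints, the data for operand OP of alternative A lives
   at recog_op_alt[A * recog_data.n_operands + OP], so a caller scanning a
   particular alternative ALT might write

     const operand_alternative *op_alt
       = &recog_op_alt[alt * recog_data.n_operands];
     for (int op = 0; op < recog_data.n_operands; op++)
       if (op_alt[op].memory_ok)
	 ...;

   The variable names here are hypothetical; only the indexing scheme is
   fixed by the functions above.  */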
/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they match any of the alternatives in ALTERNATIVES.

   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */
2548 constrain_operands (int strict
, alternative_mask alternatives
)
2550 const char *constraints
[MAX_RECOG_OPERANDS
];
2551 int matching_operands
[MAX_RECOG_OPERANDS
];
2552 int earlyclobber
[MAX_RECOG_OPERANDS
];
2555 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2556 int funny_match_index
;
2558 which_alternative
= 0;
2559 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2562 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2564 constraints
[c
] = recog_data
.constraints
[c
];
2565 matching_operands
[c
] = -1;
2570 int seen_earlyclobber_at
= -1;
2573 funny_match_index
= 0;
2575 if (!TEST_BIT (alternatives
, which_alternative
))
2579 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2580 constraints
[i
] = skip_alternative (constraints
[i
]);
2582 which_alternative
++;
2586 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2588 rtx op
= recog_data
.operand
[opno
];
2589 machine_mode mode
= GET_MODE (op
);
2590 const char *p
= constraints
[opno
];
2596 earlyclobber
[opno
] = 0;
2598 /* A unary operator may be accepted by the predicate, but it
2599 is irrelevant for matching constraints. */
2603 if (GET_CODE (op
) == SUBREG
)
2605 if (REG_P (SUBREG_REG (op
))
2606 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2607 offset
= subreg_regno_offset (REGNO (SUBREG_REG (op
)),
2608 GET_MODE (SUBREG_REG (op
)),
2611 op
= SUBREG_REG (op
);
2614 /* An empty constraint or empty alternative
2615 allows anything which matched the pattern. */
2616 if (*p
== 0 || *p
== ',')
2620 switch (c
= *p
, len
= CONSTRAINT_LEN (c
, p
), c
)
2630 /* Ignore rest of this alternative as far as
2631 constraint checking is concerned. */
2634 while (*p
&& *p
!= ',');
2639 earlyclobber
[opno
] = 1;
2640 if (seen_earlyclobber_at
< 0)
2641 seen_earlyclobber_at
= opno
;
2644 case '0': case '1': case '2': case '3': case '4':
2645 case '5': case '6': case '7': case '8': case '9':
2647 /* This operand must be the same as a previous one.
2648 This kind of constraint is used for instructions such
2649 as add when they take only two operands.
2651 Note that the lower-numbered operand is passed first.
2653 If we are not testing strictly, assume that this
2654 constraint will be satisfied. */
2659 match
= strtoul (p
, &end
, 10);
2666 rtx op1
= recog_data
.operand
[match
];
2667 rtx op2
= recog_data
.operand
[opno
];
2669 /* A unary operator may be accepted by the predicate,
2670 but it is irrelevant for matching constraints. */
2672 op1
= XEXP (op1
, 0);
2674 op2
= XEXP (op2
, 0);
2676 val
= operands_match_p (op1
, op2
);
2679 matching_operands
[opno
] = match
;
2680 matching_operands
[match
] = opno
;
2685 /* If output is *x and input is *--x, arrange later
2686 to change the output to *--x as well, since the
2687 output op is the one that will be printed. */
2688 if (val
== 2 && strict
> 0)
2690 funny_match
[funny_match_index
].this_op
= opno
;
2691 funny_match
[funny_match_index
++].other
= match
;
2698 /* p is used for address_operands. When we are called by
2699 gen_reload, no one will have checked that the address is
2700 strictly valid, i.e., that all pseudos requiring hard regs
2701 have gotten them. */
2703 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2708 /* No need to check general_operand again;
2709 it was done in insn-recog.c. Well, except that reload
2710 doesn't check the validity of its replacements, but
2711 that should only matter when there's a bug. */
2713 /* Anything goes unless it is a REG and really has a hard reg
2714 but the hard reg is not in the class GENERAL_REGS. */
2718 || GENERAL_REGS
== ALL_REGS
2719 || (reload_in_progress
2720 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2721 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2724 else if (strict
< 0 || general_operand (op
, mode
))
2730 enum constraint_num cn
= lookup_constraint (p
);
2731 enum reg_class cl
= reg_class_for_constraint (cn
);
2737 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2738 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2740 && reg_fits_class_p (op
, cl
, offset
, mode
)))
2744 else if (constraint_satisfied_p (op
, cn
))
2747 else if (insn_extra_memory_constraint (cn
)
2748 /* Every memory operand can be reloaded to fit. */
2749 && ((strict
< 0 && MEM_P (op
))
2750 /* Before reload, accept what reload can turn
2752 || (strict
< 0 && CONSTANT_P (op
))
2753 /* Before reload, accept a pseudo,
2754 since LRA can turn it into a mem. */
2755 || (strict
< 0 && targetm
.lra_p () && REG_P (op
)
2756 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2757 /* During reload, accept a pseudo */
2758 || (reload_in_progress
&& REG_P (op
)
2759 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)))
2761 else if (insn_extra_address_constraint (cn
)
2762 /* Every address operand can be reloaded to fit. */
2765 /* Cater to architectures like IA-64 that define extra memory
2766 constraints without using define_memory_constraint. */
2767 else if (reload_in_progress
2769 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
2770 && reg_renumber
[REGNO (op
)] < 0
2771 && reg_equiv_mem (REGNO (op
)) != 0
2772 && constraint_satisfied_p
2773 (reg_equiv_mem (REGNO (op
)), cn
))
2778 while (p
+= len
, c
);
2780 constraints
[opno
] = p
;
2781 /* If this operand did not win somehow,
2782 this alternative loses. */
2786 /* This alternative won; the operands are ok.
2787 Change whichever operands this alternative says to change. */
2792 /* See if any earlyclobber operand conflicts with some other
2795 if (strict
> 0 && seen_earlyclobber_at
>= 0)
2796 for (eopno
= seen_earlyclobber_at
;
2797 eopno
< recog_data
.n_operands
;
2799 /* Ignore earlyclobber operands now in memory,
2800 because we would often report failure when we have
2801 two memory operands, one of which was formerly a REG. */
2802 if (earlyclobber
[eopno
]
2803 && REG_P (recog_data
.operand
[eopno
]))
2804 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2805 if ((MEM_P (recog_data
.operand
[opno
])
2806 || recog_data
.operand_type
[opno
] != OP_OUT
)
2808 /* Ignore things like match_operator operands. */
2809 && *recog_data
.constraints
[opno
] != 0
2810 && ! (matching_operands
[opno
] == eopno
2811 && operands_match_p (recog_data
.operand
[opno
],
2812 recog_data
.operand
[eopno
]))
2813 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2814 recog_data
.operand
[eopno
]))
2819 while (--funny_match_index
>= 0)
2821 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2822 = recog_data
.operand
[funny_match
[funny_match_index
].this_op
];
2826 /* For operands without < or > constraints reject side-effects. */
2827 if (recog_data
.is_asm
)
2829 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2830 if (MEM_P (recog_data
.operand
[opno
]))
2831 switch (GET_CODE (XEXP (recog_data
.operand
[opno
], 0)))
2839 if (strchr (recog_data
.constraints
[opno
], '<') == NULL
2840 && strchr (recog_data
.constraints
[opno
], '>')
2853 which_alternative
++;
2855 while (which_alternative
< recog_data
.n_alternatives
);
2857 which_alternative
= -1;
2858 /* If we are about to reject this, but we are not to test strictly,
2859 try a very loose test. Only return failure if it fails also. */
2861 return constrain_operands (-1, alternatives
);
/* Return true iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
		  machine_mode mode)
{
  unsigned int regno = REGNO (operand);

  if (cl == NO_REGS)
    return false;

  /* Regno must not be a pseudo register.  Offset may be negative.  */
  return (HARD_REGISTER_NUM_P (regno)
	  && HARD_REGISTER_NUM_P (regno + offset)
	  && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
				regno + offset));
}
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx_insn *
split_insn (rtx_insn *insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx_insn *first = PREV_INSN (insn);
  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
	{
	  note = find_reg_equal_equiv_note (insn);
	  if (note && CONSTANT_P (XEXP (note, 0)))
	    set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
	  else if (CONSTANT_P (SET_SRC (insn_set)))
	    set_unique_reg_note (last, REG_EQUAL,
				 copy_rtx (SET_SRC (insn_set)));
	}
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }

  return last;
}
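/* Illustrative note: the work done by try_split above is driven by
   define_split / define_insn_and_split patterns in the target's .md file.
   A hypothetical pattern of the rough shape

     (define_split
       [(set (match_operand:SI 0 "register_operand" "")
	     (match_operand:SI 1 "memory_operand" ""))]
       "reload_completed"
       [(set (match_dup 0) (match_dup 1))]
       "")

   is what turns one insn into the sequence whose last insn is returned
   here.  The pattern shown is only a sketch, not taken from any port.  */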
/* Split all insns in the function.  */

void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles LIBCALL blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    delete_insn_and_edges (insn);
		}
	      else
		{
		  if (split_insn (insn))
		    {
		      bitmap_set_bit (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  default_rtl_profile ();
  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx_insn *next, *insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles LIBCALL blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
  return 0;
}
#ifdef HAVE_peephole2

struct peep2_insn_data
{
  rtx_insn *insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A marker indicating the last insn of the block.  The live_before regset
   for this element is correct, indicating DF_LIVE_OUT for the block.  */
#define PEEP2_EOB invalid_insn_rtx

/* Wrap N to fit into the peep2_insn_data buffer.  */

static int
peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}
/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx_insn *
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n = peep2_buf_position (peep2_current + n);

  return peep2_insn_data[n].insn;
}
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  unsigned int end_regno = END_REGNO (reg);
  for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
      return 0;
  return 1;
}
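/* Illustrative note: these helpers are what define_peephole2 conditions in
   a target's .md file typically call.  A hypothetical pattern such as

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
	     (match_operand:SI 1 "register_operand" ""))
	(set (match_operand:SI 2 "register_operand" "")
	     (match_dup 0))]
       "peep2_reg_dead_p (2, operands[0])"
       [(set (match_dup 2) (match_dup 1))])

   asks whether operands[0] is dead after the two matched insns before
   collapsing them into one move.  The pattern is only a sketch of the
   calling convention, not taken from a real port.  */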
3120 /* Regno offset to be used in the register search. */
static int search_ofs;
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */
3135 peep2_find_free_register (int from
, int to
, const char *class_str
,
3136 machine_mode mode
, HARD_REG_SET
*reg_set
)
3143 gcc_assert (from
< MAX_INSNS_PER_PEEP2
+ 1);
3144 gcc_assert (to
< MAX_INSNS_PER_PEEP2
+ 1);
3146 from
= peep2_buf_position (peep2_current
+ from
);
3147 to
= peep2_buf_position (peep2_current
+ to
);
3149 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
3150 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
3154 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
3156 /* Don't use registers set or clobbered by the insn. */
3157 FOR_EACH_INSN_DEF (def
, peep2_insn_data
[from
].insn
)
3158 SET_HARD_REG_BIT (live
, DF_REF_REGNO (def
));
3160 from
= peep2_buf_position (from
+ 1);
3163 cl
= reg_class_for_constraint (lookup_constraint (class_str
));
3165 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3167 int raw_regno
, regno
, success
, j
;
3169 /* Distribute the free registers as much as possible. */
3170 raw_regno
= search_ofs
+ i
;
3171 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
3172 raw_regno
-= FIRST_PSEUDO_REGISTER
;
3173 #ifdef REG_ALLOC_ORDER
3174 regno
= reg_alloc_order
[raw_regno
];
3179 /* Can it support the mode we need? */
3180 if (! HARD_REGNO_MODE_OK (regno
, mode
))
3184 for (j
= 0; success
&& j
< hard_regno_nregs
[regno
][mode
]; j
++)
3186 /* Don't allocate fixed registers. */
3187 if (fixed_regs
[regno
+ j
])
3192 /* Don't allocate global registers. */
3193 if (global_regs
[regno
+ j
])
3198 /* Make sure the register is of the right class. */
3199 if (! TEST_HARD_REG_BIT (reg_class_contents
[cl
], regno
+ j
))
3204 /* And that we don't create an extra save/restore. */
3205 if (! call_used_regs
[regno
+ j
] && ! df_regs_ever_live_p (regno
+ j
))
3211 if (! targetm
.hard_regno_scratch_ok (regno
+ j
))
3217 /* And we don't clobber traceback for noreturn functions. */
3218 if ((regno
+ j
== FRAME_POINTER_REGNUM
3219 || regno
+ j
== HARD_FRAME_POINTER_REGNUM
)
3220 && (! reload_completed
|| frame_pointer_needed
))
3226 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
3227 || TEST_HARD_REG_BIT (live
, regno
+ j
))
3236 add_to_hard_reg_set (reg_set
, mode
, regno
);
3238 /* Start the next search with the next register. */
3239 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
3241 search_ofs
= raw_regno
;
3243 return gen_rtx_REG (mode
, regno
);
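/* Illustrative note: the usual way peep2_find_free_register is reached is
   through a (match_scratch ...) operand in a define_peephole2, e.g. a
   hypothetical pattern like

     (define_peephole2
       [(match_scratch:SI 2 "r")
	(set (match_operand:SI 0 "memory_operand" "")
	     (match_operand:SI 1 "immediate_operand" ""))]
       ""
       [(set (match_dup 2) (match_dup 1))
	(set (match_dup 0) (match_dup 2))])

   for which the generated matcher calls this function to obtain a scratch
   register that is free across the matched insns.  The pattern is a sketch,
   not taken from a particular port.  */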
/* Forget all currently tracked instructions, only remember the current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last hold invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}
3271 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3272 starting at INSN. Perform the replacement, removing the old insns and
3273 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3274 if the replacement is rejected. */
3277 peep2_attempt (basic_block bb
, rtx_insn
*insn
, int match_len
, rtx_insn
*attempt
)
3280 rtx_insn
*last
, *before_try
, *x
;
3281 rtx eh_note
, as_note
;
3284 bool was_call
= false;
3286 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3287 match more than one insn, or to be split into more than one insn. */
3288 old_insn
= peep2_insn_data
[peep2_current
].insn
;
3289 if (RTX_FRAME_RELATED_P (old_insn
))
3291 bool any_note
= false;
3297 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3298 may be in the stream for the purpose of register allocation. */
3299 if (active_insn_p (attempt
))
3302 new_insn
= next_active_insn (attempt
);
3303 if (next_active_insn (new_insn
))
3306 /* We have a 1-1 replacement. Copy over any frame-related info. */
3307 RTX_FRAME_RELATED_P (new_insn
) = 1;
3309 /* Allow the backend to fill in a note during the split. */
3310 for (note
= REG_NOTES (new_insn
); note
; note
= XEXP (note
, 1))
3311 switch (REG_NOTE_KIND (note
))
3313 case REG_FRAME_RELATED_EXPR
:
3314 case REG_CFA_DEF_CFA
:
3315 case REG_CFA_ADJUST_CFA
:
3316 case REG_CFA_OFFSET
:
3317 case REG_CFA_REGISTER
:
3318 case REG_CFA_EXPRESSION
:
3319 case REG_CFA_RESTORE
:
3320 case REG_CFA_SET_VDRAP
:
3327 /* If the backend didn't supply a note, copy one over. */
3329 for (note
= REG_NOTES (old_insn
); note
; note
= XEXP (note
, 1))
3330 switch (REG_NOTE_KIND (note
))
3332 case REG_FRAME_RELATED_EXPR
:
3333 case REG_CFA_DEF_CFA
:
3334 case REG_CFA_ADJUST_CFA
:
3335 case REG_CFA_OFFSET
:
3336 case REG_CFA_REGISTER
:
3337 case REG_CFA_EXPRESSION
:
3338 case REG_CFA_RESTORE
:
3339 case REG_CFA_SET_VDRAP
:
3340 add_reg_note (new_insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3347 /* If there still isn't a note, make sure the unwind info sees the
3348 same expression as before the split. */
3351 rtx old_set
, new_set
;
3353 /* The old insn had better have been simple, or annotated. */
3354 old_set
= single_set (old_insn
);
3355 gcc_assert (old_set
!= NULL
);
3357 new_set
= single_set (new_insn
);
3358 if (!new_set
|| !rtx_equal_p (new_set
, old_set
))
3359 add_reg_note (new_insn
, REG_FRAME_RELATED_EXPR
, old_set
);
3362 /* Copy prologue/epilogue status. This is required in order to keep
3363 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3364 maybe_copy_prologue_epilogue_insn (old_insn
, new_insn
);
3367 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3368 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3369 cfg-related call notes. */
3370 for (i
= 0; i
<= match_len
; ++i
)
3375 j
= peep2_buf_position (peep2_current
+ i
);
3376 old_insn
= peep2_insn_data
[j
].insn
;
3377 if (!CALL_P (old_insn
))
3382 while (new_insn
!= NULL_RTX
)
3384 if (CALL_P (new_insn
))
3386 new_insn
= NEXT_INSN (new_insn
);
3389 gcc_assert (new_insn
!= NULL_RTX
);
3391 CALL_INSN_FUNCTION_USAGE (new_insn
)
3392 = CALL_INSN_FUNCTION_USAGE (old_insn
);
3393 SIBLING_CALL_P (new_insn
) = SIBLING_CALL_P (old_insn
);
3395 for (note
= REG_NOTES (old_insn
);
3397 note
= XEXP (note
, 1))
3398 switch (REG_NOTE_KIND (note
))
3403 add_reg_note (new_insn
, REG_NOTE_KIND (note
),
3407 /* Discard all other reg notes. */
3411 /* Croak if there is another call in the sequence. */
3412 while (++i
<= match_len
)
3414 j
= peep2_buf_position (peep2_current
+ i
);
3415 old_insn
= peep2_insn_data
[j
].insn
;
3416 gcc_assert (!CALL_P (old_insn
));
3421 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3422 move those notes over to the new sequence. */
3424 for (i
= match_len
; i
>= 0; --i
)
3426 int j
= peep2_buf_position (peep2_current
+ i
);
3427 old_insn
= peep2_insn_data
[j
].insn
;
3429 as_note
= find_reg_note (old_insn
, REG_ARGS_SIZE
, NULL
);
3434 i
= peep2_buf_position (peep2_current
+ match_len
);
3435 eh_note
= find_reg_note (peep2_insn_data
[i
].insn
, REG_EH_REGION
, NULL_RTX
);
3437 /* Replace the old sequence with the new. */
3438 rtx_insn
*peepinsn
= peep2_insn_data
[i
].insn
;
3439 last
= emit_insn_after_setloc (attempt
,
3440 peep2_insn_data
[i
].insn
,
3441 INSN_LOCATION (peepinsn
));
3442 before_try
= PREV_INSN (insn
);
3443 delete_insn_chain (insn
, peep2_insn_data
[i
].insn
, false);
3445 /* Re-insert the EH_REGION notes. */
3446 if (eh_note
|| (was_call
&& nonlocal_goto_handler_labels
))
3451 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
3452 if (eh_edge
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
))
3456 copy_reg_eh_region_note_backward (eh_note
, last
, before_try
);
3459 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
3460 if (x
!= BB_END (bb
)
3461 && (can_throw_internal (x
)
3462 || can_nonlocal_goto (x
)))
3467 nfte
= split_block (bb
, x
);
3468 flags
= (eh_edge
->flags
3469 & (EDGE_EH
| EDGE_ABNORMAL
));
3471 flags
|= EDGE_ABNORMAL_CALL
;
3472 nehe
= make_edge (nfte
->src
, eh_edge
->dest
,
3475 nehe
->probability
= eh_edge
->probability
;
3477 = REG_BR_PROB_BASE
- nehe
->probability
;
3479 peep2_do_cleanup_cfg
|= purge_dead_edges (nfte
->dest
);
3484 /* Converting possibly trapping insn to non-trapping is
3485 possible. Zap dummy outgoing edges. */
3486 peep2_do_cleanup_cfg
|= purge_dead_edges (bb
);
3489 /* Re-insert the ARGS_SIZE notes. */
3491 fixup_args_size_notes (before_try
, last
, INTVAL (XEXP (as_note
, 0)));
3493 /* If we generated a jump instruction, it won't have
3494 JUMP_LABEL set. Recompute after we're done. */
3495 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
3498 peep2_do_rebuild_jump_labels
= true;
3505 /* After performing a replacement in basic block BB, fix up the life
3506 information in our buffer. LAST is the last of the insns that we
3507 emitted as a replacement. PREV is the insn before the start of
3508 the replacement. MATCH_LEN is the number of instructions that were
3509 matched, and which now need to be replaced in the buffer. */
3512 peep2_update_life (basic_block bb
, int match_len
, rtx_insn
*last
,
3515 int i
= peep2_buf_position (peep2_current
+ match_len
+ 1);
3519 INIT_REG_SET (&live
);
3520 COPY_REG_SET (&live
, peep2_insn_data
[i
].live_before
);
3522 gcc_assert (peep2_current_count
>= match_len
+ 1);
3523 peep2_current_count
-= match_len
+ 1;
3531 if (peep2_current_count
< MAX_INSNS_PER_PEEP2
)
3533 peep2_current_count
++;
3535 i
= MAX_INSNS_PER_PEEP2
;
3536 peep2_insn_data
[i
].insn
= x
;
3537 df_simulate_one_insn_backwards (bb
, x
, &live
);
3538 COPY_REG_SET (peep2_insn_data
[i
].live_before
, &live
);
3544 CLEAR_REG_SET (&live
);
3549 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3550 Return true if we added it, false otherwise. The caller will try to match
3551 peepholes against the buffer if we return false; otherwise it will try to
3552 add more instructions to the buffer. */
3555 peep2_fill_buffer (basic_block bb
, rtx_insn
*insn
, regset live
)
3559 /* Once we have filled the maximum number of insns the buffer can hold,
3560 allow the caller to match the insns against peepholes. We wait until
3561 the buffer is full in case the target has similar peepholes of different
3562 length; we always want to match the longest if possible. */
3563 if (peep2_current_count
== MAX_INSNS_PER_PEEP2
)
3566 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3567 any other pattern, lest it change the semantics of the frame info. */
3568 if (RTX_FRAME_RELATED_P (insn
))
3570 /* Let the buffer drain first. */
3571 if (peep2_current_count
> 0)
3573 /* Now the insn will be the only thing in the buffer. */
3576 pos
= peep2_buf_position (peep2_current
+ peep2_current_count
);
3577 peep2_insn_data
[pos
].insn
= insn
;
3578 COPY_REG_SET (peep2_insn_data
[pos
].live_before
, live
);
3579 peep2_current_count
++;
3581 df_simulate_one_insn_forwards (bb
, insn
, live
);
3585 /* Perform the peephole2 optimization pass. */
3588 peephole2_optimize (void)
3595 peep2_do_cleanup_cfg
= false;
3596 peep2_do_rebuild_jump_labels
= false;
3598 df_set_flags (DF_LR_RUN_DCE
);
3599 df_note_add_problem ();
3602 /* Initialize the regsets we're going to use. */
3603 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3604 peep2_insn_data
[i
].live_before
= BITMAP_ALLOC (®_obstack
);
3606 live
= BITMAP_ALLOC (®_obstack
);
3608 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
3610 bool past_end
= false;
3613 rtl_profile_for_bb (bb
);
3615 /* Start up propagation. */
3616 bitmap_copy (live
, DF_LR_IN (bb
));
3617 df_simulate_initialize_forwards (bb
, live
);
3618 peep2_reinit_state (live
);
3620 insn
= BB_HEAD (bb
);
3623 rtx_insn
*attempt
, *head
;
3626 if (!past_end
&& !NONDEBUG_INSN_P (insn
))
3629 insn
= NEXT_INSN (insn
);
3630 if (insn
== NEXT_INSN (BB_END (bb
)))
3634 if (!past_end
&& peep2_fill_buffer (bb
, insn
, live
))
3637 /* If we did not fill an empty buffer, it signals the end of the
3639 if (peep2_current_count
== 0)
3642 /* The buffer filled to the current maximum, so try to match. */
3644 pos
= peep2_buf_position (peep2_current
+ peep2_current_count
);
3645 peep2_insn_data
[pos
].insn
= PEEP2_EOB
;
3646 COPY_REG_SET (peep2_insn_data
[pos
].live_before
, live
);
3648 /* Match the peephole. */
3649 head
= peep2_insn_data
[peep2_current
].insn
;
3650 attempt
= peephole2_insns (PATTERN (head
), head
, &match_len
);
3651 if (attempt
!= NULL
)
3653 rtx_insn
*last
= peep2_attempt (bb
, head
, match_len
, attempt
);
3656 peep2_update_life (bb
, match_len
, last
, PREV_INSN (attempt
));
3661 /* No match: advance the buffer by one insn. */
3662 peep2_current
= peep2_buf_position (peep2_current
+ 1);
3663 peep2_current_count
--;
3667 default_rtl_profile ();
3668 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3669 BITMAP_FREE (peep2_insn_data
[i
].live_before
);
3671 if (peep2_do_rebuild_jump_labels
)
3672 rebuild_jump_labels (get_insns ());
3673 if (peep2_do_cleanup_cfg
)
3674 cleanup_cfg (CLEANUP_CFG_CHANGED
);
3676 #endif /* HAVE_peephole2 */
3678 /* Common predicates for use with define_bypass. */
3680 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3681 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3682 must be either a single_set or a PARALLEL with SETs inside. */
3685 store_data_bypass_p (rtx_insn
*out_insn
, rtx_insn
*in_insn
)
3687 rtx out_set
, in_set
;
3688 rtx out_pat
, in_pat
;
3689 rtx out_exp
, in_exp
;
3692 in_set
= single_set (in_insn
);
3695 if (!MEM_P (SET_DEST (in_set
)))
3698 out_set
= single_set (out_insn
);
3701 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_set
)))
3706 out_pat
= PATTERN (out_insn
);
3708 if (GET_CODE (out_pat
) != PARALLEL
)
3711 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3713 out_exp
= XVECEXP (out_pat
, 0, i
);
3715 if (GET_CODE (out_exp
) == CLOBBER
)
3718 gcc_assert (GET_CODE (out_exp
) == SET
);
3720 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_set
)))
3727 in_pat
= PATTERN (in_insn
);
3728 gcc_assert (GET_CODE (in_pat
) == PARALLEL
);
3730 for (i
= 0; i
< XVECLEN (in_pat
, 0); i
++)
3732 in_exp
= XVECEXP (in_pat
, 0, i
);
3734 if (GET_CODE (in_exp
) == CLOBBER
)
3737 gcc_assert (GET_CODE (in_exp
) == SET
);
3739 if (!MEM_P (SET_DEST (in_exp
)))
3742 out_set
= single_set (out_insn
);
3745 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_exp
)))
3750 out_pat
= PATTERN (out_insn
);
3751 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3753 for (j
= 0; j
< XVECLEN (out_pat
, 0); j
++)
3755 out_exp
= XVECEXP (out_pat
, 0, j
);
3757 if (GET_CODE (out_exp
) == CLOBBER
)
3760 gcc_assert (GET_CODE (out_exp
) == SET
);
3762 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_exp
)))
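/* Illustrative note: store_data_bypass_p (and if_test_bypass_p below) are
   intended to be used as the GUARD function of a define_bypass in a
   scheduling description, e.g. something along the lines of

     (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")

   where "alu_insn" and "store_insn" are hypothetical insn reservation
   names; the reduced latency then applies only when the dependency is on
   the stored data rather than on the address.  */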
3772 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3773 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3774 or multiple set; IN_INSN should be single_set for truth, but for convenience
3775 of insn categorization may be any JUMP or CALL insn. */
3778 if_test_bypass_p (rtx_insn
*out_insn
, rtx_insn
*in_insn
)
3780 rtx out_set
, in_set
;
3782 in_set
= single_set (in_insn
);
3785 gcc_assert (JUMP_P (in_insn
) || CALL_P (in_insn
));
3789 if (GET_CODE (SET_SRC (in_set
)) != IF_THEN_ELSE
)
3791 in_set
= SET_SRC (in_set
);
3793 out_set
= single_set (out_insn
);
3796 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3797 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3805 out_pat
= PATTERN (out_insn
);
3806 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3808 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3810 rtx exp
= XVECEXP (out_pat
, 0, i
);
3812 if (GET_CODE (exp
) == CLOBBER
)
3815 gcc_assert (GET_CODE (exp
) == SET
);
3817 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3818 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}

namespace {

const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_peephole2 ();
    }

}; // class pass_peephole2

} // anon namespace

rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}
namespace {

const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_all_insns

} // anon namespace

rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}
static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}

namespace {

const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_split_after_reload ();
    }

}; // class pass_split_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}
namespace {

const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_regstack

bool
pass_split_before_regstack::gate (function *)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

} // anon namespace

rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}
static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

namespace {

const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
#ifdef INSN_SCHEDULING
      return optimize > 0 && flag_schedule_insns_after_reload;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_split_before_sched2 ();
    }

}; // class pass_split_before_sched2

} // anon namespace

rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}
namespace {

const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* The placement of the splitting that we do for shorten_branches
	 depends on whether regstack is used by the target or not.  */
#if HAVE_ATTR_length && !defined (STACK_REGS)
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return split_all_insns_noflow ();
    }

}; // class pass_split_for_shorten_branches

} // anon namespace

rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}
/* (Re)initialize the target information after a change in target.  */

void
recog_init ()
{
  /* The information is zero-initialized, so we don't need to do anything
     first time round.  */
  if (!this_target_recog->x_initialized)
    {
      this_target_recog->x_initialized = true;
      return;
    }
  memset (this_target_recog->x_bool_attr_masks, 0,
	  sizeof (this_target_recog->x_bool_attr_masks));
  for (int i = 0; i < LAST_INSN_CODE; ++i)
    if (this_target_recog->x_op_alt[i])
      {
	free (this_target_recog->x_op_alt[i]);
	this_target_recog->x_op_alt[i] = 0;
      }
}