/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "rtl-error.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "addresses.h"
#include "dominance.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "tree-pass.h"
#include "insn-codes.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx_insn *);

struct target_recog default_target_recog;
struct target_recog *this_target_recog = &default_target_recog;
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;
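/* For example, a consumer that wants the register class of operand OP in
   alternative ALT would look at
     recog_op_alt[ALT * recog_data.n_operands + OP].cl
   (illustrative sketch of the indexing described above; see
   preprocess_constraints below).  */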
/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */
int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];

      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
		   bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */
  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}
/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */

bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

static int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
		      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);

      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
		  REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insn changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
	 are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
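/* A typical client queues several related replacements and then commits or
   abandons them as a unit; for example (illustrative sketch only; PAT and
   NOTE stand for the caller's own data):

     validate_change (insn, &SET_SRC (pat), new_src, 1);
     validate_change (insn, &XEXP (note, 0), new_src, 1);
     if (!apply_change_group ())
       give_up ();

   apply_change_group itself calls cancel_changes on failure, so nothing is
   modified when it returns zero; cancel_changes is called directly only when
   a caller wants to back out changes it has queued but not yet applied.  */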
/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtl.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
			  machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    validate_unshare_change (object, loc,
			     gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					     : swap_condition (code),
					     GET_MODE (x), XEXP (x, 1),
					     XEXP (x, 0)), 1);

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    validate_change (object, loc, new_rtx, 1);

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  machine_mode wanted_mode = VOIDmode;
	  machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
	    {
	      wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
	    {
	      wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}
      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
			bool simplify)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */
  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
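/* For example, a pass that wants to rewrite every use of OLD_REG in INSN
   as NEW_REG, but only if INSN still matches afterwards, can write
   (illustrative sketch; old_reg and new_reg are the caller's own rtxes):

     if (!validate_replace_rtx (old_reg, new_reg, insn))
       give_up ();

   On failure INSN is left exactly as it was; give_up stands for whatever
   fallback path the caller uses.  */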
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.

   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}
/* Same as above, but do not simplify rtx afterwards.  */

int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
				      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat;
  rtx newpat;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op) : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insn with invalid addresses which is made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
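/* In a machine description, such predicates are normally referenced through
   MATCH_OPERAND; for instance (illustrative fragment, not taken from any
   particular back end):

     (match_operand:SI 1 "general_operand" "g")

   accepts any operand for which general_operand returns nonzero in SImode.  */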
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
	return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REGNO (op) < FIRST_PSEUDO_REGISTER
	      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op) : mode, op));
}
/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */

int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return 0;

      if (prec == bitsize)
	return 1;
      else
	{
	  /* Multiword partial int.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
	  && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
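/* As an illustration, on a STACK_GROWS_DOWNWARD target with no
   PUSH_ROUNDING adjustment, a word-sized push is typically the memory
   reference
     (mem:SI (pre_dec:SI (reg:SI sp)))
   whose address this predicate accepts (sketch; the exact modes and the
   push code depend on the target).  */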
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */
      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;

  switch (GET_CODE (op))
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, machine_mode *modes,
		     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
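/* A caller normally sizes its arrays via asm_noperands first, as
   check_asm_operands does above; for example (illustrative sketch):

     int n = asm_noperands (body);
     rtx *ops = XALLOCAVEC (rtx, n);
     const char **cons = XALLOCAVEC (const char *, n);
     decode_asm_operands (body, ops, NULL, cons, NULL, NULL);  */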
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
			 unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;

      /* A letter followed by a digit indicates an operand number.  */
      if (ISALPHA (p[0]) && ISDIGIT (p[1]))

	    unsigned long opnum = strtoul (p, &endptr, 10);
	    if (endptr != p && opnum < noperands)
	      used[opnum] = true;
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  bool incdec_ok = false;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')

      enum constraint_num cn;
      char c = *constraint;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */

	    unsigned long match;

	    match = strtoul (constraint, &end, 10);
	    result = asm_operand_ok (op, constraints[match], NULL);
	    constraint = (const char *) end;

	  while (ISDIGIT (*constraint));

	  /* The rest of the compiler assumes that reloading the address
	     of a MEM into a register will make it fit an 'o' constraint.
	     That is, if it sees a MEM operand for an 'o' constraint,
	     it assumes that (mem (base-reg)) will fit.

	     That assumption fails on targets that don't have offsettable
	     addresses at all.  We therefore need to treat 'o' asm
	     constraints as a special case and only accept operands that
	     are already offsettable, thus proving that at least one
	     offsettable address exists.  */
	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))

	  if (general_operand (op, VOIDmode))

	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  cn = lookup_constraint (constraint);
	  switch (get_constraint_type (cn))

	      && reg_class_for_constraint (cn) != NO_REGS
	      && GET_MODE (op) != BLKmode
	      && register_operand (op, VOIDmode))

	      && insn_const_int_ok_for_constraint (INTVAL (op), cn))

	      /* Every memory operand can be reloaded to fit.  */
	      result = result || memory_operand (op, VOIDmode);

	      /* Every address operand can be reloaded to fit.  */
	      result = result || address_operand (op, VOIDmode);

	      result = result || constraint_satisfied_p (op, cn);

      len = CONSTRAINT_LEN (c, constraint);

      while (--len && *constraint);

  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}
/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
     : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
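/* For example, for a 4-byte SImode reference the probe address is Y plus 3:
   if Y+3 is still a valid (QImode) address, every smaller offset that
   adjust_address might later introduce is assumed to be valid as well.  */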
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}
/* Return true if boolean attribute ATTR is supported.  */

static bool
have_bool_attr (bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return HAVE_ATTR_enabled;
    case BA_PREFERRED_FOR_SIZE:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
    case BA_PREFERRED_FOR_SPEED:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
    }
  gcc_unreachable ();
}
/* Return the value of ATTR for instruction INSN.  */

static bool
get_bool_attr (rtx_insn *insn, bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return get_attr_enabled (insn);
    case BA_PREFERRED_FOR_SIZE:
      return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
    case BA_PREFERRED_FOR_SPEED:
      return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
    }
  gcc_unreachable ();
}
/* Like get_bool_attr_mask, but don't use the cache.  */

static alternative_mask
get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
{
  /* Temporarily install enough information for get_attr_<foo> to assume
     that the insn operands are already cached.  As above, the attribute
     mustn't depend on the values of operands, so we don't provide their
     real values here.  */
  rtx old_insn = recog_data.insn;
  int old_alternative = which_alternative;

  recog_data.insn = insn;
  alternative_mask mask = ALL_ALTERNATIVES;
  int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
  for (int i = 0; i < n_alternatives; i++)
    {
      which_alternative = i;
      if (!get_bool_attr (insn, attr))
	mask &= ~ALTERNATIVE_BIT (i);
    }

  recog_data.insn = old_insn;
  which_alternative = old_alternative;
  return mask;
}
/* Return the mask of operand alternatives that are allowed for INSN
   by boolean attribute ATTR.  This mask depends only on INSN and on
   the current target; it does not depend on things like the values of
   operands.  */

static alternative_mask
get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
{
  /* Quick exit for asms and for targets that don't use these attributes.  */
  int code = INSN_CODE (insn);
  if (code < 0 || !have_bool_attr (attr))
    return ALL_ALTERNATIVES;

  /* Calling get_attr_<foo> can be expensive, so cache the mask
     for speed.  */
  if (!this_target_recog->x_bool_attr_masks[code][attr])
    this_target_recog->x_bool_attr_masks[code][attr]
      = get_bool_attr_mask_uncached (insn, attr);
  return this_target_recog->x_bool_attr_masks[code][attr];
}
/* Return the set of alternatives of INSN that are allowed by the current
   target.  */

alternative_mask
get_enabled_alternatives (rtx_insn *insn)
{
  return get_bool_attr_mask (insn, BA_ENABLED);
}
/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the current size/speed optimization
   choice.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn)
{
  if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}
/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the size/speed optimization choice
   associated with BB.  Passing a separate BB is useful if INSN has not
   been emitted yet or if we are considering moving it to a different
   block.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn, basic_block bb)
{
  if (optimize_bb_for_speed_p (bb))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}
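/* A pass would typically combine these masks with constrain_operands, e.g.
   (illustrative sketch):

     if (!constrain_operands (reload_completed,
			      get_preferred_alternatives (insn)))
       fail ();

   where fail stands for whatever rejection path the caller uses.  */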
/* Assert that the cached boolean attributes for INSN are still accurate.
   The backend is required to define these attributes in a way that only
   depends on the current target (rather than operands, compiler phase,
   etc.).  */

bool
check_bool_attrs (rtx_insn *insn)
{
  int code = INSN_CODE (insn);
  if (code >= 0)
    for (int i = 0; i <= BA_LAST; ++i)
      {
	enum bool_attr attr = (enum bool_attr) i;
	if (this_target_recog->x_bool_attr_masks[code][attr])
	  gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
		      == get_bool_attr_mask_uncached (insn, attr));
      }
  return true;
}
/* Like extract_insn, but save insn extracted and don't extract again, when
   called again for the same insn expecting that recog_data still contain the
   valid information.  This is used primarily by gen_attr infrastructure that
   often does extract insn again and again.  */
void
extract_insn_cached (rtx_insn *insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
/* Do uncached extract_insn, constrain_operands and complain about failures.
   This should be used when extracting a pre-existing constrained instruction
   if the caller wants to know which alternative was chosen.  */
void
extract_constrain_insn (rtx_insn *insn)
{
  extract_insn (insn);
  if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx_insn *insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed,
			      get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands on INSN and complain about failures.  */
int
constrain_operands_cached (rtx_insn *insn, int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict, get_enabled_alternatives (insn));
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx_insn *insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode, NULL);
          memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          recog_data.is_asm = true;
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands gets mode from their real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }

  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  recog_data.insn = NULL;
  which_alternative = -1;
}
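/* Illustrative sketch (not part of GCC): what extract_insn makes available.
   Each operand's rtx, mode and constraint string can be read back from
   recog_data; the dump helper below is hypothetical.  */
#if 0
static void
dump_extracted_operands (FILE *file, rtx_insn *insn)
{
  extract_insn (insn);
  for (int i = 0; i < recog_data.n_operands; i++)
    {
      fprintf (file, "operand %d: constraint \"%s\", mode %s\n",
               i, recog_data.constraints[i],
               GET_MODE_NAME (recog_data.operand_mode[i]));
      print_inline_rtx (file, recog_data.operand[i], 2);
      fputc ('\n', file);
    }
}
#endif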
/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
   N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
   OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
   has N_OPERANDS entries.  */

void
preprocess_constraints (int n_operands, int n_alternatives,
                        const char **constraints,
                        operand_alternative *op_alt_base)
{
  for (int i = 0; i < n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = constraints[i];

      op_alt = op_alt_base;

      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
        {
          op_alt[i].cl = NO_REGS;
          op_alt[i].constraint = p;
          op_alt[i].matches = -1;
          op_alt[i].matched = -1;

          if (*p == '\0' || *p == ',')
            {
              op_alt[i].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '?':
                  op_alt[i].reject += 6;
                  break;
                case '!':
                  op_alt[i].reject += 600;
                  break;
                case '&':
                  op_alt[i].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[i].matches = strtoul (p, &end, 10);
                    op_alt[op_alt[i].matches].matched = i;
                    p = end;
                  }
                  continue;

                case 'X':
                  op_alt[i].anything_ok = 1;
                  break;

                case 'g':
                  op_alt[i].cl
                    = reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
                  break;

                default:
                  {
                    enum constraint_num cn = lookup_constraint (p);
                    enum reg_class cl;
                    switch (get_constraint_type (cn))
                      {
                      case CT_REGISTER:
                        cl = reg_class_for_constraint (cn);
                        if (cl != NO_REGS)
                          op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
                        break;

                      case CT_CONST_INT:
                        break;

                      case CT_MEMORY:
                        op_alt[i].memory_ok = 1;
                        break;

                      case CT_ADDRESS:
                        op_alt[i].is_address = 1;
                        op_alt[i].cl
                          = (reg_class_subunion
                             [(int) op_alt[i].cl]
                             [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
                                                    ADDRESS, SCRATCH)]);
                        break;

                      case CT_FIXED_FORM:
                        break;
                      }
                  }
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
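/* Illustrative sketch (not part of GCC): the layout filled in above.  Each
   alternative owns a contiguous block of n_operands entries (the loop above
   steps op_alt by n_operands per alternative), so an entry can be located
   with the hypothetical helper below.  */
#if 0
static const operand_alternative *
op_alt_entry (const operand_alternative *op_alt_base,
              int n_operands, int alt, int op)
{
  return op_alt_base + alt * n_operands + op;
}
#endif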
/* Return an array of operand_alternative entries for instruction ICODE.  */

const operand_alternative *
preprocess_insn_constraints (int icode)
{
  gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
  if (this_target_recog->x_op_alt[icode])
    return this_target_recog->x_op_alt[icode];

  int n_operands = insn_data[icode].n_operands;
  if (n_operands == 0)
    return 0;
  /* Always provide at least one alternative so that which_op_alt ()
     works correctly.  If the instruction has 0 alternatives (i.e. all
     constraint strings are empty) then each operand in this alternative
     will have anything_ok set.  */
  int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
  int n_entries = n_operands * n_alternatives;

  operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
  const char **constraints = XALLOCAVEC (const char *, n_operands);

  for (int i = 0; i < n_operands; ++i)
    constraints[i] = insn_data[icode].operand[i].constraint;
  preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);

  this_target_recog->x_op_alt[icode] = op_alt;
  return op_alt;
}
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

void
preprocess_constraints (rtx insn)
{
  int icode = INSN_CODE (insn);
  if (icode >= 0)
    recog_op_alt = preprocess_insn_constraints (icode);
  else
    {
      int n_operands = recog_data.n_operands;
      int n_alternatives = recog_data.n_alternatives;
      int n_entries = n_operands * n_alternatives;
      memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
      preprocess_constraints (n_operands, n_alternatives,
                              recog_data.constraints, asm_op_alt);
      recog_op_alt = asm_op_alt;
    }
}
/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they match any of the alternatives in ALTERNATIVES.

   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive nonzero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */
2562 constrain_operands (int strict
, alternative_mask alternatives
)
2564 const char *constraints
[MAX_RECOG_OPERANDS
];
2565 int matching_operands
[MAX_RECOG_OPERANDS
];
2566 int earlyclobber
[MAX_RECOG_OPERANDS
];
2569 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2570 int funny_match_index
;
2572 which_alternative
= 0;
2573 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2576 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2578 constraints
[c
] = recog_data
.constraints
[c
];
2579 matching_operands
[c
] = -1;
2584 int seen_earlyclobber_at
= -1;
2587 funny_match_index
= 0;
2589 if (!TEST_BIT (alternatives
, which_alternative
))
2593 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2594 constraints
[i
] = skip_alternative (constraints
[i
]);
2596 which_alternative
++;
2600 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2602 rtx op
= recog_data
.operand
[opno
];
2603 machine_mode mode
= GET_MODE (op
);
2604 const char *p
= constraints
[opno
];
2610 earlyclobber
[opno
] = 0;
2612 /* A unary operator may be accepted by the predicate, but it
2613 is irrelevant for matching constraints. */
2617 if (GET_CODE (op
) == SUBREG
)
2619 if (REG_P (SUBREG_REG (op
))
2620 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2621 offset
= subreg_regno_offset (REGNO (SUBREG_REG (op
)),
2622 GET_MODE (SUBREG_REG (op
)),
2625 op
= SUBREG_REG (op
);
2628 /* An empty constraint or empty alternative
2629 allows anything which matched the pattern. */
2630 if (*p
== 0 || *p
== ',')
2634 switch (c
= *p
, len
= CONSTRAINT_LEN (c
, p
), c
)
2644 /* Ignore rest of this alternative as far as
2645 constraint checking is concerned. */
2648 while (*p
&& *p
!= ',');
2653 earlyclobber
[opno
] = 1;
2654 if (seen_earlyclobber_at
< 0)
2655 seen_earlyclobber_at
= opno
;
2658 case '0': case '1': case '2': case '3': case '4':
2659 case '5': case '6': case '7': case '8': case '9':
2661 /* This operand must be the same as a previous one.
2662 This kind of constraint is used for instructions such
2663 as add when they take only two operands.
2665 Note that the lower-numbered operand is passed first.
2667 If we are not testing strictly, assume that this
2668 constraint will be satisfied. */
2673 match
= strtoul (p
, &end
, 10);
2680 rtx op1
= recog_data
.operand
[match
];
2681 rtx op2
= recog_data
.operand
[opno
];
2683 /* A unary operator may be accepted by the predicate,
2684 but it is irrelevant for matching constraints. */
2686 op1
= XEXP (op1
, 0);
2688 op2
= XEXP (op2
, 0);
2690 val
= operands_match_p (op1
, op2
);
2693 matching_operands
[opno
] = match
;
2694 matching_operands
[match
] = opno
;
2699 /* If output is *x and input is *--x, arrange later
2700 to change the output to *--x as well, since the
2701 output op is the one that will be printed. */
2702 if (val
== 2 && strict
> 0)
2704 funny_match
[funny_match_index
].this_op
= opno
;
2705 funny_match
[funny_match_index
++].other
= match
;
2712 /* p is used for address_operands. When we are called by
2713 gen_reload, no one will have checked that the address is
2714 strictly valid, i.e., that all pseudos requiring hard regs
2715 have gotten them. */
2717 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2722 /* No need to check general_operand again;
2723 it was done in insn-recog.c. Well, except that reload
2724 doesn't check the validity of its replacements, but
2725 that should only matter when there's a bug. */
2727 /* Anything goes unless it is a REG and really has a hard reg
2728 but the hard reg is not in the class GENERAL_REGS. */
2732 || GENERAL_REGS
== ALL_REGS
2733 || (reload_in_progress
2734 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2735 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2738 else if (strict
< 0 || general_operand (op
, mode
))
2744 enum constraint_num cn
= lookup_constraint (p
);
2745 enum reg_class cl
= reg_class_for_constraint (cn
);
2751 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2752 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2754 && reg_fits_class_p (op
, cl
, offset
, mode
)))
2758 else if (constraint_satisfied_p (op
, cn
))
2761 else if (insn_extra_memory_constraint (cn
)
2762 /* Every memory operand can be reloaded to fit. */
2763 && ((strict
< 0 && MEM_P (op
))
2764 /* Before reload, accept what reload can turn
2766 || (strict
< 0 && CONSTANT_P (op
))
2767 /* During reload, accept a pseudo */
2768 || (reload_in_progress
&& REG_P (op
)
2769 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)))
2771 else if (insn_extra_address_constraint (cn
)
2772 /* Every address operand can be reloaded to fit. */
2775 /* Cater to architectures like IA-64 that define extra memory
2776 constraints without using define_memory_constraint. */
2777 else if (reload_in_progress
2779 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
2780 && reg_renumber
[REGNO (op
)] < 0
2781 && reg_equiv_mem (REGNO (op
)) != 0
2782 && constraint_satisfied_p
2783 (reg_equiv_mem (REGNO (op
)), cn
))
2788 while (p
+= len
, c
);
2790 constraints
[opno
] = p
;
2791 /* If this operand did not win somehow,
2792 this alternative loses. */
2796 /* This alternative won; the operands are ok.
2797 Change whichever operands this alternative says to change. */
2802 /* See if any earlyclobber operand conflicts with some other
2805 if (strict
> 0 && seen_earlyclobber_at
>= 0)
2806 for (eopno
= seen_earlyclobber_at
;
2807 eopno
< recog_data
.n_operands
;
2809 /* Ignore earlyclobber operands now in memory,
2810 because we would often report failure when we have
2811 two memory operands, one of which was formerly a REG. */
2812 if (earlyclobber
[eopno
]
2813 && REG_P (recog_data
.operand
[eopno
]))
2814 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2815 if ((MEM_P (recog_data
.operand
[opno
])
2816 || recog_data
.operand_type
[opno
] != OP_OUT
)
2818 /* Ignore things like match_operator operands. */
2819 && *recog_data
.constraints
[opno
] != 0
2820 && ! (matching_operands
[opno
] == eopno
2821 && operands_match_p (recog_data
.operand
[opno
],
2822 recog_data
.operand
[eopno
]))
2823 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2824 recog_data
.operand
[eopno
]))
2829 while (--funny_match_index
>= 0)
2831 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2832 = recog_data
.operand
[funny_match
[funny_match_index
].this_op
];
2836 /* For operands without < or > constraints reject side-effects. */
2837 if (recog_data
.is_asm
)
2839 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2840 if (MEM_P (recog_data
.operand
[opno
]))
2841 switch (GET_CODE (XEXP (recog_data
.operand
[opno
], 0)))
2849 if (strchr (recog_data
.constraints
[opno
], '<') == NULL
2850 && strchr (recog_data
.constraints
[opno
], '>')
2863 which_alternative
++;
2865 while (which_alternative
< recog_data
.n_alternatives
);
2867 which_alternative
= -1;
2868 /* If we are about to reject this, but we are not to test strictly,
2869 try a very loose test. Only return failure if it fails also. */
2871 return constrain_operands (-1, alternatives
);
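/* Illustrative sketch (not part of GCC): the two calling modes described
   in the comment above constrain_operands.  The wrapper name below is
   hypothetical.  */
#if 0
static bool
insn_constraints_ok_p (rtx_insn *insn)
{
  extract_insn (insn);
  /* After reload (reload_completed != 0) operands must satisfy their
     constraints exactly; before reload, constrain_operands falls back to
     an internal strict < 0 retry that assumes reload will fix things up.  */
  return constrain_operands (reload_completed,
                             get_enabled_alternatives (insn)) != 0;
}
#endif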
/* Return true iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
                  machine_mode mode)
{
  unsigned int regno = REGNO (operand);

  if (cl == NO_REGS)
    return false;

  /* Regno must not be a pseudo register.  Offset may be negative.  */
  return (HARD_REGISTER_NUM_P (regno)
          && HARD_REGISTER_NUM_P (regno + offset)
          && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
                                regno + offset));
}
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (rtx_insn *insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx_insn *first = PREV_INSN (insn);
  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL_RTX;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
        {
          note = find_reg_equal_equiv_note (insn);
          if (note && CONSTANT_P (XEXP (note, 0)))
            set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
          else if (CONSTANT_P (SET_SRC (insn_set)))
            set_unique_reg_note (last, REG_EQUAL,
                                 copy_rtx (SET_SRC (insn_set)));
        }
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      while (1)
        {
          if (INSN_P (first))
            cleanup_subreg_operands (first);
          if (first == last)
            break;
          first = NEXT_INSN (first);
        }
    }

  return last;
}
/* Split all insns in the function.  If UPD_LIFE, update life info after.  */

void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
        {
          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          finish = (insn == BB_END (bb));
          if (INSN_P (insn))
            {
              rtx set = single_set (insn);

              /* Don't split no-op move insns.  These should silently
                 disappear later in final.  Splitting such insns would
                 break the code that handles LIBCALL blocks.  */
              if (set && set_noop_p (set))
                {
                  /* Nops get in the way while scheduling, so delete them
                     now if register allocation has already been done.  It
                     is too risky to try to do this before register
                     allocation, and there are unlikely to be very many
                     nops then anyways.  */
                  if (reload_completed)
                    delete_insn_and_edges (insn);
                }
              else
                {
                  if (split_insn (insn))
                    {
                      bitmap_set_bit (blocks, bb->index);
                      changed = true;
                    }
                }
            }
        }
    }

  default_rtl_profile ();
  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx_insn *next, *insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
        {
          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles LIBCALL blocks.  */
          rtx set = single_set (insn);
          if (set && set_noop_p (set))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyways.

                 ??? Should we use delete_insn when the CFG isn't valid?  */
              if (reload_completed)
                delete_insn_and_edges (insn);
            }
          else
            split_insn (insn);
        }
    }

  return 0;
}
#ifdef HAVE_peephole2

struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB pc_rtx

/* Wrap N to fit into the peep2_insn_data buffer.  */

static int
peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n = peep2_buf_position (peep2_current + n);

  return peep2_insn_data[n].insn;
}

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
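/* Illustrative sketch (not part of GCC): peep2_insn_data is a circular
   buffer of MAX_INSNS_PER_PEEP2 + 1 slots, and indices only ever run past
   the end by less than one full buffer length, so peep2_buf_position can
   wrap with a single subtraction.  The standalone mirror below is
   hypothetical and shows only the index arithmetic.  */
#if 0
static int
wrap_buffer_index (int n, int buf_size)
{
  /* Equivalent to n % buf_size when 0 <= n < 2 * buf_size.  */
  if (n >= buf_size)
    n -= buf_size;
  return n;
}
#endif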
/* Regno offset to be used in the register search.  */
static int search_ofs;

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
                          machine_mode mode, HARD_REG_SET *reg_set)
3157 gcc_assert (from
< MAX_INSNS_PER_PEEP2
+ 1);
3158 gcc_assert (to
< MAX_INSNS_PER_PEEP2
+ 1);
3160 from
= peep2_buf_position (peep2_current
+ from
);
3161 to
= peep2_buf_position (peep2_current
+ to
);
3163 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
3164 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
3168 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
3170 /* Don't use registers set or clobbered by the insn. */
3171 FOR_EACH_INSN_DEF (def
, peep2_insn_data
[from
].insn
)
3172 SET_HARD_REG_BIT (live
, DF_REF_REGNO (def
));
3174 from
= peep2_buf_position (from
+ 1);
3177 cl
= reg_class_for_constraint (lookup_constraint (class_str
));
3179 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3181 int raw_regno
, regno
, success
, j
;
3183 /* Distribute the free registers as much as possible. */
3184 raw_regno
= search_ofs
+ i
;
3185 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
3186 raw_regno
-= FIRST_PSEUDO_REGISTER
;
3187 #ifdef REG_ALLOC_ORDER
3188 regno
= reg_alloc_order
[raw_regno
];
3193 /* Can it support the mode we need? */
3194 if (! HARD_REGNO_MODE_OK (regno
, mode
))
3198 for (j
= 0; success
&& j
< hard_regno_nregs
[regno
][mode
]; j
++)
3200 /* Don't allocate fixed registers. */
3201 if (fixed_regs
[regno
+ j
])
3206 /* Don't allocate global registers. */
3207 if (global_regs
[regno
+ j
])
3212 /* Make sure the register is of the right class. */
3213 if (! TEST_HARD_REG_BIT (reg_class_contents
[cl
], regno
+ j
))
3218 /* And that we don't create an extra save/restore. */
3219 if (! call_used_regs
[regno
+ j
] && ! df_regs_ever_live_p (regno
+ j
))
3225 if (! targetm
.hard_regno_scratch_ok (regno
+ j
))
3231 /* And we don't clobber traceback for noreturn functions. */
3232 if ((regno
+ j
== FRAME_POINTER_REGNUM
3233 || regno
+ j
== HARD_FRAME_POINTER_REGNUM
)
3234 && (! reload_completed
|| frame_pointer_needed
))
3240 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
3241 || TEST_HARD_REG_BIT (live
, regno
+ j
))
3250 add_to_hard_reg_set (reg_set
, mode
, regno
);
3252 /* Start the next search with the next register. */
3253 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
3255 search_ofs
= raw_regno
;
3257 return gen_rtx_REG (mode
, regno
);
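/* Illustrative sketch (not part of GCC): how a target's define_peephole2
   C condition/preparation code typically consumes the buffer API above.
   The operand numbers, register class and mode are hypothetical.  */
#if 0
  /* Require a free general register of SImode that is available from the
     first matched insn through the second, and require operand 1 to be
     dead before the third matched insn.  */
  HARD_REG_SET scratch_regs;
  CLEAR_HARD_REG_SET (scratch_regs);
  rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &scratch_regs);
  bool applicable = scratch != NULL_RTX && peep2_reg_dead_p (2, operands[1]);
#endif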
/* Forget all currently tracked instructions, only remember current
   position.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last hold invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL_RTX;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}

/* While scanning basic block BB, we found a match of length MATCH_LEN,
   starting at INSN.  Perform the replacement, removing the old insns and
   replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
   if the replacement is rejected.  */
3291 peep2_attempt (basic_block bb
, rtx uncast_insn
, int match_len
, rtx_insn
*attempt
)
3293 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3295 rtx_insn
*last
, *before_try
, *x
;
3296 rtx eh_note
, as_note
;
3299 bool was_call
= false;
3301 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3302 match more than one insn, or to be split into more than one insn. */
3303 old_insn
= as_a
<rtx_insn
*> (peep2_insn_data
[peep2_current
].insn
);
3304 if (RTX_FRAME_RELATED_P (old_insn
))
3306 bool any_note
= false;
3312 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3313 may be in the stream for the purpose of register allocation. */
3314 if (active_insn_p (attempt
))
3317 new_insn
= next_active_insn (attempt
);
3318 if (next_active_insn (new_insn
))
3321 /* We have a 1-1 replacement. Copy over any frame-related info. */
3322 RTX_FRAME_RELATED_P (new_insn
) = 1;
3324 /* Allow the backend to fill in a note during the split. */
3325 for (note
= REG_NOTES (new_insn
); note
; note
= XEXP (note
, 1))
3326 switch (REG_NOTE_KIND (note
))
3328 case REG_FRAME_RELATED_EXPR
:
3329 case REG_CFA_DEF_CFA
:
3330 case REG_CFA_ADJUST_CFA
:
3331 case REG_CFA_OFFSET
:
3332 case REG_CFA_REGISTER
:
3333 case REG_CFA_EXPRESSION
:
3334 case REG_CFA_RESTORE
:
3335 case REG_CFA_SET_VDRAP
:
3342 /* If the backend didn't supply a note, copy one over. */
3344 for (note
= REG_NOTES (old_insn
); note
; note
= XEXP (note
, 1))
3345 switch (REG_NOTE_KIND (note
))
3347 case REG_FRAME_RELATED_EXPR
:
3348 case REG_CFA_DEF_CFA
:
3349 case REG_CFA_ADJUST_CFA
:
3350 case REG_CFA_OFFSET
:
3351 case REG_CFA_REGISTER
:
3352 case REG_CFA_EXPRESSION
:
3353 case REG_CFA_RESTORE
:
3354 case REG_CFA_SET_VDRAP
:
3355 add_reg_note (new_insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3362 /* If there still isn't a note, make sure the unwind info sees the
3363 same expression as before the split. */
3366 rtx old_set
, new_set
;
3368 /* The old insn had better have been simple, or annotated. */
3369 old_set
= single_set (old_insn
);
3370 gcc_assert (old_set
!= NULL
);
3372 new_set
= single_set (new_insn
);
3373 if (!new_set
|| !rtx_equal_p (new_set
, old_set
))
3374 add_reg_note (new_insn
, REG_FRAME_RELATED_EXPR
, old_set
);
3377 /* Copy prologue/epilogue status. This is required in order to keep
3378 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3379 maybe_copy_prologue_epilogue_insn (old_insn
, new_insn
);
3382 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3383 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3384 cfg-related call notes. */
3385 for (i
= 0; i
<= match_len
; ++i
)
3390 j
= peep2_buf_position (peep2_current
+ i
);
3391 old_insn
= as_a
<rtx_insn
*> (peep2_insn_data
[j
].insn
);
3392 if (!CALL_P (old_insn
))
3397 while (new_insn
!= NULL_RTX
)
3399 if (CALL_P (new_insn
))
3401 new_insn
= NEXT_INSN (new_insn
);
3404 gcc_assert (new_insn
!= NULL_RTX
);
3406 CALL_INSN_FUNCTION_USAGE (new_insn
)
3407 = CALL_INSN_FUNCTION_USAGE (old_insn
);
3408 SIBLING_CALL_P (new_insn
) = SIBLING_CALL_P (old_insn
);
3410 for (note
= REG_NOTES (old_insn
);
3412 note
= XEXP (note
, 1))
3413 switch (REG_NOTE_KIND (note
))
3418 add_reg_note (new_insn
, REG_NOTE_KIND (note
),
3422 /* Discard all other reg notes. */
3426 /* Croak if there is another call in the sequence. */
3427 while (++i
<= match_len
)
3429 j
= peep2_buf_position (peep2_current
+ i
);
3430 old_insn
= as_a
<rtx_insn
*> (peep2_insn_data
[j
].insn
);
3431 gcc_assert (!CALL_P (old_insn
));
3436 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3437 move those notes over to the new sequence. */
3439 for (i
= match_len
; i
>= 0; --i
)
3441 int j
= peep2_buf_position (peep2_current
+ i
);
3442 old_insn
= as_a
<rtx_insn
*> (peep2_insn_data
[j
].insn
);
3444 as_note
= find_reg_note (old_insn
, REG_ARGS_SIZE
, NULL
);
3449 i
= peep2_buf_position (peep2_current
+ match_len
);
3450 eh_note
= find_reg_note (peep2_insn_data
[i
].insn
, REG_EH_REGION
, NULL_RTX
);
3452 /* Replace the old sequence with the new. */
3453 rtx_insn
*peepinsn
= as_a
<rtx_insn
*> (peep2_insn_data
[i
].insn
);
3454 last
= emit_insn_after_setloc (attempt
,
3455 peep2_insn_data
[i
].insn
,
3456 INSN_LOCATION (peepinsn
));
3457 before_try
= PREV_INSN (insn
);
3458 delete_insn_chain (insn
, peep2_insn_data
[i
].insn
, false);
3460 /* Re-insert the EH_REGION notes. */
3461 if (eh_note
|| (was_call
&& nonlocal_goto_handler_labels
))
3466 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
3467 if (eh_edge
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
))
3471 copy_reg_eh_region_note_backward (eh_note
, last
, before_try
);
3474 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
3475 if (x
!= BB_END (bb
)
3476 && (can_throw_internal (x
)
3477 || can_nonlocal_goto (x
)))
3482 nfte
= split_block (bb
, x
);
3483 flags
= (eh_edge
->flags
3484 & (EDGE_EH
| EDGE_ABNORMAL
));
3486 flags
|= EDGE_ABNORMAL_CALL
;
3487 nehe
= make_edge (nfte
->src
, eh_edge
->dest
,
3490 nehe
->probability
= eh_edge
->probability
;
3492 = REG_BR_PROB_BASE
- nehe
->probability
;
3494 peep2_do_cleanup_cfg
|= purge_dead_edges (nfte
->dest
);
3499 /* Converting possibly trapping insn to non-trapping is
3500 possible. Zap dummy outgoing edges. */
3501 peep2_do_cleanup_cfg
|= purge_dead_edges (bb
);
3504 /* Re-insert the ARGS_SIZE notes. */
3506 fixup_args_size_notes (before_try
, last
, INTVAL (XEXP (as_note
, 0)));
3508 /* If we generated a jump instruction, it won't have
3509 JUMP_LABEL set. Recompute after we're done. */
3510 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
3513 peep2_do_rebuild_jump_labels
= true;
/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */
3527 peep2_update_life (basic_block bb
, int match_len
, rtx_insn
*last
,
3530 int i
= peep2_buf_position (peep2_current
+ match_len
+ 1);
3534 INIT_REG_SET (&live
);
3535 COPY_REG_SET (&live
, peep2_insn_data
[i
].live_before
);
3537 gcc_assert (peep2_current_count
>= match_len
+ 1);
3538 peep2_current_count
-= match_len
+ 1;
3546 if (peep2_current_count
< MAX_INSNS_PER_PEEP2
)
3548 peep2_current_count
++;
3550 i
= MAX_INSNS_PER_PEEP2
;
3551 peep2_insn_data
[i
].insn
= x
;
3552 df_simulate_one_insn_backwards (bb
, x
, &live
);
3553 COPY_REG_SET (peep2_insn_data
[i
].live_before
, &live
);
3559 CLEAR_REG_SET (&live
);
/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
   Return true if we added it, false otherwise.  The caller will try to match
   peepholes against the buffer if we return false; otherwise it will try to
   add more instructions to the buffer.  */
3570 peep2_fill_buffer (basic_block bb
, rtx insn
, regset live
)
3574 /* Once we have filled the maximum number of insns the buffer can hold,
3575 allow the caller to match the insns against peepholes. We wait until
3576 the buffer is full in case the target has similar peepholes of different
3577 length; we always want to match the longest if possible. */
3578 if (peep2_current_count
== MAX_INSNS_PER_PEEP2
)
3581 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3582 any other pattern, lest it change the semantics of the frame info. */
3583 if (RTX_FRAME_RELATED_P (insn
))
3585 /* Let the buffer drain first. */
3586 if (peep2_current_count
> 0)
3588 /* Now the insn will be the only thing in the buffer. */
3591 pos
= peep2_buf_position (peep2_current
+ peep2_current_count
);
3592 peep2_insn_data
[pos
].insn
= insn
;
3593 COPY_REG_SET (peep2_insn_data
[pos
].live_before
, live
);
3594 peep2_current_count
++;
3596 df_simulate_one_insn_forwards (bb
, as_a
<rtx_insn
*> (insn
), live
);
3600 /* Perform the peephole2 optimization pass. */
3603 peephole2_optimize (void)
3610 peep2_do_cleanup_cfg
= false;
3611 peep2_do_rebuild_jump_labels
= false;
3613 df_set_flags (DF_LR_RUN_DCE
);
3614 df_note_add_problem ();
3617 /* Initialize the regsets we're going to use. */
3618 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3619 peep2_insn_data
[i
].live_before
= BITMAP_ALLOC (®_obstack
);
3621 live
= BITMAP_ALLOC (®_obstack
);
3623 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
3625 bool past_end
= false;
3628 rtl_profile_for_bb (bb
);
3630 /* Start up propagation. */
3631 bitmap_copy (live
, DF_LR_IN (bb
));
3632 df_simulate_initialize_forwards (bb
, live
);
3633 peep2_reinit_state (live
);
3635 insn
= BB_HEAD (bb
);
3642 if (!past_end
&& !NONDEBUG_INSN_P (insn
))
3645 insn
= NEXT_INSN (insn
);
3646 if (insn
== NEXT_INSN (BB_END (bb
)))
3650 if (!past_end
&& peep2_fill_buffer (bb
, insn
, live
))
3653 /* If we did not fill an empty buffer, it signals the end of the
3655 if (peep2_current_count
== 0)
3658 /* The buffer filled to the current maximum, so try to match. */
3660 pos
= peep2_buf_position (peep2_current
+ peep2_current_count
);
3661 peep2_insn_data
[pos
].insn
= PEEP2_EOB
;
3662 COPY_REG_SET (peep2_insn_data
[pos
].live_before
, live
);
3664 /* Match the peephole. */
3665 head
= peep2_insn_data
[peep2_current
].insn
;
3666 attempt
= safe_as_a
<rtx_insn
*> (
3667 peephole2_insns (PATTERN (head
), head
, &match_len
));
3668 if (attempt
!= NULL
)
3670 rtx_insn
*last
= peep2_attempt (bb
, head
, match_len
, attempt
);
3673 peep2_update_life (bb
, match_len
, last
, PREV_INSN (attempt
));
3678 /* No match: advance the buffer by one insn. */
3679 peep2_current
= peep2_buf_position (peep2_current
+ 1);
3680 peep2_current_count
--;
3684 default_rtl_profile ();
3685 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3686 BITMAP_FREE (peep2_insn_data
[i
].live_before
);
3688 if (peep2_do_rebuild_jump_labels
)
3689 rebuild_jump_labels (get_insns ());
3690 if (peep2_do_cleanup_cfg
)
3691 cleanup_cfg (CLEANUP_CFG_CHANGED
);
3693 #endif /* HAVE_peephole2 */
/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */
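/* Illustrative sketch (not part of GCC): besides appearing as the guard of
   a define_bypass, a predicate like store_data_bypass_p can be consulted
   from a target's cost-adjustment hook.  The hook name and latency tweak
   below are hypothetical.  */
#if 0
static int
example_adjust_cost (rtx_insn *insn, rtx link ATTRIBUTE_UNUSED,
                     rtx_insn *dep_insn, int cost)
{
  /* A store that consumes DEP_INSN's result only as store *data* can often
     use a forwarding path; dependencies on the *address* cannot.  */
  if (store_data_bypass_p (dep_insn, insn))
    return cost > 1 ? cost - 1 : cost;
  return cost;
}
#endif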
3702 store_data_bypass_p (rtx_insn
*out_insn
, rtx_insn
*in_insn
)
3704 rtx out_set
, in_set
;
3705 rtx out_pat
, in_pat
;
3706 rtx out_exp
, in_exp
;
3709 in_set
= single_set (in_insn
);
3712 if (!MEM_P (SET_DEST (in_set
)))
3715 out_set
= single_set (out_insn
);
3718 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_set
)))
3723 out_pat
= PATTERN (out_insn
);
3725 if (GET_CODE (out_pat
) != PARALLEL
)
3728 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3730 out_exp
= XVECEXP (out_pat
, 0, i
);
3732 if (GET_CODE (out_exp
) == CLOBBER
)
3735 gcc_assert (GET_CODE (out_exp
) == SET
);
3737 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_set
)))
3744 in_pat
= PATTERN (in_insn
);
3745 gcc_assert (GET_CODE (in_pat
) == PARALLEL
);
3747 for (i
= 0; i
< XVECLEN (in_pat
, 0); i
++)
3749 in_exp
= XVECEXP (in_pat
, 0, i
);
3751 if (GET_CODE (in_exp
) == CLOBBER
)
3754 gcc_assert (GET_CODE (in_exp
) == SET
);
3756 if (!MEM_P (SET_DEST (in_exp
)))
3759 out_set
= single_set (out_insn
);
3762 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_exp
)))
3767 out_pat
= PATTERN (out_insn
);
3768 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3770 for (j
= 0; j
< XVECLEN (out_pat
, 0); j
++)
3772 out_exp
= XVECEXP (out_pat
, 0, j
);
3774 if (GET_CODE (out_exp
) == CLOBBER
)
3777 gcc_assert (GET_CODE (out_exp
) == SET
);
3779 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_exp
)))
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */
3795 if_test_bypass_p (rtx_insn
*out_insn
, rtx_insn
*in_insn
)
3797 rtx out_set
, in_set
;
3799 in_set
= single_set (in_insn
);
3802 gcc_assert (JUMP_P (in_insn
) || CALL_P (in_insn
));
3806 if (GET_CODE (SET_SRC (in_set
)) != IF_THEN_ELSE
)
3808 in_set
= SET_SRC (in_set
);
3810 out_set
= single_set (out_insn
);
3813 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3814 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3822 out_pat
= PATTERN (out_insn
);
3823 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3825 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3827 rtx exp
= XVECEXP (out_pat
, 0, i
);
3829 if (GET_CODE (exp
) == CLOBBER
)
3832 gcc_assert (GET_CODE (exp
) == SET
);
3834 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3835 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3844 rest_of_handle_peephole2 (void)
3846 #ifdef HAVE_peephole2
3847 peephole2_optimize ();
3854 const pass_data pass_data_peephole2
=
3856 RTL_PASS
, /* type */
3857 "peephole2", /* name */
3858 OPTGROUP_NONE
, /* optinfo_flags */
3859 TV_PEEPHOLE2
, /* tv_id */
3860 0, /* properties_required */
3861 0, /* properties_provided */
3862 0, /* properties_destroyed */
3863 0, /* todo_flags_start */
3864 TODO_df_finish
, /* todo_flags_finish */
3867 class pass_peephole2
: public rtl_opt_pass
3870 pass_peephole2 (gcc::context
*ctxt
)
3871 : rtl_opt_pass (pass_data_peephole2
, ctxt
)
3874 /* opt_pass methods: */
3875 /* The epiphany backend creates a second instance of this pass, so we need
3877 opt_pass
* clone () { return new pass_peephole2 (m_ctxt
); }
3878 virtual bool gate (function
*) { return (optimize
> 0 && flag_peephole2
); }
3879 virtual unsigned int execute (function
*)
3881 return rest_of_handle_peephole2 ();
3884 }; // class pass_peephole2
3889 make_pass_peephole2 (gcc::context
*ctxt
)
3891 return new pass_peephole2 (ctxt
);
3896 const pass_data pass_data_split_all_insns
=
3898 RTL_PASS
, /* type */
3899 "split1", /* name */
3900 OPTGROUP_NONE
, /* optinfo_flags */
3901 TV_NONE
, /* tv_id */
3902 0, /* properties_required */
3903 0, /* properties_provided */
3904 0, /* properties_destroyed */
3905 0, /* todo_flags_start */
3906 0, /* todo_flags_finish */
3909 class pass_split_all_insns
: public rtl_opt_pass
3912 pass_split_all_insns (gcc::context
*ctxt
)
3913 : rtl_opt_pass (pass_data_split_all_insns
, ctxt
)
3916 /* opt_pass methods: */
3917 /* The epiphany backend creates a second instance of this pass, so
3918 we need a clone method. */
3919 opt_pass
* clone () { return new pass_split_all_insns (m_ctxt
); }
3920 virtual unsigned int execute (function
*)
3926 }; // class pass_split_all_insns
3931 make_pass_split_all_insns (gcc::context
*ctxt
)
3933 return new pass_split_all_insns (ctxt
);
3937 rest_of_handle_split_after_reload (void)
3939 /* If optimizing, then go ahead and split insns now. */
3949 const pass_data pass_data_split_after_reload
=
3951 RTL_PASS
, /* type */
3952 "split2", /* name */
3953 OPTGROUP_NONE
, /* optinfo_flags */
3954 TV_NONE
, /* tv_id */
3955 0, /* properties_required */
3956 0, /* properties_provided */
3957 0, /* properties_destroyed */
3958 0, /* todo_flags_start */
3959 0, /* todo_flags_finish */
3962 class pass_split_after_reload
: public rtl_opt_pass
3965 pass_split_after_reload (gcc::context
*ctxt
)
3966 : rtl_opt_pass (pass_data_split_after_reload
, ctxt
)
3969 /* opt_pass methods: */
3970 virtual unsigned int execute (function
*)
3972 return rest_of_handle_split_after_reload ();
3975 }; // class pass_split_after_reload
3980 make_pass_split_after_reload (gcc::context
*ctxt
)
3982 return new pass_split_after_reload (ctxt
);
3987 const pass_data pass_data_split_before_regstack
=
3989 RTL_PASS
, /* type */
3990 "split3", /* name */
3991 OPTGROUP_NONE
, /* optinfo_flags */
3992 TV_NONE
, /* tv_id */
3993 0, /* properties_required */
3994 0, /* properties_provided */
3995 0, /* properties_destroyed */
3996 0, /* todo_flags_start */
3997 0, /* todo_flags_finish */
4000 class pass_split_before_regstack
: public rtl_opt_pass
4003 pass_split_before_regstack (gcc::context
*ctxt
)
4004 : rtl_opt_pass (pass_data_split_before_regstack
, ctxt
)
4007 /* opt_pass methods: */
4008 virtual bool gate (function
*);
4009 virtual unsigned int execute (function
*)
4015 }; // class pass_split_before_regstack
4018 pass_split_before_regstack::gate (function
*)
4020 #if HAVE_ATTR_length && defined (STACK_REGS)
4021 /* If flow2 creates new instructions which need splitting
4022 and scheduling after reload is not done, they might not be
4023 split until final which doesn't allow splitting
4024 if HAVE_ATTR_length. */
4025 # ifdef INSN_SCHEDULING
4026 return (optimize
&& !flag_schedule_insns_after_reload
);
4038 make_pass_split_before_regstack (gcc::context
*ctxt
)
4040 return new pass_split_before_regstack (ctxt
);
4044 rest_of_handle_split_before_sched2 (void)
4046 #ifdef INSN_SCHEDULING
4054 const pass_data pass_data_split_before_sched2
=
4056 RTL_PASS
, /* type */
4057 "split4", /* name */
4058 OPTGROUP_NONE
, /* optinfo_flags */
4059 TV_NONE
, /* tv_id */
4060 0, /* properties_required */
4061 0, /* properties_provided */
4062 0, /* properties_destroyed */
4063 0, /* todo_flags_start */
4064 0, /* todo_flags_finish */
4067 class pass_split_before_sched2
: public rtl_opt_pass
4070 pass_split_before_sched2 (gcc::context
*ctxt
)
4071 : rtl_opt_pass (pass_data_split_before_sched2
, ctxt
)
4074 /* opt_pass methods: */
4075 virtual bool gate (function
*)
4077 #ifdef INSN_SCHEDULING
4078 return optimize
> 0 && flag_schedule_insns_after_reload
;
4084 virtual unsigned int execute (function
*)
4086 return rest_of_handle_split_before_sched2 ();
4089 }; // class pass_split_before_sched2
4094 make_pass_split_before_sched2 (gcc::context
*ctxt
)
4096 return new pass_split_before_sched2 (ctxt
);
4101 const pass_data pass_data_split_for_shorten_branches
=
4103 RTL_PASS
, /* type */
4104 "split5", /* name */
4105 OPTGROUP_NONE
, /* optinfo_flags */
4106 TV_NONE
, /* tv_id */
4107 0, /* properties_required */
4108 0, /* properties_provided */
4109 0, /* properties_destroyed */
4110 0, /* todo_flags_start */
4111 0, /* todo_flags_finish */
4114 class pass_split_for_shorten_branches
: public rtl_opt_pass
4117 pass_split_for_shorten_branches (gcc::context
*ctxt
)
4118 : rtl_opt_pass (pass_data_split_for_shorten_branches
, ctxt
)
4121 /* opt_pass methods: */
4122 virtual bool gate (function
*)
4124 /* The placement of the splitting that we do for shorten_branches
4125 depends on whether regstack is used by the target or not. */
4126 #if HAVE_ATTR_length && !defined (STACK_REGS)
4133 virtual unsigned int execute (function
*)
4135 return split_all_insns_noflow ();
4138 }; // class pass_split_for_shorten_branches
4143 make_pass_split_for_shorten_branches (gcc::context
*ctxt
)
4145 return new pass_split_for_shorten_branches (ctxt
);
/* (Re)initialize the target information after a change in target.  */

void
recog_init ()
{
  /* The information is zero-initialized, so we don't need to do anything
     first time round.  */
  if (!this_target_recog->x_initialized)
    {
      this_target_recog->x_initialized = true;
      return;
    }
  memset (this_target_recog->x_bool_attr_masks, 0,
          sizeof (this_target_recog->x_bool_attr_masks));
  for (int i = 0; i < LAST_INSN_CODE; ++i)
    if (this_target_recog->x_op_alt[i])
      {
        free (this_target_recog->x_op_alt[i]);
        this_target_recog->x_op_alt[i] = 0;
      }
}