1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "rtl-error.h"
28 #include "insn-config.h"
29 #include "insn-attr.h"
30 #include "hard-reg-set.h"
33 #include "addresses.h"
42 #include "basic-block.h"
45 #include "tree-pass.h"
47 #include "insn-codes.h"
/* NOTE(review): default push/pop addressing codes keyed off stack growth
   direction.  The extraction dropped the pairing #else/#endif lines
   (original lines 52, 54-55, 60, 62-63) — the two #define lines in each
   group are the downward- and upward-growing variants respectively.  */
49 #ifndef STACK_PUSH_CODE
50 #ifdef STACK_GROWS_DOWNWARD
51 #define STACK_PUSH_CODE PRE_DEC
53 #define STACK_PUSH_CODE PRE_INC
57 #ifndef STACK_POP_CODE
58 #ifdef STACK_GROWS_DOWNWARD
59 #define STACK_POP_CODE POST_INC
61 #define STACK_POP_CODE POST_DEC
/* Forward declarations for file-local helpers, and the per-target
   recognition state.  NOTE(review): hard line-wrapping has split these
   declarations mid-token; tokens are preserved verbatim below.  */
65 static void validate_replace_rtx_1 (rtx
*, rtx
, rtx
, rtx
, bool);
66 static void validate_replace_src_1 (rtx
*, void *);
67 static rtx
split_insn (rtx_insn
*);
/* Default target-specific recognition data, and the pointer through
   which the active target's copy is accessed.  */
69 struct target_recog default_target_recog
;
71 struct target_recog
*this_target_recog
= &default_target_recog
;
/* NOTE(review): file-scope state for the recognizer.  The comment below
   describes `volatile_ok', but its actual declaration (original line ~82)
   was dropped by the extraction — original numbering jumps 80 -> 84.  */
74 /* Nonzero means allow operands to be volatile.
75 This should be 0 if you are generating rtl, such as if you are calling
76 the functions in optabs.c and expmed.c (most of the time).
77 This should be 1 if all valid insns need to be recognized,
78 such as in reginfo.c and final.c and reload.c.
80 init_recog and init_recog_no_volatile are responsible for setting this. */
84 struct recog_data_d recog_data
;
86 /* Contains a vector of operand_alternative structures, such that
87 operand OP of alternative A is at index A * n_operands + OP.
88 Set up by preprocess_constraints. */
89 const operand_alternative
*recog_op_alt
;
91 /* Used to provide recog_op_alt for asms. */
92 static operand_alternative asm_op_alt
[MAX_RECOG_OPERANDS
93 * MAX_RECOG_ALTERNATIVES
];
95 /* On return from `constrain_operands', indicate which alternative
98 int which_alternative
;
100 /* Nonzero after end of reload pass.
101 Set to 1 or 0 by toplev.c.
102 Controls the significance of (SUBREG (MEM)). */
104 int reload_completed
;
106 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
107 int epilogue_completed
;
109 /* Initialize data used by the function `recog'.
110 This must be called once in the compilation of a function
111 before any insn recognition may be done in the function. */
/* NOTE(review): only the signature survives; the return type line and the
   body (original lines 112-125, presumably clearing/setting volatile_ok as
   the comment above says — TODO confirm against upstream recog.c) were
   dropped by the extraction, along with init_recog entirely.  */
114 init_recog_no_volatile (void)
126 /* Return true if labels in asm operands BODY are LABEL_REFs. */
/* NOTE(review): return type, braces and the early/final return statements
   (original lines 127-133, 136-137, 140-143) are missing from this
   extraction.  Visible logic: extract the ASM_OPERANDS from BODY and walk
   its label vector, failing on any entry that is not a LABEL_REF.  */
129 asm_labels_ok (rtx body
)
134 asmop
= extract_asm_operands (body
);
135 if (asmop
== NULL_RTX
)
138 for (i
= 0; i
< ASM_OPERANDS_LABEL_LENGTH (asmop
); i
++)
139 if (GET_CODE (ASM_OPERANDS_LABEL (asmop
, i
)) != LABEL_REF
)
145 /* Check that X is an insn-body for an `asm' with operands
146 and that the operands mentioned in it are legitimate. */
/* NOTE(review): several lines (return type, braces, early returns around
   original lines 147-158, 161, 167-174, 183-190) are missing.  Visible
   logic: reject bad asm labels; post-reload, wrap X in a raw insn and run
   constrain_operands, succeeding iff some alternative matched; pre-reload,
   decode the operands/constraints and vet each with asm_operand_ok.  */
149 check_asm_operands (rtx x
)
153 const char **constraints
;
156 if (!asm_labels_ok (x
))
159 /* Post-reload, be more strict with things. */
160 if (reload_completed
)
162 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
163 rtx_insn
*insn
= make_insn_raw (x
);
165 constrain_operands (1, get_enabled_alternatives (insn
));
166 return which_alternative
>= 0;
169 noperands
= asm_noperands (x
);
175 operands
= XALLOCAVEC (rtx
, noperands
);
176 constraints
= XALLOCAVEC (const char *, noperands
);
178 decode_asm_operands (x
, operands
, NULL
, constraints
, NULL
, NULL
);
180 for (i
= 0; i
< noperands
; i
++)
182 const char *c
= constraints
[i
];
185 if (! asm_operand_ok (operands
[i
], c
, constraints
))
192 /* Static data for the next two routines. */
/* NOTE(review): the change_t member list (original lines 195-202: object,
   loc, old, old_code, unshare fields, judging by the uses elsewhere in
   this file) was dropped by the extraction.  `changes' is a growable
   array of pending changes; `num_changes' counts entries in use.  */
194 typedef struct change_t
203 static change_t
*changes
;
204 static int changes_allocated
;
206 static int num_changes
= 0;
208 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
209 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
210 the change is simply made.
212 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
213 will be called with the address and mode as parameters. If OBJECT is
214 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
217 IN_GROUP is nonzero if this is part of a group of changes that must be
218 performed as a group. In that case, the changes will be stored. The
219 function `apply_change_group' will validate and apply the changes.
221 If IN_GROUP is zero, this is a single change. Try to recognize the insn
222 or validate the memory reference with the change applied. If the result
223 is not valid for the machine, suppress the change and return zero.
224 Otherwise, perform the change and return 1. */
/* NOTE(review): return type, braces, `old' initialization and the actual
   *LOC store (several original lines) are missing from this extraction;
   what remains is kept byte-identical.  Visible logic: no-op if the
   replacement equals the existing rtx; grow the `changes' array on
   demand; record object/loc/old/unshare; for non-MEM objects save the
   old INSN_CODE and force re-recognition; in-group changes return
   immediately, otherwise the single change is validated via
   apply_change_group.  */
227 validate_change_1 (rtx object
, rtx
*loc
, rtx new_rtx
, bool in_group
, bool unshare
)
231 if (old
== new_rtx
|| rtx_equal_p (old
, new_rtx
))
234 gcc_assert (in_group
!= 0 || num_changes
== 0);
238 /* Save the information describing this change. */
239 if (num_changes
>= changes_allocated
)
241 if (changes_allocated
== 0)
242 /* This value allows for repeated substitutions inside complex
243 indexed addresses, or changes in up to 5 insns. */
244 changes_allocated
= MAX_RECOG_OPERANDS
* 5;
246 changes_allocated
*= 2;
248 changes
= XRESIZEVEC (change_t
, changes
, changes_allocated
);
251 changes
[num_changes
].object
= object
;
252 changes
[num_changes
].loc
= loc
;
253 changes
[num_changes
].old
= old
;
254 changes
[num_changes
].unshare
= unshare
;
256 if (object
&& !MEM_P (object
))
258 /* Set INSN_CODE to force rerecognition of insn. Save old code in
260 changes
[num_changes
].old_code
= INSN_CODE (object
);
261 INSN_CODE (object
) = -1;
266 /* If we are making a group of changes, return 1. Otherwise, validate the
267 change group we made. */
272 return apply_change_group ();
275 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
279 validate_change (rtx object
, rtx
*loc
, rtx new_rtx
, bool in_group
)
281 return validate_change_1 (object
, loc
, new_rtx
, in_group
, false);
284 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
288 validate_unshare_change (rtx object
, rtx
*loc
, rtx new_rtx
, bool in_group
)
290 return validate_change_1 (object
, loc
, new_rtx
, in_group
, true);
294 /* Keep X canonicalized if some changes have made it non-canonical; only
295 modifies the operands of X, not (for example) its code. Simplifications
296 are not the job of this routine.
298 Return true if anything was changed. */
300 canonicalize_change_group (rtx insn
, rtx x
)
302 if (COMMUTATIVE_P (x
)
303 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
305 /* Oops, the caller has made X no longer canonical.
306 Let's redo the changes in the correct order. */
307 rtx tem
= XEXP (x
, 0);
308 validate_unshare_change (insn
, &XEXP (x
, 0), XEXP (x
, 1), 1);
309 validate_unshare_change (insn
, &XEXP (x
, 1), tem
, 1);
317 /* This subroutine of apply_change_group verifies whether the changes to INSN
318 were valid; i.e. whether INSN can still be recognized.
320 If IN_GROUP is true clobbers which have to be added in order to
321 match the instructions will be added to the current change group.
322 Otherwise the changes will take effect immediately. */
/* NOTE(review): return type, braces and several statements (around
   original lines 343-344, 349-351, 353-354, 358, 360, 362-363, 366-372,
   374-375) are missing from this extraction.  Visible logic: re-run
   recog, allowing clobbers to be added only pre-reload for a SET; fail
   for unrecognized non-asm or an asm with bad operands; refuse clobbers
   of hard registers, otherwise wrap the pattern in a PARALLEL with the
   clobbers appended (via the change group when IN_GROUP); post-reload,
   additionally require constrain_operands to succeed; finally commit
   the new INSN_CODE.  */
325 insn_invalid_p (rtx_insn
*insn
, bool in_group
)
327 rtx pat
= PATTERN (insn
);
328 int num_clobbers
= 0;
329 /* If we are before reload and the pattern is a SET, see if we can add
331 int icode
= recog (pat
, insn
,
332 (GET_CODE (pat
) == SET
333 && ! reload_completed
334 && ! reload_in_progress
)
335 ? &num_clobbers
: 0);
336 int is_asm
= icode
< 0 && asm_noperands (PATTERN (insn
)) >= 0;
339 /* If this is an asm and the operand aren't legal, then fail. Likewise if
340 this is not an asm and the insn wasn't recognized. */
341 if ((is_asm
&& ! check_asm_operands (PATTERN (insn
)))
342 || (!is_asm
&& icode
< 0))
345 /* If we have to add CLOBBERs, fail if we have to add ones that reference
346 hard registers since our callers can't know if they are live or not.
347 Otherwise, add them. */
348 if (num_clobbers
> 0)
352 if (added_clobbers_hard_reg_p (icode
))
355 newpat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num_clobbers
+ 1));
356 XVECEXP (newpat
, 0, 0) = pat
;
357 add_clobbers (newpat
, icode
);
359 validate_change (insn
, &PATTERN (insn
), newpat
, 1);
361 PATTERN (insn
) = pat
= newpat
;
364 /* After reload, verify that all constraints are satisfied. */
365 if (reload_completed
)
369 if (! constrain_operands (1, get_preferred_alternatives (insn
)))
373 INSN_CODE (insn
) = icode
;
377 /* Return number of changes made and not validated yet. */
379 num_changes_pending (void)
384 /* Tentatively apply the changes numbered NUM and up.
385 Return 1 if all changes are valid, zero otherwise. */
/* NOTE(review): return type, braces, the MEM_P test guarding the
   memory_address_addr_space_p branch, several `break;'/`continue;'
   statements and other lines (original 386-390, 392, 409-412, 414,
   416-417, 420, 426, 429-430, 445-447, 450-454, 459-460, 465, 469,
   471-472, 476-480, 482-483) are missing here.  Visible logic: for each
   pending change, skip null/duplicate objects; validate MEM addresses;
   reject rewrites of hard-register asm operands declared with
   register asm ("x"); accept DEBUG_INSNs; for invalid insns, retry
   after stripping a trailing CLOBBER from a PARALLEL (non-asm only),
   registering the stripped pattern as a further grouped change; USE /
   CLOBBER / VAR_LOCATION patterns are always valid.  Success iff the
   loop covered every pending change.  */
388 verify_changes (int num
)
391 rtx last_validated
= NULL_RTX
;
393 /* The changes have been applied and all INSN_CODEs have been reset to force
396 The changes are valid if we aren't given an object, or if we are
397 given a MEM and it still is a valid address, or if this is in insn
398 and it is recognized. In the latter case, if reload has completed,
399 we also require that the operands meet the constraints for
402 for (i
= num
; i
< num_changes
; i
++)
404 rtx object
= changes
[i
].object
;
406 /* If there is no object to test or if it is the same as the one we
407 already tested, ignore it. */
408 if (object
== 0 || object
== last_validated
)
413 if (! memory_address_addr_space_p (GET_MODE (object
),
415 MEM_ADDR_SPACE (object
)))
418 else if (/* changes[i].old might be zero, e.g. when putting a
419 REG_FRAME_RELATED_EXPR into a previously empty list. */
421 && REG_P (changes
[i
].old
)
422 && asm_noperands (PATTERN (object
)) > 0
423 && REG_EXPR (changes
[i
].old
) != NULL_TREE
424 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes
[i
].old
))
425 && DECL_REGISTER (REG_EXPR (changes
[i
].old
)))
427 /* Don't allow changes of hard register operands to inline
428 assemblies if they have been defined as register asm ("x"). */
431 else if (DEBUG_INSN_P (object
))
433 else if (insn_invalid_p (as_a
<rtx_insn
*> (object
), true))
435 rtx pat
= PATTERN (object
);
437 /* Perhaps we couldn't recognize the insn because there were
438 extra CLOBBERs at the end. If so, try to re-recognize
439 without the last CLOBBER (later iterations will cause each of
440 them to be eliminated, in turn). But don't do this if we
441 have an ASM_OPERAND. */
442 if (GET_CODE (pat
) == PARALLEL
443 && GET_CODE (XVECEXP (pat
, 0, XVECLEN (pat
, 0) - 1)) == CLOBBER
444 && asm_noperands (PATTERN (object
)) < 0)
448 if (XVECLEN (pat
, 0) == 2)
449 newpat
= XVECEXP (pat
, 0, 0);
455 = gen_rtx_PARALLEL (VOIDmode
,
456 rtvec_alloc (XVECLEN (pat
, 0) - 1));
457 for (j
= 0; j
< XVECLEN (newpat
, 0); j
++)
458 XVECEXP (newpat
, 0, j
) = XVECEXP (pat
, 0, j
);
461 /* Add a new change to this group to replace the pattern
462 with this new pattern. Then consider this change
463 as having succeeded. The change we added will
464 cause the entire call to fail if things remain invalid.
466 Note that this can lose if a later change than the one
467 we are processing specified &XVECEXP (PATTERN (object), 0, X)
468 but this shouldn't occur. */
470 validate_change (object
, &PATTERN (object
), newpat
, 1);
473 else if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
474 || GET_CODE (pat
) == VAR_LOCATION
)
475 /* If this insn is a CLOBBER or USE, it is always valid, but is
481 last_validated
= object
;
484 return (i
== num_changes
);
487 /* A group of changes has previously been issued with validate_change
488 and verified with verify_changes. Call df_insn_rescan for each of
489 the insn changed and clear num_changes. */
/* NOTE(review): return type, braces, loop-variable declaration and the
   final `num_changes = 0;' reset promised by the comment above (original
   lines 490-494, 496, 498, 500, 503, 505-507, 511-513, 516-517) are
   missing from this extraction.  Visible logic: for each pending change,
   unshare the stored rtx when requested, and batch df_insn_rescan calls
   so consecutive changes to the same insn trigger only one rescan.  */
492 confirm_change_group (void)
495 rtx last_object
= NULL
;
497 for (i
= 0; i
< num_changes
; i
++)
499 rtx object
= changes
[i
].object
;
501 if (changes
[i
].unshare
)
502 *changes
[i
].loc
= copy_rtx (*changes
[i
].loc
);
504 /* Avoid unnecessary rescanning when multiple changes to same instruction
508 if (object
!= last_object
&& last_object
&& INSN_P (last_object
))
509 df_insn_rescan (as_a
<rtx_insn
*> (last_object
));
510 last_object
= object
;
514 if (last_object
&& INSN_P (last_object
))
515 df_insn_rescan (as_a
<rtx_insn
*> (last_object
));
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
539 /* Return the number of changes so far in the current group. */
542 num_validated_changes (void)
547 /* Retract the changes numbered NUM and up. */
550 cancel_changes (int num
)
554 /* Back out all the changes. Do this in the opposite order in which
556 for (i
= num_changes
- 1; i
>= num
; i
--)
558 *changes
[i
].loc
= changes
[i
].old
;
559 if (changes
[i
].object
&& !MEM_P (changes
[i
].object
))
560 INSN_CODE (changes
[i
].object
) = changes
[i
].old_code
;
565 /* Reduce conditional compilation elsewhere. */
/* NOTE(review): the guarding #ifndef HAVE_extv / #ifndef HAVE_extzv and
   matching #endif lines (original 566-567, 569-571, 573) were dropped by
   the extraction; these fallbacks only apply when the target lacks the
   extv/extzv patterns.  */
568 #define CODE_FOR_extv CODE_FOR_nothing
572 #define CODE_FOR_extzv CODE_FOR_nothing
575 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
/* NOTE(review): the signature's return type, `rtx x = *loc;' declaration,
   braces, the switch's RTX_UNARY/RTX_BIN_ARITH/RTX_COMM_ARITH case labels,
   the code-specific switch (PLUS/MINUS/ZERO_EXTEND/SIGN_EXTEND/SUBREG/
   ZERO_EXTRACT/SIGN_EXTRACT) and various continuation lines are missing
   from this extraction.  Visible logic: canonicalize commutative operand
   order; constant-fold fully-constant unary/binary/relational rtxes via
   the change group; for PLUS/MINUS with a newly-constant second operand
   rebuild through simplify_gen_binary / a NEG; for extensions of
   VOIDmode operands substitute a simplification or a known-unrecognizable
   CLOBBER; simplify SUBREGs; and narrow the MEM operand of a bit-field
   extract to the mode the extv/extzv pattern wants, adjusting the bit
   position for endianness.  */
579 simplify_while_replacing (rtx
*loc
, rtx to
, rtx object
,
580 enum machine_mode op0_mode
)
583 enum rtx_code code
= GET_CODE (x
);
584 rtx new_rtx
= NULL_RTX
;
586 if (SWAPPABLE_OPERANDS_P (x
)
587 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
589 validate_unshare_change (object
, loc
,
590 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x
) ? code
591 : swap_condition (code
),
592 GET_MODE (x
), XEXP (x
, 1),
598 /* Canonicalize arithmetics with all constant operands. */
599 switch (GET_RTX_CLASS (code
))
602 if (CONSTANT_P (XEXP (x
, 0)))
603 new_rtx
= simplify_unary_operation (code
, GET_MODE (x
), XEXP (x
, 0),
608 if (CONSTANT_P (XEXP (x
, 0)) && CONSTANT_P (XEXP (x
, 1)))
609 new_rtx
= simplify_binary_operation (code
, GET_MODE (x
), XEXP (x
, 0),
613 case RTX_COMM_COMPARE
:
614 if (CONSTANT_P (XEXP (x
, 0)) && CONSTANT_P (XEXP (x
, 1)))
615 new_rtx
= simplify_relational_operation (code
, GET_MODE (x
), op0_mode
,
616 XEXP (x
, 0), XEXP (x
, 1));
623 validate_change (object
, loc
, new_rtx
, 1);
630 /* If we have a PLUS whose second operand is now a CONST_INT, use
631 simplify_gen_binary to try to simplify it.
632 ??? We may want later to remove this, once simplification is
633 separated from this function. */
634 if (CONST_INT_P (XEXP (x
, 1)) && XEXP (x
, 1) == to
)
635 validate_change (object
, loc
,
637 (PLUS
, GET_MODE (x
), XEXP (x
, 0), XEXP (x
, 1)), 1);
640 if (CONST_SCALAR_INT_P (XEXP (x
, 1)))
641 validate_change (object
, loc
,
643 (PLUS
, GET_MODE (x
), XEXP (x
, 0),
644 simplify_gen_unary (NEG
,
645 GET_MODE (x
), XEXP (x
, 1),
650 if (GET_MODE (XEXP (x
, 0)) == VOIDmode
)
652 new_rtx
= simplify_gen_unary (code
, GET_MODE (x
), XEXP (x
, 0),
654 /* If any of the above failed, substitute in something that
655 we know won't be recognized. */
657 new_rtx
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
658 validate_change (object
, loc
, new_rtx
, 1);
662 /* All subregs possible to simplify should be simplified. */
663 new_rtx
= simplify_subreg (GET_MODE (x
), SUBREG_REG (x
), op0_mode
,
666 /* Subregs of VOIDmode operands are incorrect. */
667 if (!new_rtx
&& GET_MODE (SUBREG_REG (x
)) == VOIDmode
)
668 new_rtx
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
670 validate_change (object
, loc
, new_rtx
, 1);
674 /* If we are replacing a register with memory, try to change the memory
675 to be the mode required for memory in extract operations (this isn't
676 likely to be an insertion operation; if it was, nothing bad will
677 happen, we might just fail in some cases). */
679 if (MEM_P (XEXP (x
, 0))
680 && CONST_INT_P (XEXP (x
, 1))
681 && CONST_INT_P (XEXP (x
, 2))
682 && !mode_dependent_address_p (XEXP (XEXP (x
, 0), 0),
683 MEM_ADDR_SPACE (XEXP (x
, 0)))
684 && !MEM_VOLATILE_P (XEXP (x
, 0)))
686 enum machine_mode wanted_mode
= VOIDmode
;
687 enum machine_mode is_mode
= GET_MODE (XEXP (x
, 0));
688 int pos
= INTVAL (XEXP (x
, 2));
690 if (GET_CODE (x
) == ZERO_EXTRACT
&& HAVE_extzv
)
692 wanted_mode
= insn_data
[CODE_FOR_extzv
].operand
[1].mode
;
693 if (wanted_mode
== VOIDmode
)
694 wanted_mode
= word_mode
;
696 else if (GET_CODE (x
) == SIGN_EXTRACT
&& HAVE_extv
)
698 wanted_mode
= insn_data
[CODE_FOR_extv
].operand
[1].mode
;
699 if (wanted_mode
== VOIDmode
)
700 wanted_mode
= word_mode
;
703 /* If we have a narrower mode, we can do something. */
704 if (wanted_mode
!= VOIDmode
705 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
707 int offset
= pos
/ BITS_PER_UNIT
;
710 /* If the bytes and bits are counted differently, we
711 must adjust the offset. */
712 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
714 (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
) -
717 gcc_assert (GET_MODE_PRECISION (wanted_mode
)
718 == GET_MODE_BITSIZE (wanted_mode
));
719 pos
%= GET_MODE_BITSIZE (wanted_mode
);
721 newmem
= adjust_address_nv (XEXP (x
, 0), wanted_mode
, offset
);
723 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
724 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
735 /* Replace every occurrence of FROM in X with TO. Mark each change with
736 validate_change passing OBJECT. */
/* NOTE(review): the `simplify' parameter line, local declarations
   (`rtx x = *loc;', `enum rtx_code code', `const char *fmt', i/j), the
   null-X early return, the `x == from' arm of the match condition and
   assorted braces (original lines 740-745, 748-752, 754, 756, 760-761,
   767, 769-771, etc.) are missing.  Visible logic: when X matches FROM
   (same rtx, same reg/mode+regno, or structurally equal with the same
   code and mode), substitute TO unshared and stop; otherwise recurse —
   with a special case so shared ASM_OPERANDS inside a PARALLEL are only
   rewritten through SET_DEST — then, if anything was substituted and
   simplification was requested, call simplify_while_replacing with the
   (possibly updated) operand-0 mode.  */
739 validate_replace_rtx_1 (rtx
*loc
, rtx from
, rtx to
, rtx object
,
746 enum machine_mode op0_mode
= VOIDmode
;
747 int prev_changes
= num_changes
;
753 fmt
= GET_RTX_FORMAT (code
);
755 op0_mode
= GET_MODE (XEXP (x
, 0));
757 /* X matches FROM if it is the same rtx or they are both referring to the
758 same register in the same mode. Avoid calling rtx_equal_p unless the
759 operands look similar. */
762 || (REG_P (x
) && REG_P (from
)
763 && GET_MODE (x
) == GET_MODE (from
)
764 && REGNO (x
) == REGNO (from
))
765 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
766 && rtx_equal_p (x
, from
)))
768 validate_unshare_change (object
, loc
, to
, 1);
772 /* Call ourself recursively to perform the replacements.
773 We must not replace inside already replaced expression, otherwise we
774 get infinite recursion for replacements like (reg X)->(subreg (reg X))
775 so we must special case shared ASM_OPERANDS. */
777 if (GET_CODE (x
) == PARALLEL
)
779 for (j
= XVECLEN (x
, 0) - 1; j
>= 0; j
--)
781 if (j
&& GET_CODE (XVECEXP (x
, 0, j
)) == SET
782 && GET_CODE (SET_SRC (XVECEXP (x
, 0, j
))) == ASM_OPERANDS
)
784 /* Verify that operands are really shared. */
785 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x
, 0, 0)))
786 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
788 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x
, 0, j
)),
789 from
, to
, object
, simplify
);
792 validate_replace_rtx_1 (&XVECEXP (x
, 0, j
), from
, to
, object
,
797 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
800 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
, simplify
);
801 else if (fmt
[i
] == 'E')
802 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
803 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
,
807 /* If we didn't substitute, there is nothing more to do. */
808 if (num_changes
== prev_changes
)
811 /* ??? The regmove is no more, so is this aberration still necessary? */
812 /* Allow substituted expression to have different mode. This is used by
813 regmove to change mode of pseudo register. */
814 if (fmt
[0] == 'e' && GET_MODE (XEXP (x
, 0)) != VOIDmode
)
815 op0_mode
= GET_MODE (XEXP (x
, 0));
817 /* Do changes needed to keep rtx consistent. Don't do any other
818 simplifications, as it is not our job. */
820 simplify_while_replacing (loc
, to
, object
, op0_mode
);
823 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
824 with TO. After all changes have been made, validate by seeing
825 if INSN is still valid. */
828 validate_replace_rtx_subexp (rtx from
, rtx to
, rtx insn
, rtx
*loc
)
830 validate_replace_rtx_1 (loc
, from
, to
, insn
, true);
831 return apply_change_group ();
834 /* Try replacing every occurrence of FROM in INSN with TO. After all
835 changes have been made, validate by seeing if INSN is still valid. */
838 validate_replace_rtx (rtx from
, rtx to
, rtx insn
)
840 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
, true);
841 return apply_change_group ();
844 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
845 is a part of INSN. After all changes have been made, validate by seeing if
847 validate_replace_rtx (from, to, insn) is equivalent to
848 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
851 validate_replace_rtx_part (rtx from
, rtx to
, rtx
*where
, rtx insn
)
853 validate_replace_rtx_1 (where
, from
, to
, insn
, true);
854 return apply_change_group ();
857 /* Same as above, but do not simplify rtx afterwards. */
859 validate_replace_rtx_part_nosimplify (rtx from
, rtx to
, rtx
*where
,
862 validate_replace_rtx_1 (where
, from
, to
, insn
, false);
863 return apply_change_group ();
867 /* Try replacing every occurrence of FROM in INSN with TO. This also
868 will replace in REG_EQUAL and REG_EQUIV notes. */
871 validate_replace_rtx_group (rtx from
, rtx to
, rtx insn
)
874 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
, true);
875 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
876 if (REG_NOTE_KIND (note
) == REG_EQUAL
877 || REG_NOTE_KIND (note
) == REG_EQUIV
)
878 validate_replace_rtx_1 (&XEXP (note
, 0), from
, to
, insn
, true);
881 /* Function called by note_uses to replace used subexpressions. */
882 struct validate_replace_src_data
884 rtx from
; /* Old RTX */
885 rtx to
; /* New RTX */
886 rtx insn
; /* Insn in which substitution is occurring. */
890 validate_replace_src_1 (rtx
*x
, void *data
)
892 struct validate_replace_src_data
*d
893 = (struct validate_replace_src_data
*) data
;
895 validate_replace_rtx_1 (x
, d
->from
, d
->to
, d
->insn
, true);
898 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
902 validate_replace_src_group (rtx from
, rtx to
, rtx insn
)
904 struct validate_replace_src_data d
;
909 note_uses (&PATTERN (insn
), validate_replace_src_1
, &d
);
912 /* Try simplify INSN.
913 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
914 pattern and return true if something was simplified. */
917 validate_simplify_insn (rtx insn
)
923 pat
= PATTERN (insn
);
925 if (GET_CODE (pat
) == SET
)
927 newpat
= simplify_rtx (SET_SRC (pat
));
928 if (newpat
&& !rtx_equal_p (SET_SRC (pat
), newpat
))
929 validate_change (insn
, &SET_SRC (pat
), newpat
, 1);
930 newpat
= simplify_rtx (SET_DEST (pat
));
931 if (newpat
&& !rtx_equal_p (SET_DEST (pat
), newpat
))
932 validate_change (insn
, &SET_DEST (pat
), newpat
, 1);
934 else if (GET_CODE (pat
) == PARALLEL
)
935 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
937 rtx s
= XVECEXP (pat
, 0, i
);
939 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
)
941 newpat
= simplify_rtx (SET_SRC (s
));
942 if (newpat
&& !rtx_equal_p (SET_SRC (s
), newpat
))
943 validate_change (insn
, &SET_SRC (s
), newpat
, 1);
944 newpat
= simplify_rtx (SET_DEST (s
));
945 if (newpat
&& !rtx_equal_p (SET_DEST (s
), newpat
))
946 validate_change (insn
, &SET_DEST (s
), newpat
, 1);
949 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
953 /* Return 1 if the insn using CC0 set by INSN does not contain
954 any ordered tests applied to the condition codes.
955 EQ and NE tests do not count. */
958 next_insn_tests_no_inequality (rtx insn
)
960 rtx next
= next_cc0_user (insn
);
962 /* If there is no next insn, we have to take the conservative choice. */
966 return (INSN_P (next
)
967 && ! inequality_comparisons_p (PATTERN (next
)));
971 /* Return 1 if OP is a valid general operand for machine mode MODE.
972 This is either a register reference, a memory reference,
973 or a constant. In the case of a memory reference, the address
974 is checked for general validity for the target machine.
976 Register and memory references must have mode MODE in order to be valid,
977 but some constants have no machine mode and are valid for any mode.
979 If MODE is VOIDmode, OP is checked for validity for whatever mode
982 The main use of this function is as a predicate in match_operand
983 expressions in the machine description. */
/* NOTE(review): return type, braces, several `return 0;'/`return 1;'
   statements and continuation lines of multi-line conditions (original
   985, 987, 989, 992, 998-999, 1001, 1003-1004, 1010-1012, 1015,
   1017-1020, 1030-1031, 1039-1041, 1043, 1050, 1052-1054, 1065-1067,
   1069-1071, 1074-1076, 1078, 1080-1081, 1085-1086, 1088-1092) are
   missing.  Visible logic: normalize VOIDmode MODE to OP's mode; reject
   integer-class constants where a float is wanted; range-check
   CONST_INTs; accept other constants subject to mode, PIC and
   targetm.legitimate_constant_p checks; require mode match otherwise;
   vet SUBREGs (paradoxical MEM subregs pre-reload, nonzero SUBREG_BYTE,
   CANNOT_CHANGE_MODE_CLASS, paradoxical float subregs, with LRA
   exemptions) and strip to the inner rtx; accept pseudo or
   operand-class hard REGs; and accept non-volatile (or volatile_ok)
   MEMs with valid addresses.  */
986 general_operand (rtx op
, enum machine_mode mode
)
988 enum rtx_code code
= GET_CODE (op
);
990 if (mode
== VOIDmode
)
991 mode
= GET_MODE (op
);
993 /* Don't accept CONST_INT or anything similar
994 if the caller wants something floating. */
995 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
996 && GET_MODE_CLASS (mode
) != MODE_INT
997 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1000 if (CONST_INT_P (op
)
1002 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1005 if (CONSTANT_P (op
))
1006 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1007 || mode
== VOIDmode
)
1008 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1009 && targetm
.legitimate_constant_p (mode
== VOIDmode
1013 /* Except for certain constants with VOIDmode, already checked for,
1014 OP's mode must match MODE if MODE specifies a mode. */
1016 if (GET_MODE (op
) != mode
)
1021 rtx sub
= SUBREG_REG (op
);
1023 #ifdef INSN_SCHEDULING
1024 /* On machines that have insn scheduling, we want all memory
1025 reference to be explicit, so outlaw paradoxical SUBREGs.
1026 However, we must allow them after reload so that they can
1027 get cleaned up by cleanup_subreg_operands. */
1028 if (!reload_completed
&& MEM_P (sub
)
1029 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (sub
)))
1032 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1033 may result in incorrect reference. We should simplify all valid
1034 subregs of MEM anyway. But allow this after reload because we
1035 might be called from cleanup_subreg_operands.
1037 ??? This is a kludge. */
1038 if (!reload_completed
&& SUBREG_BYTE (op
) != 0
1042 #ifdef CANNOT_CHANGE_MODE_CLASS
1044 && REGNO (sub
) < FIRST_PSEUDO_REGISTER
1045 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub
), GET_MODE (sub
), mode
)
1046 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_INT
1047 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_FLOAT
1048 /* LRA can generate some invalid SUBREGS just for matched
1049 operand reload presentation. LRA needs to treat them as
1051 && ! LRA_SUBREG_P (op
))
1055 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1056 create such rtl, and we must reject it. */
1057 if (SCALAR_FLOAT_MODE_P (GET_MODE (op
))
1058 /* LRA can use subreg to store a floating point value in an
1059 integer mode. Although the floating point and the
1060 integer modes need the same number of hard registers, the
1061 size of floating point mode can be less than the integer
1063 && ! lra_in_progress
1064 && GET_MODE_SIZE (GET_MODE (op
)) > GET_MODE_SIZE (GET_MODE (sub
)))
1068 code
= GET_CODE (op
);
1072 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1073 || in_hard_reg_set_p (operand_reg_set
, GET_MODE (op
), REGNO (op
)));
1077 rtx y
= XEXP (op
, 0);
1079 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
1082 /* Use the mem's mode, since it will be reloaded thus. LRA can
1083 generate move insn with invalid addresses which is made valid
1084 and efficiently calculated by LRA through further numerous
1087 || memory_address_addr_space_p (GET_MODE (op
), y
, MEM_ADDR_SPACE (op
)))
1094 /* Return 1 if OP is a valid memory address for a memory reference
1097 The main use of this function is as a predicate in match_operand
1098 expressions in the machine description. */
1101 address_operand (rtx op
, enum machine_mode mode
)
1103 return memory_address_p (mode
, op
);
1106 /* Return 1 if OP is a register reference of mode MODE.
1107 If MODE is VOIDmode, accept a register in any mode.
1109 The main use of this function is as a predicate in match_operand
1110 expressions in the machine description. */
1113 register_operand (rtx op
, enum machine_mode mode
)
1115 if (GET_CODE (op
) == SUBREG
)
1117 rtx sub
= SUBREG_REG (op
);
1119 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1120 because it is guaranteed to be reloaded into one.
1121 Just make sure the MEM is valid in itself.
1122 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1123 but currently it does result from (SUBREG (REG)...) where the
1124 reg went on the stack.) */
1125 if (!REG_P (sub
) && (reload_completed
|| !MEM_P (sub
)))
1128 else if (!REG_P (op
))
1130 return general_operand (op
, mode
);
1133 /* Return 1 for a register in Pmode; ignore the tested mode. */
1136 pmode_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1138 return register_operand (op
, Pmode
);
1141 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1142 or a hard register. */
1145 scratch_operand (rtx op
, enum machine_mode mode
)
1147 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1150 return (GET_CODE (op
) == SCRATCH
1153 || (REGNO (op
) < FIRST_PSEUDO_REGISTER
1154 && REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
))));
1157 /* Return 1 if OP is a valid immediate operand for mode MODE.
1159 The main use of this function is as a predicate in match_operand
1160 expressions in the machine description. */
1163 immediate_operand (rtx op
, enum machine_mode mode
)
1165 /* Don't accept CONST_INT or anything similar
1166 if the caller wants something floating. */
1167 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1168 && GET_MODE_CLASS (mode
) != MODE_INT
1169 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1172 if (CONST_INT_P (op
)
1174 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1177 return (CONSTANT_P (op
)
1178 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
1179 || GET_MODE (op
) == VOIDmode
)
1180 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1181 && targetm
.legitimate_constant_p (mode
== VOIDmode
1186 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE. */
1189 const_int_operand (rtx op
, enum machine_mode mode
)
1191 if (!CONST_INT_P (op
))
1194 if (mode
!= VOIDmode
1195 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1201 #if TARGET_SUPPORTS_WIDE_INT
1202 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
/* NOTE(review): return type, braces, several `return 0/1;' lines and the
   declaration of `x' before original line 1227 are missing from this
   extraction.  Visible logic: reject non-scalar-int constants; delegate
   CONST_INTs to const_int_operand; for CONST_WIDE_INT with a known mode,
   reject values wider than MODE and, for partial-int modes, require the
   top word to be properly sign-extended to the mode's precision.  */
1205 const_scalar_int_operand (rtx op
, enum machine_mode mode
)
1207 if (!CONST_SCALAR_INT_P (op
))
1210 if (CONST_INT_P (op
))
1211 return const_int_operand (op
, mode
);
1213 if (mode
!= VOIDmode
)
1215 int prec
= GET_MODE_PRECISION (mode
);
1216 int bitsize
= GET_MODE_BITSIZE (mode
);
1218 if (CONST_WIDE_INT_NUNITS (op
) * HOST_BITS_PER_WIDE_INT
> bitsize
)
1221 if (prec
== bitsize
)
1225 /* Multiword partial int. */
1227 = CONST_WIDE_INT_ELT (op
, CONST_WIDE_INT_NUNITS (op
) - 1);
1228 return (sext_hwi (x
, prec
& (HOST_BITS_PER_WIDE_INT
- 1)) == x
);
1234 /* Returns 1 if OP is an operand that is a constant integer or constant
1235 floating-point number of MODE. */
1238 const_double_operand (rtx op
, enum machine_mode mode
)
1240 return (GET_CODE (op
) == CONST_DOUBLE
)
1241 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
);
1244 /* Returns 1 if OP is an operand that is a constant integer or constant
1245 floating-point number of MODE. */
1248 const_double_operand (rtx op
, enum machine_mode mode
)
1250 /* Don't accept CONST_INT or anything similar
1251 if the caller wants something floating. */
1252 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1253 && GET_MODE_CLASS (mode
) != MODE_INT
1254 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1257 return ((CONST_DOUBLE_P (op
) || CONST_INT_P (op
))
1258 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1259 || GET_MODE (op
) == VOIDmode
));
1262 /* Return 1 if OP is a general operand that is not an immediate
1263 operand of mode MODE. */
1266 nonimmediate_operand (rtx op
, enum machine_mode mode
)
1268 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
1271 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1274 nonmemory_operand (rtx op
, enum machine_mode mode
)
1276 if (CONSTANT_P (op
))
1277 return immediate_operand (op
, mode
);
1278 return register_operand (op
, mode
);
1281 /* Return 1 if OP is a valid operand that stands for pushing a
1282 value of mode MODE onto the stack.
1284 The main use of this function is as a predicate in match_operand
1285 expressions in the machine description. */
1288 push_operand (rtx op
, enum machine_mode mode
)
1290 unsigned int rounded_size
= GET_MODE_SIZE (mode
);
1292 #ifdef PUSH_ROUNDING
1293 rounded_size
= PUSH_ROUNDING (rounded_size
);
1299 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1304 if (rounded_size
== GET_MODE_SIZE (mode
))
1306 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1311 if (GET_CODE (op
) != PRE_MODIFY
1312 || GET_CODE (XEXP (op
, 1)) != PLUS
1313 || XEXP (XEXP (op
, 1), 0) != XEXP (op
, 0)
1314 || !CONST_INT_P (XEXP (XEXP (op
, 1), 1))
1315 #ifdef STACK_GROWS_DOWNWARD
1316 || INTVAL (XEXP (XEXP (op
, 1), 1)) != - (int) rounded_size
1318 || INTVAL (XEXP (XEXP (op
, 1), 1)) != (int) rounded_size
1324 return XEXP (op
, 0) == stack_pointer_rtx
;
1327 /* Return 1 if OP is a valid operand that stands for popping a
1328 value of mode MODE off the stack.
1330 The main use of this function is as a predicate in match_operand
1331 expressions in the machine description. */
1334 pop_operand (rtx op
, enum machine_mode mode
)
1339 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1344 if (GET_CODE (op
) != STACK_POP_CODE
)
1347 return XEXP (op
, 0) == stack_pointer_rtx
;
1350 /* Return 1 if ADDR is a valid memory address
1351 for mode MODE in address space AS. */
1354 memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED
,
1355 rtx addr
, addr_space_t as
)
1357 #ifdef GO_IF_LEGITIMATE_ADDRESS
1358 gcc_assert (ADDR_SPACE_GENERIC_P (as
));
1359 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1365 return targetm
.addr_space
.legitimate_address_p (mode
, addr
, 0, as
);
1369 /* Return 1 if OP is a valid memory reference with mode MODE,
1370 including a valid address.
1372 The main use of this function is as a predicate in match_operand
1373 expressions in the machine description. */
1376 memory_operand (rtx op
, enum machine_mode mode
)
1380 if (! reload_completed
)
1381 /* Note that no SUBREG is a memory operand before end of reload pass,
1382 because (SUBREG (MEM...)) forces reloading into a register. */
1383 return MEM_P (op
) && general_operand (op
, mode
);
1385 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1389 if (GET_CODE (inner
) == SUBREG
)
1390 inner
= SUBREG_REG (inner
);
1392 return (MEM_P (inner
) && general_operand (op
, mode
));
1395 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1396 that is, a memory reference whose address is a general_operand. */
1399 indirect_operand (rtx op
, enum machine_mode mode
)
1401 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1402 if (! reload_completed
1403 && GET_CODE (op
) == SUBREG
&& MEM_P (SUBREG_REG (op
)))
1405 int offset
= SUBREG_BYTE (op
);
1406 rtx inner
= SUBREG_REG (op
);
1408 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1411 /* The only way that we can have a general_operand as the resulting
1412 address is if OFFSET is zero and the address already is an operand
1413 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1416 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1417 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1418 && CONST_INT_P (XEXP (XEXP (inner
, 0), 1))
1419 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1420 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1424 && memory_operand (op
, mode
)
1425 && general_operand (XEXP (op
, 0), Pmode
));
1428 /* Return 1 if this is an ordered comparison operator (not including
1429 ORDERED and UNORDERED). */
1432 ordered_comparison_operator (rtx op
, enum machine_mode mode
)
1434 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1436 switch (GET_CODE (op
))
1454 /* Return 1 if this is a comparison operator. This allows the use of
1455 MATCH_OPERATOR to recognize all the branch insns. */
1458 comparison_operator (rtx op
, enum machine_mode mode
)
1460 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1461 && COMPARISON_P (op
));
1464 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1467 extract_asm_operands (rtx body
)
1470 switch (GET_CODE (body
))
1476 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1477 tmp
= SET_SRC (body
);
1478 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1483 tmp
= XVECEXP (body
, 0, 0);
1484 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1486 if (GET_CODE (tmp
) == SET
)
1488 tmp
= SET_SRC (tmp
);
1489 if (GET_CODE (tmp
) == ASM_OPERANDS
)
1500 /* If BODY is an insn body that uses ASM_OPERANDS,
1501 return the number of operands (both input and output) in the insn.
1502 Otherwise return -1. */
1505 asm_noperands (const_rtx body
)
1507 rtx asm_op
= extract_asm_operands (CONST_CAST_RTX (body
));
1513 if (GET_CODE (body
) == SET
)
1515 else if (GET_CODE (body
) == PARALLEL
)
1518 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
)
1520 /* Multiple output operands, or 1 output plus some clobbers:
1522 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1523 /* Count backwards through CLOBBERs to determine number of SETs. */
1524 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1526 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1528 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1532 /* N_SETS is now number of output operands. */
1535 /* Verify that all the SETs we have
1536 came from a single original asm_operands insn
1537 (so that invalid combinations are blocked). */
1538 for (i
= 0; i
< n_sets
; i
++)
1540 rtx elt
= XVECEXP (body
, 0, i
);
1541 if (GET_CODE (elt
) != SET
)
1543 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1545 /* If these ASM_OPERANDS rtx's came from different original insns
1546 then they aren't allowed together. */
1547 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1548 != ASM_OPERANDS_INPUT_VEC (asm_op
))
1554 /* 0 outputs, but some clobbers:
1555 body is [(asm_operands ...) (clobber (reg ...))...]. */
1556 /* Make sure all the other parallel things really are clobbers. */
1557 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1558 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1563 return (ASM_OPERANDS_INPUT_LENGTH (asm_op
)
1564 + ASM_OPERANDS_LABEL_LENGTH (asm_op
) + n_sets
);
1567 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1568 copy its operands (both input and output) into the vector OPERANDS,
1569 the locations of the operands within the insn into the vector OPERAND_LOCS,
1570 and the constraints for the operands into CONSTRAINTS.
1571 Write the modes of the operands into MODES.
1572 Return the assembler-template.
1574 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1575 we don't store that info. */
1578 decode_asm_operands (rtx body
, rtx
*operands
, rtx
**operand_locs
,
1579 const char **constraints
, enum machine_mode
*modes
,
1582 int nbase
= 0, n
, i
;
1585 switch (GET_CODE (body
))
1588 /* Zero output asm: BODY is (asm_operands ...). */
1593 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1594 asmop
= SET_SRC (body
);
1596 /* The output is in the SET.
1597 Its constraint is in the ASM_OPERANDS itself. */
1599 operands
[0] = SET_DEST (body
);
1601 operand_locs
[0] = &SET_DEST (body
);
1603 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1605 modes
[0] = GET_MODE (SET_DEST (body
));
1611 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1613 asmop
= XVECEXP (body
, 0, 0);
1614 if (GET_CODE (asmop
) == SET
)
1616 asmop
= SET_SRC (asmop
);
1618 /* At least one output, plus some CLOBBERs. The outputs are in
1619 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1620 for (i
= 0; i
< nparallel
; i
++)
1622 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1623 break; /* Past last SET */
1625 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1627 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1629 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1631 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1642 n
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1643 for (i
= 0; i
< n
; i
++)
1646 operand_locs
[nbase
+ i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1648 operands
[nbase
+ i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1650 constraints
[nbase
+ i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1652 modes
[nbase
+ i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1656 n
= ASM_OPERANDS_LABEL_LENGTH (asmop
);
1657 for (i
= 0; i
< n
; i
++)
1660 operand_locs
[nbase
+ i
] = &ASM_OPERANDS_LABEL (asmop
, i
);
1662 operands
[nbase
+ i
] = ASM_OPERANDS_LABEL (asmop
, i
);
1664 constraints
[nbase
+ i
] = "";
1666 modes
[nbase
+ i
] = Pmode
;
1670 *loc
= ASM_OPERANDS_SOURCE_LOCATION (asmop
);
1672 return ASM_OPERANDS_TEMPLATE (asmop
);
1675 /* Parse inline assembly string STRING and determine which operands are
1676 referenced by % markers. For the first NOPERANDS operands, set USED[I]
1677 to true if operand I is referenced.
1679 This is intended to distinguish barrier-like asms such as:
1681 asm ("" : "=m" (...));
1683 from real references such as:
1685 asm ("sw\t$0, %0" : "=m" (...)); */
1688 get_referenced_operands (const char *string
, bool *used
,
1689 unsigned int noperands
)
1691 memset (used
, 0, sizeof (bool) * noperands
);
1692 const char *p
= string
;
1698 /* A letter followed by a digit indicates an operand number. */
1699 if (ISALPHA (p
[0]) && ISDIGIT (p
[1]))
1704 unsigned long opnum
= strtoul (p
, &endptr
, 10);
1705 if (endptr
!= p
&& opnum
< noperands
)
1719 /* Check if an asm_operand matches its constraints.
1720 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1723 asm_operand_ok (rtx op
, const char *constraint
, const char **constraints
)
1727 bool incdec_ok
= false;
1730 /* Use constrain_operands after reload. */
1731 gcc_assert (!reload_completed
);
1733 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1734 many alternatives as required to match the other operands. */
1735 if (*constraint
== '\0')
1740 enum constraint_num cn
;
1741 char c
= *constraint
;
1749 case '0': case '1': case '2': case '3': case '4':
1750 case '5': case '6': case '7': case '8': case '9':
1751 /* If caller provided constraints pointer, look up
1752 the matching constraint. Otherwise, our caller should have
1753 given us the proper matching constraint, but we can't
1754 actually fail the check if they didn't. Indicate that
1755 results are inconclusive. */
1759 unsigned long match
;
1761 match
= strtoul (constraint
, &end
, 10);
1763 result
= asm_operand_ok (op
, constraints
[match
], NULL
);
1764 constraint
= (const char *) end
;
1770 while (ISDIGIT (*constraint
));
1776 /* The rest of the compiler assumes that reloading the address
1777 of a MEM into a register will make it fit an 'o' constraint.
1778 That is, if it sees a MEM operand for an 'o' constraint,
1779 it assumes that (mem (base-reg)) will fit.
1781 That assumption fails on targets that don't have offsettable
1782 addresses at all. We therefore need to treat 'o' asm
1783 constraints as a special case and only accept operands that
1784 are already offsettable, thus proving that at least one
1785 offsettable address exists. */
1786 case 'o': /* offsettable */
1787 if (offsettable_nonstrict_memref_p (op
))
1792 if (general_operand (op
, VOIDmode
))
1799 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1800 to exist, excepting those that expand_call created. Further,
1801 on some machines which do not have generalized auto inc/dec,
1802 an inc/dec is not a memory_operand.
1804 Match any memory and hope things are resolved after reload. */
1808 cn
= lookup_constraint (constraint
);
1809 switch (get_constraint_type (cn
))
1813 && reg_class_for_constraint (cn
) != NO_REGS
1814 && GET_MODE (op
) != BLKmode
1815 && register_operand (op
, VOIDmode
))
1822 && insn_const_int_ok_for_constraint (INTVAL (op
), cn
))
1827 /* Every memory operand can be reloaded to fit. */
1828 result
= result
|| memory_operand (op
, VOIDmode
);
1832 /* Every address operand can be reloaded to fit. */
1833 result
= result
|| address_operand (op
, VOIDmode
);
1837 result
= result
|| constraint_satisfied_p (op
, cn
);
1842 len
= CONSTRAINT_LEN (c
, constraint
);
1845 while (--len
&& *constraint
);
1851 /* For operands without < or > constraints reject side-effects. */
1852 if (!incdec_ok
&& result
&& MEM_P (op
))
1853 switch (GET_CODE (XEXP (op
, 0)))
1870 /* Given an rtx *P, if it is a sum containing an integer constant term,
1871 return the location (type rtx *) of the pointer to that constant term.
1872 Otherwise, return a null pointer. */
1875 find_constant_term_loc (rtx
*p
)
1878 enum rtx_code code
= GET_CODE (*p
);
1880 /* If *P IS such a constant term, P is its location. */
1882 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1886 /* Otherwise, if not a sum, it has no constant term. */
1888 if (GET_CODE (*p
) != PLUS
)
1891 /* If one of the summands is constant, return its location. */
1893 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1894 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1897 /* Otherwise, check each summand for containing a constant term. */
1899 if (XEXP (*p
, 0) != 0)
1901 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1906 if (XEXP (*p
, 1) != 0)
1908 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1916 /* Return 1 if OP is a memory reference
1917 whose address contains no side effects
1918 and remains valid after the addition
1919 of a positive integer less than the
1920 size of the object being referenced.
1922 We assume that the original address is valid and do not check it.
1924 This uses strict_memory_address_p as a subroutine, so
1925 don't use it before reload. */
1928 offsettable_memref_p (rtx op
)
1930 return ((MEM_P (op
))
1931 && offsettable_address_addr_space_p (1, GET_MODE (op
), XEXP (op
, 0),
1932 MEM_ADDR_SPACE (op
)));
1935 /* Similar, but don't require a strictly valid mem ref:
1936 consider pseudo-regs valid as index or base regs. */
1939 offsettable_nonstrict_memref_p (rtx op
)
1941 return ((MEM_P (op
))
1942 && offsettable_address_addr_space_p (0, GET_MODE (op
), XEXP (op
, 0),
1943 MEM_ADDR_SPACE (op
)));
1946 /* Return 1 if Y is a memory address which contains no side effects
1947 and would remain valid for address space AS after the addition of
1948 a positive integer less than the size of that mode.
1950 We assume that the original address is valid and do not check it.
1951 We do check that it is valid for narrower modes.
1953 If STRICTP is nonzero, we require a strictly valid address,
1954 for the sake of use in reload.c. */
1957 offsettable_address_addr_space_p (int strictp
, enum machine_mode mode
, rtx y
,
1960 enum rtx_code ycode
= GET_CODE (y
);
1964 int (*addressp
) (enum machine_mode
, rtx
, addr_space_t
) =
1965 (strictp
? strict_memory_address_addr_space_p
1966 : memory_address_addr_space_p
);
1967 unsigned int mode_sz
= GET_MODE_SIZE (mode
);
1969 if (CONSTANT_ADDRESS_P (y
))
1972 /* Adjusting an offsettable address involves changing to a narrower mode.
1973 Make sure that's OK. */
1975 if (mode_dependent_address_p (y
, as
))
1978 enum machine_mode address_mode
= GET_MODE (y
);
1979 if (address_mode
== VOIDmode
)
1980 address_mode
= targetm
.addr_space
.address_mode (as
);
1981 #ifdef POINTERS_EXTEND_UNSIGNED
1982 enum machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
1985 /* ??? How much offset does an offsettable BLKmode reference need?
1986 Clearly that depends on the situation in which it's being used.
1987 However, the current situation in which we test 0xffffffff is
1988 less than ideal. Caveat user. */
1990 mode_sz
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
1992 /* If the expression contains a constant term,
1993 see if it remains valid when max possible offset is added. */
1995 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
2000 *y2
= plus_constant (address_mode
, *y2
, mode_sz
- 1);
2001 /* Use QImode because an odd displacement may be automatically invalid
2002 for any wider mode. But it should be valid for a single byte. */
2003 good
= (*addressp
) (QImode
, y
, as
);
2005 /* In any case, restore old contents of memory. */
2010 if (GET_RTX_CLASS (ycode
) == RTX_AUTOINC
)
2013 /* The offset added here is chosen as the maximum offset that
2014 any instruction could need to add when operating on something
2015 of the specified mode. We assume that if Y and Y+c are
2016 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2017 go inside a LO_SUM here, so we do so as well. */
2018 if (GET_CODE (y
) == LO_SUM
2020 && mode_sz
<= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
)
2021 z
= gen_rtx_LO_SUM (address_mode
, XEXP (y
, 0),
2022 plus_constant (address_mode
, XEXP (y
, 1),
2024 #ifdef POINTERS_EXTEND_UNSIGNED
2025 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2026 else if (POINTERS_EXTEND_UNSIGNED
> 0
2027 && GET_CODE (y
) == ZERO_EXTEND
2028 && GET_MODE (XEXP (y
, 0)) == pointer_mode
)
2029 z
= gen_rtx_ZERO_EXTEND (address_mode
,
2030 plus_constant (pointer_mode
, XEXP (y
, 0),
2034 z
= plus_constant (address_mode
, y
, mode_sz
- 1);
2036 /* Use QImode because an odd displacement may be automatically invalid
2037 for any wider mode. But it should be valid for a single byte. */
2038 return (*addressp
) (QImode
, z
, as
);
2041 /* Return 1 if ADDR is an address-expression whose effect depends
2042 on the mode of the memory reference it is used in.
2044 ADDRSPACE is the address space associated with the address.
2046 Autoincrement addressing is a typical example of mode-dependence
2047 because the amount of the increment depends on the mode. */
2050 mode_dependent_address_p (rtx addr
, addr_space_t addrspace
)
2052 /* Auto-increment addressing with anything other than post_modify
2053 or pre_modify always introduces a mode dependency. Catch such
2054 cases now instead of deferring to the target. */
2055 if (GET_CODE (addr
) == PRE_INC
2056 || GET_CODE (addr
) == POST_INC
2057 || GET_CODE (addr
) == PRE_DEC
2058 || GET_CODE (addr
) == POST_DEC
)
2061 return targetm
.mode_dependent_address_p (addr
, addrspace
);
2064 /* Return true if boolean attribute ATTR is supported. */
2067 have_bool_attr (bool_attr attr
)
2072 return HAVE_ATTR_enabled
;
2073 case BA_PREFERRED_FOR_SIZE
:
2074 return HAVE_ATTR_enabled
|| HAVE_ATTR_preferred_for_size
;
2075 case BA_PREFERRED_FOR_SPEED
:
2076 return HAVE_ATTR_enabled
|| HAVE_ATTR_preferred_for_speed
;
2081 /* Return the value of ATTR for instruction INSN. */
2084 get_bool_attr (rtx_insn
*insn
, bool_attr attr
)
2089 return get_attr_enabled (insn
);
2090 case BA_PREFERRED_FOR_SIZE
:
2091 return get_attr_enabled (insn
) && get_attr_preferred_for_size (insn
);
2092 case BA_PREFERRED_FOR_SPEED
:
2093 return get_attr_enabled (insn
) && get_attr_preferred_for_speed (insn
);
2098 /* Like get_bool_attr_mask, but don't use the cache. */
2100 static alternative_mask
2101 get_bool_attr_mask_uncached (rtx_insn
*insn
, bool_attr attr
)
2103 /* Temporarily install enough information for get_attr_<foo> to assume
2104 that the insn operands are already cached. As above, the attribute
2105 mustn't depend on the values of operands, so we don't provide their
2106 real values here. */
2107 rtx old_insn
= recog_data
.insn
;
2108 int old_alternative
= which_alternative
;
2110 recog_data
.insn
= insn
;
2111 alternative_mask mask
= ALL_ALTERNATIVES
;
2112 int n_alternatives
= insn_data
[INSN_CODE (insn
)].n_alternatives
;
2113 for (int i
= 0; i
< n_alternatives
; i
++)
2115 which_alternative
= i
;
2116 if (!get_bool_attr (insn
, attr
))
2117 mask
&= ~ALTERNATIVE_BIT (i
);
2120 recog_data
.insn
= old_insn
;
2121 which_alternative
= old_alternative
;
2125 /* Return the mask of operand alternatives that are allowed for INSN
2126 by boolean attribute ATTR. This mask depends only on INSN and on
2127 the current target; it does not depend on things like the values of
2130 static alternative_mask
2131 get_bool_attr_mask (rtx_insn
*insn
, bool_attr attr
)
2133 /* Quick exit for asms and for targets that don't use these attributes. */
2134 int code
= INSN_CODE (insn
);
2135 if (code
< 0 || !have_bool_attr (attr
))
2136 return ALL_ALTERNATIVES
;
2138 /* Calling get_attr_<foo> can be expensive, so cache the mask
2140 if (!this_target_recog
->x_bool_attr_masks
[code
][attr
])
2141 this_target_recog
->x_bool_attr_masks
[code
][attr
]
2142 = get_bool_attr_mask_uncached (insn
, attr
);
2143 return this_target_recog
->x_bool_attr_masks
[code
][attr
];
2146 /* Return the set of alternatives of INSN that are allowed by the current
2150 get_enabled_alternatives (rtx_insn
*insn
)
2152 return get_bool_attr_mask (insn
, BA_ENABLED
);
2155 /* Return the set of alternatives of INSN that are allowed by the current
2156 target and are preferred for the current size/speed optimization
2160 get_preferred_alternatives (rtx_insn
*insn
)
2162 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn
)))
2163 return get_bool_attr_mask (insn
, BA_PREFERRED_FOR_SPEED
);
2165 return get_bool_attr_mask (insn
, BA_PREFERRED_FOR_SIZE
);
2168 /* Return the set of alternatives of INSN that are allowed by the current
2169 target and are preferred for the size/speed optimization choice
2170 associated with BB. Passing a separate BB is useful if INSN has not
2171 been emitted yet or if we are considering moving it to a different
2175 get_preferred_alternatives (rtx_insn
*insn
, basic_block bb
)
2177 if (optimize_bb_for_speed_p (bb
))
2178 return get_bool_attr_mask (insn
, BA_PREFERRED_FOR_SPEED
);
2180 return get_bool_attr_mask (insn
, BA_PREFERRED_FOR_SIZE
);
2183 /* Assert that the cached boolean attributes for INSN are still accurate.
2184 The backend is required to define these attributes in a way that only
2185 depends on the current target (rather than operands, compiler phase,
2189 check_bool_attrs (rtx_insn
*insn
)
2191 int code
= INSN_CODE (insn
);
2193 for (int i
= 0; i
<= BA_LAST
; ++i
)
2195 enum bool_attr attr
= (enum bool_attr
) i
;
2196 if (this_target_recog
->x_bool_attr_masks
[code
][attr
])
2197 gcc_assert (this_target_recog
->x_bool_attr_masks
[code
][attr
]
2198 == get_bool_attr_mask_uncached (insn
, attr
));
2203 /* Like extract_insn, but save insn extracted and don't extract again, when
2204 called again for the same insn expecting that recog_data still contain the
2205 valid information. This is used primary by gen_attr infrastructure that
2206 often does extract insn again and again. */
2208 extract_insn_cached (rtx_insn
*insn
)
2210 if (recog_data
.insn
== insn
&& INSN_CODE (insn
) >= 0)
2212 extract_insn (insn
);
2213 recog_data
.insn
= insn
;
2216 /* Do uncached extract_insn, constrain_operands and complain about failures.
2217 This should be used when extracting a pre-existing constrained instruction
2218 if the caller wants to know which alternative was chosen. */
2220 extract_constrain_insn (rtx_insn
*insn
)
2222 extract_insn (insn
);
2223 if (!constrain_operands (reload_completed
, get_enabled_alternatives (insn
)))
2224 fatal_insn_not_found (insn
);
2227 /* Do cached extract_insn, constrain_operands and complain about failures.
2228 Used by insn_attrtab. */
2230 extract_constrain_insn_cached (rtx_insn
*insn
)
2232 extract_insn_cached (insn
);
2233 if (which_alternative
== -1
2234 && !constrain_operands (reload_completed
,
2235 get_enabled_alternatives (insn
)))
2236 fatal_insn_not_found (insn
);
2239 /* Do cached constrain_operands on INSN and complain about failures. */
2241 constrain_operands_cached (rtx_insn
*insn
, int strict
)
2243 if (which_alternative
== -1)
2244 return constrain_operands (strict
, get_enabled_alternatives (insn
));
2249 /* Analyze INSN and fill in recog_data. */
2252 extract_insn (rtx_insn
*insn
)
2257 rtx body
= PATTERN (insn
);
2259 recog_data
.n_operands
= 0;
2260 recog_data
.n_alternatives
= 0;
2261 recog_data
.n_dups
= 0;
2262 recog_data
.is_asm
= false;
2264 switch (GET_CODE (body
))
2275 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
2280 if ((GET_CODE (XVECEXP (body
, 0, 0)) == SET
2281 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
2282 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
2288 recog_data
.n_operands
= noperands
= asm_noperands (body
);
2291 /* This insn is an `asm' with operands. */
2293 /* expand_asm_operands makes sure there aren't too many operands. */
2294 gcc_assert (noperands
<= MAX_RECOG_OPERANDS
);
2296 /* Now get the operand values and constraints out of the insn. */
2297 decode_asm_operands (body
, recog_data
.operand
,
2298 recog_data
.operand_loc
,
2299 recog_data
.constraints
,
2300 recog_data
.operand_mode
, NULL
);
2301 memset (recog_data
.is_operator
, 0, sizeof recog_data
.is_operator
);
2304 const char *p
= recog_data
.constraints
[0];
2305 recog_data
.n_alternatives
= 1;
2307 recog_data
.n_alternatives
+= (*p
++ == ',');
2309 recog_data
.is_asm
= true;
2312 fatal_insn_not_found (insn
);
2316 /* Ordinary insn: recognize it, get the operands via insn_extract
2317 and get the constraints. */
2319 icode
= recog_memoized (insn
);
2321 fatal_insn_not_found (insn
);
2323 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
2324 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
2325 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
2327 insn_extract (insn
);
2329 for (i
= 0; i
< noperands
; i
++)
2331 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2332 recog_data
.is_operator
[i
] = insn_data
[icode
].operand
[i
].is_operator
;
2333 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
2334 /* VOIDmode match_operands gets mode from their real operand. */
2335 if (recog_data
.operand_mode
[i
] == VOIDmode
)
2336 recog_data
.operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2339 for (i
= 0; i
< noperands
; i
++)
2340 recog_data
.operand_type
[i
]
2341 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
2342 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
2345 gcc_assert (recog_data
.n_alternatives
<= MAX_RECOG_ALTERNATIVES
);
2347 recog_data
.insn
= NULL
;
2348 which_alternative
= -1;
2351 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2352 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2353 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2354 has N_OPERANDS entries. */
2357 preprocess_constraints (int n_operands
, int n_alternatives
,
2358 const char **constraints
,
2359 operand_alternative
*op_alt_base
)
2361 for (int i
= 0; i
< n_operands
; i
++)
2364 struct operand_alternative
*op_alt
;
2365 const char *p
= constraints
[i
];
2367 op_alt
= op_alt_base
;
2369 for (j
= 0; j
< n_alternatives
; j
++, op_alt
+= n_operands
)
2371 op_alt
[i
].cl
= NO_REGS
;
2372 op_alt
[i
].constraint
= p
;
2373 op_alt
[i
].matches
= -1;
2374 op_alt
[i
].matched
= -1;
2376 if (*p
== '\0' || *p
== ',')
2378 op_alt
[i
].anything_ok
= 1;
2388 while (c
!= ',' && c
!= '\0');
2389 if (c
== ',' || c
== '\0')
2398 op_alt
[i
].reject
+= 6;
2401 op_alt
[i
].reject
+= 600;
2404 op_alt
[i
].earlyclobber
= 1;
2407 case '0': case '1': case '2': case '3': case '4':
2408 case '5': case '6': case '7': case '8': case '9':
2411 op_alt
[i
].matches
= strtoul (p
, &end
, 10);
2412 op_alt
[op_alt
[i
].matches
].matched
= i
;
2418 op_alt
[i
].anything_ok
= 1;
2423 reg_class_subunion
[(int) op_alt
[i
].cl
][(int) GENERAL_REGS
];
2427 enum constraint_num cn
= lookup_constraint (p
);
2429 switch (get_constraint_type (cn
))
2432 cl
= reg_class_for_constraint (cn
);
2434 op_alt
[i
].cl
= reg_class_subunion
[op_alt
[i
].cl
][cl
];
2441 op_alt
[i
].memory_ok
= 1;
2445 op_alt
[i
].is_address
= 1;
2447 = (reg_class_subunion
2448 [(int) op_alt
[i
].cl
]
2449 [(int) base_reg_class (VOIDmode
, ADDR_SPACE_GENERIC
,
2450 ADDRESS
, SCRATCH
)]);
2458 p
+= CONSTRAINT_LEN (c
, p
);
2464 /* Return an array of operand_alternative instructions for
2465 instruction ICODE. */
2467 const operand_alternative
*
2468 preprocess_insn_constraints (int icode
)
2470 gcc_checking_assert (IN_RANGE (icode
, 0, LAST_INSN_CODE
));
2471 if (this_target_recog
->x_op_alt
[icode
])
2472 return this_target_recog
->x_op_alt
[icode
];
2474 int n_operands
= insn_data
[icode
].n_operands
;
2475 if (n_operands
== 0)
2477 /* Always provide at least one alternative so that which_op_alt ()
2478 works correctly. If the instruction has 0 alternatives (i.e. all
2479 constraint strings are empty) then each operand in this alternative
2480 will have anything_ok set. */
2481 int n_alternatives
= MAX (insn_data
[icode
].n_alternatives
, 1);
2482 int n_entries
= n_operands
* n_alternatives
;
2484 operand_alternative
*op_alt
= XCNEWVEC (operand_alternative
, n_entries
);
2485 const char **constraints
= XALLOCAVEC (const char *, n_operands
);
2487 for (int i
= 0; i
< n_operands
; ++i
)
2488 constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2489 preprocess_constraints (n_operands
, n_alternatives
, constraints
, op_alt
);
2491 this_target_recog
->x_op_alt
[icode
] = op_alt
;
2495 /* After calling extract_insn, you can use this function to extract some
2496 information from the constraint strings into a more usable form.
2497 The collected data is stored in recog_op_alt. */
2500 preprocess_constraints (rtx insn
)
2502 int icode
= INSN_CODE (insn
);
2504 recog_op_alt
= preprocess_insn_constraints (icode
);
2507 int n_operands
= recog_data
.n_operands
;
2508 int n_alternatives
= recog_data
.n_alternatives
;
2509 int n_entries
= n_operands
* n_alternatives
;
2510 memset (asm_op_alt
, 0, n_entries
* sizeof (operand_alternative
));
2511 preprocess_constraints (n_operands
, n_alternatives
,
2512 recog_data
.constraints
, asm_op_alt
);
2513 recog_op_alt
= asm_op_alt
;
2517 /* Check the operands of an insn against the insn's operand constraints
2518 and return 1 if they match any of the alternatives in ALTERNATIVES.
2520 The information about the insn's operands, constraints, operand modes
2521 etc. is obtained from the global variables set up by extract_insn.
2523 WHICH_ALTERNATIVE is set to a number which indicates which
2524 alternative of constraints was matched: 0 for the first alternative,
2525 1 for the next, etc.
2527 In addition, when two operands are required to match
2528 and it happens that the output operand is (reg) while the
2529 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2530 make the output operand look like the input.
2531 This is because the output operand is the one the template will print.
2533 This is used in final, just before printing the assembler code and by
2534 the routines that determine an insn's attribute.
2536 If STRICT is a positive nonzero value, it means that we have been
2537 called after reload has been completed. In that case, we must
2538 do all checks strictly. If it is zero, it means that we have been called
2539 before reload has completed. In that case, we first try to see if we can
2540 find an alternative that matches strictly. If not, we try again, this
2541 time assuming that reload will fix up the insn. This provides a "best
2542 guess" for the alternative and is used to compute attributes of insns prior
2543 to reload. A negative value of STRICT is used for this internal call. */
2551 constrain_operands (int strict
, alternative_mask alternatives
)
2553 const char *constraints
[MAX_RECOG_OPERANDS
];
2554 int matching_operands
[MAX_RECOG_OPERANDS
];
2555 int earlyclobber
[MAX_RECOG_OPERANDS
];
2558 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2559 int funny_match_index
;
2561 which_alternative
= 0;
2562 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2565 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2567 constraints
[c
] = recog_data
.constraints
[c
];
2568 matching_operands
[c
] = -1;
2573 int seen_earlyclobber_at
= -1;
2576 funny_match_index
= 0;
2578 if (!TEST_BIT (alternatives
, which_alternative
))
2582 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2583 constraints
[i
] = skip_alternative (constraints
[i
]);
2585 which_alternative
++;
2589 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2591 rtx op
= recog_data
.operand
[opno
];
2592 enum machine_mode mode
= GET_MODE (op
);
2593 const char *p
= constraints
[opno
];
2599 earlyclobber
[opno
] = 0;
2601 /* A unary operator may be accepted by the predicate, but it
2602 is irrelevant for matching constraints. */
2606 if (GET_CODE (op
) == SUBREG
)
2608 if (REG_P (SUBREG_REG (op
))
2609 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2610 offset
= subreg_regno_offset (REGNO (SUBREG_REG (op
)),
2611 GET_MODE (SUBREG_REG (op
)),
2614 op
= SUBREG_REG (op
);
2617 /* An empty constraint or empty alternative
2618 allows anything which matched the pattern. */
2619 if (*p
== 0 || *p
== ',')
2623 switch (c
= *p
, len
= CONSTRAINT_LEN (c
, p
), c
)
2633 /* Ignore rest of this alternative as far as
2634 constraint checking is concerned. */
2637 while (*p
&& *p
!= ',');
2642 earlyclobber
[opno
] = 1;
2643 if (seen_earlyclobber_at
< 0)
2644 seen_earlyclobber_at
= opno
;
2647 case '0': case '1': case '2': case '3': case '4':
2648 case '5': case '6': case '7': case '8': case '9':
2650 /* This operand must be the same as a previous one.
2651 This kind of constraint is used for instructions such
2652 as add when they take only two operands.
2654 Note that the lower-numbered operand is passed first.
2656 If we are not testing strictly, assume that this
2657 constraint will be satisfied. */
2662 match
= strtoul (p
, &end
, 10);
2669 rtx op1
= recog_data
.operand
[match
];
2670 rtx op2
= recog_data
.operand
[opno
];
2672 /* A unary operator may be accepted by the predicate,
2673 but it is irrelevant for matching constraints. */
2675 op1
= XEXP (op1
, 0);
2677 op2
= XEXP (op2
, 0);
2679 val
= operands_match_p (op1
, op2
);
2682 matching_operands
[opno
] = match
;
2683 matching_operands
[match
] = opno
;
2688 /* If output is *x and input is *--x, arrange later
2689 to change the output to *--x as well, since the
2690 output op is the one that will be printed. */
2691 if (val
== 2 && strict
> 0)
2693 funny_match
[funny_match_index
].this_op
= opno
;
2694 funny_match
[funny_match_index
++].other
= match
;
2701 /* p is used for address_operands. When we are called by
2702 gen_reload, no one will have checked that the address is
2703 strictly valid, i.e., that all pseudos requiring hard regs
2704 have gotten them. */
2706 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2711 /* No need to check general_operand again;
2712 it was done in insn-recog.c. Well, except that reload
2713 doesn't check the validity of its replacements, but
2714 that should only matter when there's a bug. */
2716 /* Anything goes unless it is a REG and really has a hard reg
2717 but the hard reg is not in the class GENERAL_REGS. */
2721 || GENERAL_REGS
== ALL_REGS
2722 || (reload_in_progress
2723 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2724 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2727 else if (strict
< 0 || general_operand (op
, mode
))
2733 enum constraint_num cn
= lookup_constraint (p
);
2734 enum reg_class cl
= reg_class_for_constraint (cn
);
2740 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2741 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2743 && reg_fits_class_p (op
, cl
, offset
, mode
)))
2747 else if (constraint_satisfied_p (op
, cn
))
2750 else if (insn_extra_memory_constraint (cn
)
2751 /* Every memory operand can be reloaded to fit. */
2752 && ((strict
< 0 && MEM_P (op
))
2753 /* Before reload, accept what reload can turn
2755 || (strict
< 0 && CONSTANT_P (op
))
2756 /* During reload, accept a pseudo */
2757 || (reload_in_progress
&& REG_P (op
)
2758 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)))
2760 else if (insn_extra_address_constraint (cn
)
2761 /* Every address operand can be reloaded to fit. */
2764 /* Cater to architectures like IA-64 that define extra memory
2765 constraints without using define_memory_constraint. */
2766 else if (reload_in_progress
2768 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
2769 && reg_renumber
[REGNO (op
)] < 0
2770 && reg_equiv_mem (REGNO (op
)) != 0
2771 && constraint_satisfied_p
2772 (reg_equiv_mem (REGNO (op
)), cn
))
2777 while (p
+= len
, c
);
2779 constraints
[opno
] = p
;
2780 /* If this operand did not win somehow,
2781 this alternative loses. */
2785 /* This alternative won; the operands are ok.
2786 Change whichever operands this alternative says to change. */
2791 /* See if any earlyclobber operand conflicts with some other
2794 if (strict
> 0 && seen_earlyclobber_at
>= 0)
2795 for (eopno
= seen_earlyclobber_at
;
2796 eopno
< recog_data
.n_operands
;
2798 /* Ignore earlyclobber operands now in memory,
2799 because we would often report failure when we have
2800 two memory operands, one of which was formerly a REG. */
2801 if (earlyclobber
[eopno
]
2802 && REG_P (recog_data
.operand
[eopno
]))
2803 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2804 if ((MEM_P (recog_data
.operand
[opno
])
2805 || recog_data
.operand_type
[opno
] != OP_OUT
)
2807 /* Ignore things like match_operator operands. */
2808 && *recog_data
.constraints
[opno
] != 0
2809 && ! (matching_operands
[opno
] == eopno
2810 && operands_match_p (recog_data
.operand
[opno
],
2811 recog_data
.operand
[eopno
]))
2812 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2813 recog_data
.operand
[eopno
]))
2818 while (--funny_match_index
>= 0)
2820 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2821 = recog_data
.operand
[funny_match
[funny_match_index
].this_op
];
2825 /* For operands without < or > constraints reject side-effects. */
2826 if (recog_data
.is_asm
)
2828 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2829 if (MEM_P (recog_data
.operand
[opno
]))
2830 switch (GET_CODE (XEXP (recog_data
.operand
[opno
], 0)))
2838 if (strchr (recog_data
.constraints
[opno
], '<') == NULL
2839 && strchr (recog_data
.constraints
[opno
], '>')
2852 which_alternative
++;
2854 while (which_alternative
< recog_data
.n_alternatives
);
2856 which_alternative
= -1;
2857 /* If we are about to reject this, but we are not to test strictly,
2858 try a very loose test. Only return failure if it fails also. */
2860 return constrain_operands (-1, alternatives
);
2865 /* Return true iff OPERAND (assumed to be a REG rtx)
2866 is a hard reg in class CLASS when its regno is offset by OFFSET
2867 and changed to mode MODE.
2868 If REG occupies multiple hard regs, all of them must be in CLASS. */
2871 reg_fits_class_p (const_rtx operand
, reg_class_t cl
, int offset
,
2872 enum machine_mode mode
)
2874 unsigned int regno
= REGNO (operand
);
2879 /* Regno must not be a pseudo register. Offset may be negative. */
2880 return (HARD_REGISTER_NUM_P (regno
)
2881 && HARD_REGISTER_NUM_P (regno
+ offset
)
2882 && in_hard_reg_set_p (reg_class_contents
[(int) cl
], mode
,
2886 /* Split single instruction. Helper function for split_all_insns and
2887 split_all_insns_noflow. Return last insn in the sequence if successful,
2888 or NULL if unsuccessful. */
2891 split_insn (rtx_insn
*insn
)
2893 /* Split insns here to get max fine-grain parallelism. */
2894 rtx_insn
*first
= PREV_INSN (insn
);
2895 rtx_insn
*last
= try_split (PATTERN (insn
), insn
, 1);
2896 rtx insn_set
, last_set
, note
;
2901 /* If the original instruction was a single set that was known to be
2902 equivalent to a constant, see if we can say the same about the last
2903 instruction in the split sequence. The two instructions must set
2904 the same destination. */
2905 insn_set
= single_set (insn
);
2908 last_set
= single_set (last
);
2909 if (last_set
&& rtx_equal_p (SET_DEST (last_set
), SET_DEST (insn_set
)))
2911 note
= find_reg_equal_equiv_note (insn
);
2912 if (note
&& CONSTANT_P (XEXP (note
, 0)))
2913 set_unique_reg_note (last
, REG_EQUAL
, XEXP (note
, 0));
2914 else if (CONSTANT_P (SET_SRC (insn_set
)))
2915 set_unique_reg_note (last
, REG_EQUAL
,
2916 copy_rtx (SET_SRC (insn_set
)));
2920 /* try_split returns the NOTE that INSN became. */
2921 SET_INSN_DELETED (insn
);
2923 /* ??? Coddle to md files that generate subregs in post-reload
2924 splitters instead of computing the proper hard register. */
2925 if (reload_completed
&& first
!= last
)
2927 first
= NEXT_INSN (first
);
2931 cleanup_subreg_operands (first
);
2934 first
= NEXT_INSN (first
);
2941 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2944 split_all_insns (void)
2950 blocks
= sbitmap_alloc (last_basic_block_for_fn (cfun
));
2951 bitmap_clear (blocks
);
2954 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
2956 rtx_insn
*insn
, *next
;
2957 bool finish
= false;
2959 rtl_profile_for_bb (bb
);
2960 for (insn
= BB_HEAD (bb
); !finish
; insn
= next
)
2962 /* Can't use `next_real_insn' because that might go across
2963 CODE_LABELS and short-out basic blocks. */
2964 next
= NEXT_INSN (insn
);
2965 finish
= (insn
== BB_END (bb
));
2968 rtx set
= single_set (insn
);
2970 /* Don't split no-op move insns. These should silently
2971 disappear later in final. Splitting such insns would
2972 break the code that handles LIBCALL blocks. */
2973 if (set
&& set_noop_p (set
))
2975 /* Nops get in the way while scheduling, so delete them
2976 now if register allocation has already been done. It
2977 is too risky to try to do this before register
2978 allocation, and there are unlikely to be very many
2979 nops then anyways. */
2980 if (reload_completed
)
2981 delete_insn_and_edges (insn
);
2985 if (split_insn (insn
))
2987 bitmap_set_bit (blocks
, bb
->index
);
2995 default_rtl_profile ();
2997 find_many_sub_basic_blocks (blocks
);
2999 #ifdef ENABLE_CHECKING
3000 verify_flow_info ();
3003 sbitmap_free (blocks
);
3006 /* Same as split_all_insns, but do not expect CFG to be available.
3007 Used by machine dependent reorg passes. */
3010 split_all_insns_noflow (void)
3012 rtx_insn
*next
, *insn
;
3014 for (insn
= get_insns (); insn
; insn
= next
)
3016 next
= NEXT_INSN (insn
);
3019 /* Don't split no-op move insns. These should silently
3020 disappear later in final. Splitting such insns would
3021 break the code that handles LIBCALL blocks. */
3022 rtx set
= single_set (insn
);
3023 if (set
&& set_noop_p (set
))
3025 /* Nops get in the way while scheduling, so delete them
3026 now if register allocation has already been done. It
3027 is too risky to try to do this before register
3028 allocation, and there are unlikely to be very many
3031 ??? Should we use delete_insn when the CFG isn't valid? */
3032 if (reload_completed
)
3033 delete_insn_and_edges (insn
);
3042 #ifdef HAVE_peephole2
3043 struct peep2_insn_data
3049 static struct peep2_insn_data peep2_insn_data
[MAX_INSNS_PER_PEEP2
+ 1];
3050 static int peep2_current
;
3052 static bool peep2_do_rebuild_jump_labels
;
3053 static bool peep2_do_cleanup_cfg
;
3055 /* The number of instructions available to match a peep2. */
3056 int peep2_current_count
;
3058 /* A non-insn marker indicating the last insn of the block.
3059 The live_before regset for this element is correct, indicating
3060 DF_LIVE_OUT for the block. */
3061 #define PEEP2_EOB pc_rtx
3063 /* Wrap N to fit into the peep2_insn_data buffer. */
3066 peep2_buf_position (int n
)
3068 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
3069 n
-= MAX_INSNS_PER_PEEP2
+ 1;
3073 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3074 does not exist. Used by the recognizer to find the next insn to match
3075 in a multi-insn pattern. */
3078 peep2_next_insn (int n
)
3080 gcc_assert (n
<= peep2_current_count
);
3082 n
= peep2_buf_position (peep2_current
+ n
);
3084 return peep2_insn_data
[n
].insn
;
3087 /* Return true if REGNO is dead before the Nth non-note insn
3091 peep2_regno_dead_p (int ofs
, int regno
)
3093 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
3095 ofs
= peep2_buf_position (peep2_current
+ ofs
);
3097 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
3099 return ! REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
);
3102 /* Similarly for a REG. */
3105 peep2_reg_dead_p (int ofs
, rtx reg
)
3109 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
3111 ofs
= peep2_buf_position (peep2_current
+ ofs
);
3113 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
3115 regno
= REGNO (reg
);
3116 n
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
3118 if (REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
+ n
))
3123 /* Regno offset to be used in the register search. */
3124 static int search_ofs
;
3126 /* Try to find a hard register of mode MODE, matching the register class in
3127 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3128 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3129 in which case the only condition is that the register must be available
3130 before CURRENT_INSN.
3131 Registers that already have bits set in REG_SET will not be considered.
3133 If an appropriate register is available, it will be returned and the
3134 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3138 peep2_find_free_register (int from
, int to
, const char *class_str
,
3139 enum machine_mode mode
, HARD_REG_SET
*reg_set
)
3146 gcc_assert (from
< MAX_INSNS_PER_PEEP2
+ 1);
3147 gcc_assert (to
< MAX_INSNS_PER_PEEP2
+ 1);
3149 from
= peep2_buf_position (peep2_current
+ from
);
3150 to
= peep2_buf_position (peep2_current
+ to
);
3152 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
3153 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
3157 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
3159 /* Don't use registers set or clobbered by the insn. */
3160 FOR_EACH_INSN_DEF (def
, peep2_insn_data
[from
].insn
)
3161 SET_HARD_REG_BIT (live
, DF_REF_REGNO (def
));
3163 from
= peep2_buf_position (from
+ 1);
3166 cl
= reg_class_for_constraint (lookup_constraint (class_str
));
3168 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3170 int raw_regno
, regno
, success
, j
;
3172 /* Distribute the free registers as much as possible. */
3173 raw_regno
= search_ofs
+ i
;
3174 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
3175 raw_regno
-= FIRST_PSEUDO_REGISTER
;
3176 #ifdef REG_ALLOC_ORDER
3177 regno
= reg_alloc_order
[raw_regno
];
3182 /* Can it support the mode we need? */
3183 if (! HARD_REGNO_MODE_OK (regno
, mode
))
3187 for (j
= 0; success
&& j
< hard_regno_nregs
[regno
][mode
]; j
++)
3189 /* Don't allocate fixed registers. */
3190 if (fixed_regs
[regno
+ j
])
3195 /* Don't allocate global registers. */
3196 if (global_regs
[regno
+ j
])
3201 /* Make sure the register is of the right class. */
3202 if (! TEST_HARD_REG_BIT (reg_class_contents
[cl
], regno
+ j
))
3207 /* And that we don't create an extra save/restore. */
3208 if (! call_used_regs
[regno
+ j
] && ! df_regs_ever_live_p (regno
+ j
))
3214 if (! targetm
.hard_regno_scratch_ok (regno
+ j
))
3220 /* And we don't clobber traceback for noreturn functions. */
3221 if ((regno
+ j
== FRAME_POINTER_REGNUM
3222 || regno
+ j
== HARD_FRAME_POINTER_REGNUM
)
3223 && (! reload_completed
|| frame_pointer_needed
))
3229 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
3230 || TEST_HARD_REG_BIT (live
, regno
+ j
))
3239 add_to_hard_reg_set (reg_set
, mode
, regno
);
3241 /* Start the next search with the next register. */
3242 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
3244 search_ofs
= raw_regno
;
3246 return gen_rtx_REG (mode
, regno
);
3254 /* Forget all currently tracked instructions, only remember current
3258 peep2_reinit_state (regset live
)
3262 /* Indicate that all slots except the last holds invalid data. */
3263 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
; ++i
)
3264 peep2_insn_data
[i
].insn
= NULL_RTX
;
3265 peep2_current_count
= 0;
3267 /* Indicate that the last slot contains live_after data. */
3268 peep2_insn_data
[MAX_INSNS_PER_PEEP2
].insn
= PEEP2_EOB
;
3269 peep2_current
= MAX_INSNS_PER_PEEP2
;
3271 COPY_REG_SET (peep2_insn_data
[MAX_INSNS_PER_PEEP2
].live_before
, live
);
3274 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3275 starting at INSN. Perform the replacement, removing the old insns and
3276 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3277 if the replacement is rejected. */
3280 peep2_attempt (basic_block bb
, rtx uncast_insn
, int match_len
, rtx_insn
*attempt
)
3282 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3284 rtx_insn
*last
, *before_try
, *x
;
3285 rtx eh_note
, as_note
;
3288 bool was_call
= false;
3290 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3291 match more than one insn, or to be split into more than one insn. */
3292 old_insn
= as_a
<rtx_insn
*> (peep2_insn_data
[peep2_current
].insn
);
3293 if (RTX_FRAME_RELATED_P (old_insn
))
3295 bool any_note
= false;
3301 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3302 may be in the stream for the purpose of register allocation. */
3303 if (active_insn_p (attempt
))
3306 new_insn
= next_active_insn (attempt
);
3307 if (next_active_insn (new_insn
))
3310 /* We have a 1-1 replacement. Copy over any frame-related info. */
3311 RTX_FRAME_RELATED_P (new_insn
) = 1;
3313 /* Allow the backend to fill in a note during the split. */
3314 for (note
= REG_NOTES (new_insn
); note
; note
= XEXP (note
, 1))
3315 switch (REG_NOTE_KIND (note
))
3317 case REG_FRAME_RELATED_EXPR
:
3318 case REG_CFA_DEF_CFA
:
3319 case REG_CFA_ADJUST_CFA
:
3320 case REG_CFA_OFFSET
:
3321 case REG_CFA_REGISTER
:
3322 case REG_CFA_EXPRESSION
:
3323 case REG_CFA_RESTORE
:
3324 case REG_CFA_SET_VDRAP
:
3331 /* If the backend didn't supply a note, copy one over. */
3333 for (note
= REG_NOTES (old_insn
); note
; note
= XEXP (note
, 1))
3334 switch (REG_NOTE_KIND (note
))
3336 case REG_FRAME_RELATED_EXPR
:
3337 case REG_CFA_DEF_CFA
:
3338 case REG_CFA_ADJUST_CFA
:
3339 case REG_CFA_OFFSET
:
3340 case REG_CFA_REGISTER
:
3341 case REG_CFA_EXPRESSION
:
3342 case REG_CFA_RESTORE
:
3343 case REG_CFA_SET_VDRAP
:
3344 add_reg_note (new_insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3351 /* If there still isn't a note, make sure the unwind info sees the
3352 same expression as before the split. */
3355 rtx old_set
, new_set
;
3357 /* The old insn had better have been simple, or annotated. */
3358 old_set
= single_set (old_insn
);
3359 gcc_assert (old_set
!= NULL
);
3361 new_set
= single_set (new_insn
);
3362 if (!new_set
|| !rtx_equal_p (new_set
, old_set
))
3363 add_reg_note (new_insn
, REG_FRAME_RELATED_EXPR
, old_set
);
3366 /* Copy prologue/epilogue status. This is required in order to keep
3367 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3368 maybe_copy_prologue_epilogue_insn (old_insn
, new_insn
);
3371 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3372 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3373 cfg-related call notes. */
3374 for (i
= 0; i
<= match_len
; ++i
)
3379 j
= peep2_buf_position (peep2_current
+ i
);
3380 old_insn
= as_a
<rtx_insn
*> (peep2_insn_data
[j
].insn
);
3381 if (!CALL_P (old_insn
))
3386 while (new_insn
!= NULL_RTX
)
3388 if (CALL_P (new_insn
))
3390 new_insn
= NEXT_INSN (new_insn
);
3393 gcc_assert (new_insn
!= NULL_RTX
);
3395 CALL_INSN_FUNCTION_USAGE (new_insn
)
3396 = CALL_INSN_FUNCTION_USAGE (old_insn
);
3397 SIBLING_CALL_P (new_insn
) = SIBLING_CALL_P (old_insn
);
3399 for (note
= REG_NOTES (old_insn
);
3401 note
= XEXP (note
, 1))
3402 switch (REG_NOTE_KIND (note
))
3407 add_reg_note (new_insn
, REG_NOTE_KIND (note
),
3411 /* Discard all other reg notes. */
3415 /* Croak if there is another call in the sequence. */
3416 while (++i
<= match_len
)
3418 j
= peep2_buf_position (peep2_current
+ i
);
3419 old_insn
= as_a
<rtx_insn
*> (peep2_insn_data
[j
].insn
);
3420 gcc_assert (!CALL_P (old_insn
));
3425 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3426 move those notes over to the new sequence. */
3428 for (i
= match_len
; i
>= 0; --i
)
3430 int j
= peep2_buf_position (peep2_current
+ i
);
3431 old_insn
= as_a
<rtx_insn
*> (peep2_insn_data
[j
].insn
);
3433 as_note
= find_reg_note (old_insn
, REG_ARGS_SIZE
, NULL
);
3438 i
= peep2_buf_position (peep2_current
+ match_len
);
3439 eh_note
= find_reg_note (peep2_insn_data
[i
].insn
, REG_EH_REGION
, NULL_RTX
);
3441 /* Replace the old sequence with the new. */
3442 rtx_insn
*peepinsn
= as_a
<rtx_insn
*> (peep2_insn_data
[i
].insn
);
3443 last
= emit_insn_after_setloc (attempt
,
3444 peep2_insn_data
[i
].insn
,
3445 INSN_LOCATION (peepinsn
));
3446 before_try
= PREV_INSN (insn
);
3447 delete_insn_chain (insn
, peep2_insn_data
[i
].insn
, false);
3449 /* Re-insert the EH_REGION notes. */
3450 if (eh_note
|| (was_call
&& nonlocal_goto_handler_labels
))
3455 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
3456 if (eh_edge
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
))
3460 copy_reg_eh_region_note_backward (eh_note
, last
, before_try
);
3463 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
3464 if (x
!= BB_END (bb
)
3465 && (can_throw_internal (x
)
3466 || can_nonlocal_goto (x
)))
3471 nfte
= split_block (bb
, x
);
3472 flags
= (eh_edge
->flags
3473 & (EDGE_EH
| EDGE_ABNORMAL
));
3475 flags
|= EDGE_ABNORMAL_CALL
;
3476 nehe
= make_edge (nfte
->src
, eh_edge
->dest
,
3479 nehe
->probability
= eh_edge
->probability
;
3481 = REG_BR_PROB_BASE
- nehe
->probability
;
3483 peep2_do_cleanup_cfg
|= purge_dead_edges (nfte
->dest
);
3488 /* Converting possibly trapping insn to non-trapping is
3489 possible. Zap dummy outgoing edges. */
3490 peep2_do_cleanup_cfg
|= purge_dead_edges (bb
);
3493 /* Re-insert the ARGS_SIZE notes. */
3495 fixup_args_size_notes (before_try
, last
, INTVAL (XEXP (as_note
, 0)));
3497 /* If we generated a jump instruction, it won't have
3498 JUMP_LABEL set. Recompute after we're done. */
3499 for (x
= last
; x
!= before_try
; x
= PREV_INSN (x
))
3502 peep2_do_rebuild_jump_labels
= true;
3509 /* After performing a replacement in basic block BB, fix up the life
3510 information in our buffer. LAST is the last of the insns that we
3511 emitted as a replacement. PREV is the insn before the start of
3512 the replacement. MATCH_LEN is the number of instructions that were
3513 matched, and which now need to be replaced in the buffer. */
3516 peep2_update_life (basic_block bb
, int match_len
, rtx_insn
*last
,
3519 int i
= peep2_buf_position (peep2_current
+ match_len
+ 1);
3523 INIT_REG_SET (&live
);
3524 COPY_REG_SET (&live
, peep2_insn_data
[i
].live_before
);
3526 gcc_assert (peep2_current_count
>= match_len
+ 1);
3527 peep2_current_count
-= match_len
+ 1;
3535 if (peep2_current_count
< MAX_INSNS_PER_PEEP2
)
3537 peep2_current_count
++;
3539 i
= MAX_INSNS_PER_PEEP2
;
3540 peep2_insn_data
[i
].insn
= x
;
3541 df_simulate_one_insn_backwards (bb
, x
, &live
);
3542 COPY_REG_SET (peep2_insn_data
[i
].live_before
, &live
);
3548 CLEAR_REG_SET (&live
);
3553 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3554 Return true if we added it, false otherwise. The caller will try to match
3555 peepholes against the buffer if we return false; otherwise it will try to
3556 add more instructions to the buffer. */
3559 peep2_fill_buffer (basic_block bb
, rtx insn
, regset live
)
3563 /* Once we have filled the maximum number of insns the buffer can hold,
3564 allow the caller to match the insns against peepholes. We wait until
3565 the buffer is full in case the target has similar peepholes of different
3566 length; we always want to match the longest if possible. */
3567 if (peep2_current_count
== MAX_INSNS_PER_PEEP2
)
3570 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3571 any other pattern, lest it change the semantics of the frame info. */
3572 if (RTX_FRAME_RELATED_P (insn
))
3574 /* Let the buffer drain first. */
3575 if (peep2_current_count
> 0)
3577 /* Now the insn will be the only thing in the buffer. */
3580 pos
= peep2_buf_position (peep2_current
+ peep2_current_count
);
3581 peep2_insn_data
[pos
].insn
= insn
;
3582 COPY_REG_SET (peep2_insn_data
[pos
].live_before
, live
);
3583 peep2_current_count
++;
3585 df_simulate_one_insn_forwards (bb
, as_a
<rtx_insn
*> (insn
), live
);
3589 /* Perform the peephole2 optimization pass. */
3592 peephole2_optimize (void)
3599 peep2_do_cleanup_cfg
= false;
3600 peep2_do_rebuild_jump_labels
= false;
3602 df_set_flags (DF_LR_RUN_DCE
);
3603 df_note_add_problem ();
3606 /* Initialize the regsets we're going to use. */
3607 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3608 peep2_insn_data
[i
].live_before
= BITMAP_ALLOC (®_obstack
);
3610 live
= BITMAP_ALLOC (®_obstack
);
3612 FOR_EACH_BB_REVERSE_FN (bb
, cfun
)
3614 bool past_end
= false;
3617 rtl_profile_for_bb (bb
);
3619 /* Start up propagation. */
3620 bitmap_copy (live
, DF_LR_IN (bb
));
3621 df_simulate_initialize_forwards (bb
, live
);
3622 peep2_reinit_state (live
);
3624 insn
= BB_HEAD (bb
);
3631 if (!past_end
&& !NONDEBUG_INSN_P (insn
))
3634 insn
= NEXT_INSN (insn
);
3635 if (insn
== NEXT_INSN (BB_END (bb
)))
3639 if (!past_end
&& peep2_fill_buffer (bb
, insn
, live
))
3642 /* If we did not fill an empty buffer, it signals the end of the
3644 if (peep2_current_count
== 0)
3647 /* The buffer filled to the current maximum, so try to match. */
3649 pos
= peep2_buf_position (peep2_current
+ peep2_current_count
);
3650 peep2_insn_data
[pos
].insn
= PEEP2_EOB
;
3651 COPY_REG_SET (peep2_insn_data
[pos
].live_before
, live
);
3653 /* Match the peephole. */
3654 head
= peep2_insn_data
[peep2_current
].insn
;
3655 attempt
= safe_as_a
<rtx_insn
*> (
3656 peephole2_insns (PATTERN (head
), head
, &match_len
));
3657 if (attempt
!= NULL
)
3659 rtx_insn
*last
= peep2_attempt (bb
, head
, match_len
, attempt
);
3662 peep2_update_life (bb
, match_len
, last
, PREV_INSN (attempt
));
3667 /* No match: advance the buffer by one insn. */
3668 peep2_current
= peep2_buf_position (peep2_current
+ 1);
3669 peep2_current_count
--;
3673 default_rtl_profile ();
3674 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3675 BITMAP_FREE (peep2_insn_data
[i
].live_before
);
3677 if (peep2_do_rebuild_jump_labels
)
3678 rebuild_jump_labels (get_insns ());
3679 if (peep2_do_cleanup_cfg
)
3680 cleanup_cfg (CLEANUP_CFG_CHANGED
);
3682 #endif /* HAVE_peephole2 */
3684 /* Common predicates for use with define_bypass. */
3686 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3687 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3688 must be either a single_set or a PARALLEL with SETs inside. */
3691 store_data_bypass_p (rtx_insn
*out_insn
, rtx_insn
*in_insn
)
3693 rtx out_set
, in_set
;
3694 rtx out_pat
, in_pat
;
3695 rtx out_exp
, in_exp
;
3698 in_set
= single_set (in_insn
);
3701 if (!MEM_P (SET_DEST (in_set
)))
3704 out_set
= single_set (out_insn
);
3707 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_set
)))
3712 out_pat
= PATTERN (out_insn
);
3714 if (GET_CODE (out_pat
) != PARALLEL
)
3717 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3719 out_exp
= XVECEXP (out_pat
, 0, i
);
3721 if (GET_CODE (out_exp
) == CLOBBER
)
3724 gcc_assert (GET_CODE (out_exp
) == SET
);
3726 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_set
)))
3733 in_pat
= PATTERN (in_insn
);
3734 gcc_assert (GET_CODE (in_pat
) == PARALLEL
);
3736 for (i
= 0; i
< XVECLEN (in_pat
, 0); i
++)
3738 in_exp
= XVECEXP (in_pat
, 0, i
);
3740 if (GET_CODE (in_exp
) == CLOBBER
)
3743 gcc_assert (GET_CODE (in_exp
) == SET
);
3745 if (!MEM_P (SET_DEST (in_exp
)))
3748 out_set
= single_set (out_insn
);
3751 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_exp
)))
3756 out_pat
= PATTERN (out_insn
);
3757 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3759 for (j
= 0; j
< XVECLEN (out_pat
, 0); j
++)
3761 out_exp
= XVECEXP (out_pat
, 0, j
);
3763 if (GET_CODE (out_exp
) == CLOBBER
)
3766 gcc_assert (GET_CODE (out_exp
) == SET
);
3768 if (reg_mentioned_p (SET_DEST (out_exp
), SET_DEST (in_exp
)))
3778 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3779 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3780 or multiple set; IN_INSN should be single_set for truth, but for convenience
3781 of insn categorization may be any JUMP or CALL insn. */
3784 if_test_bypass_p (rtx_insn
*out_insn
, rtx_insn
*in_insn
)
3786 rtx out_set
, in_set
;
3788 in_set
= single_set (in_insn
);
3791 gcc_assert (JUMP_P (in_insn
) || CALL_P (in_insn
));
3795 if (GET_CODE (SET_SRC (in_set
)) != IF_THEN_ELSE
)
3797 in_set
= SET_SRC (in_set
);
3799 out_set
= single_set (out_insn
);
3802 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3803 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3811 out_pat
= PATTERN (out_insn
);
3812 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3814 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3816 rtx exp
= XVECEXP (out_pat
, 0, i
);
3818 if (GET_CODE (exp
) == CLOBBER
)
3821 gcc_assert (GET_CODE (exp
) == SET
);
3823 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3824 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
/* Entry point for the peephole2 pass; no-op when the target defines
   no peephole2 patterns.  */

static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}
3843 const pass_data pass_data_peephole2
=
3845 RTL_PASS
, /* type */
3846 "peephole2", /* name */
3847 OPTGROUP_NONE
, /* optinfo_flags */
3848 TV_PEEPHOLE2
, /* tv_id */
3849 0, /* properties_required */
3850 0, /* properties_provided */
3851 0, /* properties_destroyed */
3852 0, /* todo_flags_start */
3853 TODO_df_finish
, /* todo_flags_finish */
3856 class pass_peephole2
: public rtl_opt_pass
3859 pass_peephole2 (gcc::context
*ctxt
)
3860 : rtl_opt_pass (pass_data_peephole2
, ctxt
)
3863 /* opt_pass methods: */
3864 /* The epiphany backend creates a second instance of this pass, so we need
3866 opt_pass
* clone () { return new pass_peephole2 (m_ctxt
); }
3867 virtual bool gate (function
*) { return (optimize
> 0 && flag_peephole2
); }
3868 virtual unsigned int execute (function
*)
3870 return rest_of_handle_peephole2 ();
3873 }; // class pass_peephole2
3878 make_pass_peephole2 (gcc::context
*ctxt
)
3880 return new pass_peephole2 (ctxt
);
3885 const pass_data pass_data_split_all_insns
=
3887 RTL_PASS
, /* type */
3888 "split1", /* name */
3889 OPTGROUP_NONE
, /* optinfo_flags */
3890 TV_NONE
, /* tv_id */
3891 0, /* properties_required */
3892 0, /* properties_provided */
3893 0, /* properties_destroyed */
3894 0, /* todo_flags_start */
3895 0, /* todo_flags_finish */
3898 class pass_split_all_insns
: public rtl_opt_pass
3901 pass_split_all_insns (gcc::context
*ctxt
)
3902 : rtl_opt_pass (pass_data_split_all_insns
, ctxt
)
3905 /* opt_pass methods: */
3906 /* The epiphany backend creates a second instance of this pass, so
3907 we need a clone method. */
3908 opt_pass
* clone () { return new pass_split_all_insns (m_ctxt
); }
3909 virtual unsigned int execute (function
*)
3915 }; // class pass_split_all_insns
3920 make_pass_split_all_insns (gcc::context
*ctxt
)
3922 return new pass_split_all_insns (ctxt
);
3926 rest_of_handle_split_after_reload (void)
3928 /* If optimizing, then go ahead and split insns now. */
3938 const pass_data pass_data_split_after_reload
=
3940 RTL_PASS
, /* type */
3941 "split2", /* name */
3942 OPTGROUP_NONE
, /* optinfo_flags */
3943 TV_NONE
, /* tv_id */
3944 0, /* properties_required */
3945 0, /* properties_provided */
3946 0, /* properties_destroyed */
3947 0, /* todo_flags_start */
3948 0, /* todo_flags_finish */
3951 class pass_split_after_reload
: public rtl_opt_pass
3954 pass_split_after_reload (gcc::context
*ctxt
)
3955 : rtl_opt_pass (pass_data_split_after_reload
, ctxt
)
3958 /* opt_pass methods: */
3959 virtual unsigned int execute (function
*)
3961 return rest_of_handle_split_after_reload ();
3964 }; // class pass_split_after_reload
3969 make_pass_split_after_reload (gcc::context
*ctxt
)
3971 return new pass_split_after_reload (ctxt
);
3976 const pass_data pass_data_split_before_regstack
=
3978 RTL_PASS
, /* type */
3979 "split3", /* name */
3980 OPTGROUP_NONE
, /* optinfo_flags */
3981 TV_NONE
, /* tv_id */
3982 0, /* properties_required */
3983 0, /* properties_provided */
3984 0, /* properties_destroyed */
3985 0, /* todo_flags_start */
3986 0, /* todo_flags_finish */
3989 class pass_split_before_regstack
: public rtl_opt_pass
3992 pass_split_before_regstack (gcc::context
*ctxt
)
3993 : rtl_opt_pass (pass_data_split_before_regstack
, ctxt
)
3996 /* opt_pass methods: */
3997 virtual bool gate (function
*);
3998 virtual unsigned int execute (function
*)
4004 }; // class pass_split_before_regstack
4007 pass_split_before_regstack::gate (function
*)
4009 #if HAVE_ATTR_length && defined (STACK_REGS)
4010 /* If flow2 creates new instructions which need splitting
4011 and scheduling after reload is not done, they might not be
4012 split until final which doesn't allow splitting
4013 if HAVE_ATTR_length. */
4014 # ifdef INSN_SCHEDULING
4015 return (optimize
&& !flag_schedule_insns_after_reload
);
4027 make_pass_split_before_regstack (gcc::context
*ctxt
)
4029 return new pass_split_before_regstack (ctxt
);
/* Worker for the "split4" pass, run before the second scheduling pass.
   NOTE(review): only the signature and the #ifdef survive in this
   copy; the body is reconstructed — verify against upstream recog.c.  */

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}
4043 const pass_data pass_data_split_before_sched2
=
4045 RTL_PASS
, /* type */
4046 "split4", /* name */
4047 OPTGROUP_NONE
, /* optinfo_flags */
4048 TV_NONE
, /* tv_id */
4049 0, /* properties_required */
4050 0, /* properties_provided */
4051 0, /* properties_destroyed */
4052 0, /* todo_flags_start */
4053 0, /* todo_flags_finish */
4056 class pass_split_before_sched2
: public rtl_opt_pass
4059 pass_split_before_sched2 (gcc::context
*ctxt
)
4060 : rtl_opt_pass (pass_data_split_before_sched2
, ctxt
)
4063 /* opt_pass methods: */
4064 virtual bool gate (function
*)
4066 #ifdef INSN_SCHEDULING
4067 return optimize
> 0 && flag_schedule_insns_after_reload
;
4073 virtual unsigned int execute (function
*)
4075 return rest_of_handle_split_before_sched2 ();
4078 }; // class pass_split_before_sched2
4083 make_pass_split_before_sched2 (gcc::context
*ctxt
)
4085 return new pass_split_before_sched2 (ctxt
);
4090 const pass_data pass_data_split_for_shorten_branches
=
4092 RTL_PASS
, /* type */
4093 "split5", /* name */
4094 OPTGROUP_NONE
, /* optinfo_flags */
4095 TV_NONE
, /* tv_id */
4096 0, /* properties_required */
4097 0, /* properties_provided */
4098 0, /* properties_destroyed */
4099 0, /* todo_flags_start */
4100 0, /* todo_flags_finish */
4103 class pass_split_for_shorten_branches
: public rtl_opt_pass
4106 pass_split_for_shorten_branches (gcc::context
*ctxt
)
4107 : rtl_opt_pass (pass_data_split_for_shorten_branches
, ctxt
)
4110 /* opt_pass methods: */
4111 virtual bool gate (function
*)
4113 /* The placement of the splitting that we do for shorten_branches
4114 depends on whether regstack is used by the target or not. */
4115 #if HAVE_ATTR_length && !defined (STACK_REGS)
4122 virtual unsigned int execute (function
*)
4124 return split_all_insns_noflow ();
4127 }; // class pass_split_for_shorten_branches
4132 make_pass_split_for_shorten_branches (gcc::context
*ctxt
)
4134 return new pass_split_for_shorten_branches (ctxt
);
4137 /* (Re)initialize the target information after a change in target. */
4142 /* The information is zero-initialized, so we don't need to do anything
4143 first time round. */
4144 if (!this_target_recog
->x_initialized
)
4146 this_target_recog
->x_initialized
= true;
4149 memset (this_target_recog
->x_bool_attr_masks
, 0,
4150 sizeof (this_target_recog
->x_bool_attr_masks
));
4151 for (int i
= 0; i
< LAST_INSN_CODE
; ++i
)
4152 if (this_target_recog
->x_op_alt
[i
])
4154 free (this_target_recog
->x_op_alt
[i
]);
4155 this_target_recog
->x_op_alt
[i
] = 0;