/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;
/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;
/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (rtx insn)
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
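
/* Illustrative usage sketch (not part of the original file; assumes the
   usual declarations from rtl.h and recog.h are in scope).  Callers
   normally go through the recog_memoized wrapper and treat a negative
   code as "no matching pattern".  */
#if 0
static void
example_require_recognizable (rtx insn)
{
  /* Memoized recognition: repeated queries on the same insn are cheap.  */
  if (recog_memoized (insn) < 0)
    fatal_insn_not_found (insn);
}
#endif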
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
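
/* Illustrative usage sketch (hypothetical caller, not from the original
   file): a single non-grouped change is recognized immediately, and
   validate_change itself reverts the stored value if the insn no
   longer matches.  */
#if 0
static int
example_replace_src (rtx insn, rtx new_src)
{
  /* IN_GROUP == 0: returns nonzero iff INSN is still recognizable.  */
  return validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0);
}
#endif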
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group (void)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
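
/* Illustrative usage sketch (hypothetical caller): changes queued with
   IN_GROUP == 1 are validated atomically by apply_change_group; on
   failure, every queued change is undone.  */
#if 0
static int
example_replace_both (rtx insn, rtx new_dest, rtx new_src)
{
  validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
  validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
  return apply_change_group ();  /* 1 iff INSN is still recognizable.  */
}
#endif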
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
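
/* Illustrative usage sketch (hypothetical caller): a pass can checkpoint
   the pending group with num_validated_changes and roll back to that
   point with cancel_changes if later analysis fails.  */
#if 0
static void
example_tentative_edit (rtx insn, rtx new_src, int analysis_ok)
{
  int checkpoint = num_validated_changes ();

  validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
  if (!analysis_ok)
    cancel_changes (checkpoint);  /* Undo only the changes made above.  */
}
#endif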
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
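
/* Illustrative usage sketch (hypothetical caller): substitute one
   register for another everywhere in INSN, keeping the result only if
   INSN remains recognizable.  */
#if 0
static int
example_propagate_copy (rtx old_reg, rtx new_reg, rtx insn)
{
  return validate_replace_rtx (old_reg, new_reg, insn);
}
#endif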
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will return usually zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
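
/* Illustrative usage sketch (hypothetical caller): combine-style passes
   use find_single_use to decide whether DEST can be substituted into
   its lone consumer.  */
#if 0
static void
example_try_substitution (rtx dest, rtx insn)
{
  rtx use_insn;
  rtx *usep = find_single_use (dest, insn, &use_insn);

  if (usep)
    {
      /* *USEP is the innermost expression containing the only use of
         DEST, and USE_INSN is the insn it occurs in.  */
    }
}
#endif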
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it might have.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
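
/* Usage note (illustrative, not from the original file): predicates such
   as general_operand are normally named by match_operand in the machine
   description, but they can also be called directly, e.g.

       if (general_operand (op, SImode))
         ...   OP is a valid SImode register, memory, or constant ...  */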
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
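
/* Illustrative usage sketch (hypothetical caller): asm_noperands doubles
   as the test for "is this an asm with operands", since it returns -1
   for anything else.  */
#if 0
static int
example_is_asm_with_operands (rtx insn)
{
  return asm_noperands (PATTERN (insn)) >= 0;
}
#endif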
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
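
/* Illustrative usage sketch (mirroring check_asm_operands above): size
   the vectors from asm_noperands, then decode in one call.  */
#if 0
static void
example_decode (rtx body)
{
  int n = asm_noperands (body);

  if (n > 0)
    {
      rtx *operands = alloca (n * sizeof (rtx));
      const char **constraints = alloca (n * sizeof (char *));

      decode_asm_operands (body, operands, NULL, constraints, NULL);
    }
}
#endif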
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;

      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED
                          /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */)
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Like extract_insn, but save the insn extracted and don't extract again,
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands gets mode from their real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
}
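
/* Illustrative usage sketch (hypothetical caller): after extract_insn,
   the operands, their locations, modes, and constraints are all
   available through the global recog_data.  */
#if 0
static void
example_walk_operands (rtx insn)
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      rtx op = recog_data.operand[i];
      const char *constraint = recog_data.constraints[i];
      /* ... inspect OP against CONSTRAINT ... */
    }
}
#endif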
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
                                 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].cl = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[j].matches = strtoul (p, &end, 10);
                    recog_op_alt[op_alt[j].matches][j].matched = i;
                    p = end;
                  }
                  continue;

                case 'm':
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].is_address = 1;
                  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
                    [(int) MODE_BASE_REG_CLASS (VOIDmode)];
                  break;

                case 'g':
                case 'r':
                  op_alt[j].cl =
                    reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
                  break;

                default:
                  if (EXTRA_MEMORY_CONSTRAINT (c, p))
                    {
                      op_alt[j].memory_ok = 1;
                      break;
                    }
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
                    {
                      op_alt[j].is_address = 1;
                      op_alt[j].cl
                        = (reg_class_subunion
                           [(int) op_alt[j].cl]
                           [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
                      break;
                    }

                  op_alt[j].cl
                    = (reg_class_subunion
                       [(int) op_alt[j].cl]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
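
/* Illustrative usage sketch (hypothetical caller): passes such as
   regclass call the pair extract_insn / preprocess_constraints and then
   read the per-operand, per-alternative summary in recog_op_alt.  */
#if 0
static void
example_scan_alternatives (rtx insn)
{
  int i, j;

  extract_insn (insn);
  preprocess_constraints ();
  for (i = 0; i < recog_data.n_operands; i++)
    for (j = 0; j < recog_data.n_alternatives; j++)
      if (recog_op_alt[i][j].memory_ok)
        {
          /* Alternative J allows operand I to be in memory.  */
        }
}
#endif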
/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive nonzero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this, other;
};

int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < recog_data.n_operands; opno++)
        {
          rtx op = recog_data.operand[opno];
          enum machine_mode mode = GET_MODE (op);
          const char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;
          int len;

          earlyclobber[opno] = 0;

          /* A unary operator may be accepted by the predicate, but it
             is irrelevant for matching constraints.  */
          if (UNARY_P (op))
            op = XEXP (op, 0);

          if (GET_CODE (op) == SUBREG)
            {
              if (REG_P (SUBREG_REG (op))
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
                                              GET_MODE (SUBREG_REG (op)),
                                              SUBREG_BYTE (op),
                                              GET_MODE (op));
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;

          do
            switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
              {
              case '\0':
                len = 0;
                break;
              case ',':
                c = '\0';
                break;

              case '?': case '!': case '*': case '%':
              case '=': case '+':
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                do
                  p++;
                while (*p && *p != ',');
                len = 0;
                break;

              case '&':
                earlyclobber[opno] = 1;
                break;

              case '0': case '1': case '2': case '3': case '4':
              case '5': case '6': case '7': case '8': case '9':
                {
                  /* This operand must be the same as a previous one.
                     This kind of constraint is used for instructions such
                     as add when they take only two operands.

                     Note that the lower-numbered operand is passed first.

                     If we are not testing strictly, assume that this
                     constraint will be satisfied.  */

                  char *end;
                  int match;

                  match = strtoul (p, &end, 10);
                  p = end;

                  if (strict < 0)
                    val = 1;
                  else
                    {
                      rtx op1 = recog_data.operand[match];
                      rtx op2 = recog_data.operand[opno];

                      /* A unary operator may be accepted by the predicate,
                         but it is irrelevant for matching constraints.  */
                      if (UNARY_P (op1))
                        op1 = XEXP (op1, 0);
                      if (UNARY_P (op2))
                        op2 = XEXP (op2, 0);

                      val = operands_match_p (op1, op2);
                    }

                  matching_operands[opno] = match;
                  matching_operands[match] = opno;

                  if (val != 0)
                    win = 1;

                  /* If output is *x and input is *--x, arrange later
                     to change the output to *--x as well, since the
                     output op is the one that will be printed.  */
                  if (val == 2 && strict > 0)
                    {
                      funny_match[funny_match_index].this = opno;
                      funny_match[funny_match_index++].other = match;
                    }
                }
                len = 0;
                break;

              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  */
                if (strict <= 0
                    || (strict_memory_address_p (recog_data.operand_mode[opno],
                                                 op)))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  */
              case 'g':
                /* Anything goes unless it is a REG and really has a hard reg
                   but the hard reg is not in the class GENERAL_REGS.  */
                if (strict < 0
                    || GENERAL_REGS == ALL_REGS
                    || !REG_P (op)
                    || (reload_in_progress
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                  win = 1;
                break;

              case 'X':
                /* This is used for a MATCH_SCRATCH in the cases when
                   we don't actually need anything.  So anything goes
                   any time.  */
                win = 1;
                break;

              case 'm':
                /* Memory operands must be valid, to the extent
                   required by STRICT.  */
                if (MEM_P (op))
                  {
                    if (strict > 0
                        && !strict_memory_address_p (GET_MODE (op),
                                                     XEXP (op, 0)))
                      break;
                    if (strict == 0
                        && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
                      break;
                    win = 1;
                  }
                /* Before reload, accept what reload can turn into mem.  */
                else if (strict < 0 && CONSTANT_P (op))
                  win = 1;
                /* During reload, accept a pseudo  */
                else if (reload_in_progress && REG_P (op)
                         && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                  win = 1;
                break;

              case '<':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                        || GET_CODE (XEXP (op, 0)) == POST_DEC))
                  win = 1;
                break;

              case '>':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_INC
                        || GET_CODE (XEXP (op, 0)) == POST_INC))
                  win = 1;
                break;

              case 'E':
              case 'F':
                if (GET_CODE (op) == CONST_DOUBLE
                    || (GET_CODE (op) == CONST_VECTOR
                        && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
                  win = 1;
                break;

              case 'G':
              case 'H':
                if (GET_CODE (op) == CONST_DOUBLE
                    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
                  win = 1;
                break;

              case 's':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  break;
              case 'i':
                if (CONSTANT_P (op))
                  win = 1;
                break;

              case 'n':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  win = 1;
                break;

              case 'I':
              case 'J':
              case 'K':
              case 'L':
              case 'M':
              case 'N':
              case 'O':
              case 'P':
                if (GET_CODE (op) == CONST_INT
                    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
                  win = 1;
                break;

              case 'V':
                if (MEM_P (op)
                    && ((strict > 0 && ! offsettable_memref_p (op))
                        || (strict < 0
                            && !(CONSTANT_P (op) || MEM_P (op)))
                        || (reload_in_progress
                            && !(REG_P (op)
                                 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
                  win = 1;
                break;

              case 'o':
                if ((strict > 0 && offsettable_memref_p (op))
                    || (strict == 0 && offsettable_nonstrict_memref_p (op))
                    /* Before reload, accept what reload can handle.  */
                    || (strict < 0
                        && (CONSTANT_P (op) || MEM_P (op)))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && REG_P (op)
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              default:
                {
                  enum reg_class cl;

                  cl = (c == 'r'
                        ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
                  if (cl != NO_REGS)
                    {
                      if (strict < 0
                          || (strict == 0
                              && REG_P (op)
                              && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                          || (strict == 0 && GET_CODE (op) == SCRATCH)
                          || (REG_P (op)
                              && reg_fits_class_p (op, cl, offset, mode)))
                        win = 1;
                    }
#ifdef EXTRA_CONSTRAINT_STR
                  else if (EXTRA_CONSTRAINT_STR (op, c, p))
                    win = 1;

                  else if (EXTRA_MEMORY_CONSTRAINT (c
2527 else if (EXTRA_MEMORY_CONSTRAINT (c
, p
)
2528 /* Every memory operand can be reloaded to fit. */
2529 && ((strict
< 0 && MEM_P (op
))
2530 /* Before reload, accept what reload can turn
2532 || (strict
< 0 && CONSTANT_P (op
))
2533 /* During reload, accept a pseudo */
2534 || (reload_in_progress
&& REG_P (op
)
2535 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)))
2537 else if (EXTRA_ADDRESS_CONSTRAINT (c
, p
)
2538 /* Every address operand can be reloaded to fit. */
2545 while (p
+= len
, c
);
2547 constraints
[opno
] = p
;
2548 /* If this operand did not win somehow,
2549 this alternative loses. */
2553 /* This alternative won; the operands are ok.
2554 Change whichever operands this alternative says to change. */
2559 /* See if any earlyclobber operand conflicts with some other
2563 for (eopno
= 0; eopno
< recog_data
.n_operands
; eopno
++)
2564 /* Ignore earlyclobber operands now in memory,
2565 because we would often report failure when we have
2566 two memory operands, one of which was formerly a REG. */
2567 if (earlyclobber
[eopno
]
2568 && REG_P (recog_data
.operand
[eopno
]))
2569 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2570 if ((MEM_P (recog_data
.operand
[opno
])
2571 || recog_data
.operand_type
[opno
] != OP_OUT
)
2573 /* Ignore things like match_operator operands. */
2574 && *recog_data
.constraints
[opno
] != 0
2575 && ! (matching_operands
[opno
] == eopno
2576 && operands_match_p (recog_data
.operand
[opno
],
2577 recog_data
.operand
[eopno
]))
2578 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2579 recog_data
.operand
[eopno
]))
2584 while (--funny_match_index
>= 0)
2586 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2587 = recog_data
.operand
[funny_match
[funny_match_index
].this];
2594 which_alternative
++;
2596 while (which_alternative
< recog_data
.n_alternatives
);
2598 which_alternative
= -1;
2599 /* If we are about to reject this, but we are not to test strictly,
2600 try a very loose test. Only return failure if it fails also. */
2602 return constrain_operands (-1);
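
/* For illustration only: a typical caller extracts the operands and then
   picks a strictness appropriate to the compilation phase, as final does.
   This hypothetical helper (not called anywhere) sketches that pattern;
   strict checking is only meaningful once reload has assigned hard
   registers.  */

static int ATTRIBUTE_UNUSED
example_insn_constraints_ok_p (rtx insn)
{
  extract_insn (insn);
  return constrain_operands (reload_completed);
}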
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */
int
reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
		  enum machine_mode mode)
{
  int regno = REGNO (operand);
  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
			    regno + offset))
    {
      int sr;
      regno += offset;
      for (sr = hard_regno_nregs[regno][mode] - 1;
	   sr > 0; sr--)
	if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
				 regno + sr))
	  break;
      return sr == 0;
    }

  return 0;
}
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */
static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);

  if (last == insn)
    return NULL_RTX;

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
	{
	  if (INSN_P (first))
	    cleanup_subreg_operands (first);
	  if (first == last)
	    break;
	  first = NEXT_INSN (first);
	}
    }
  return last;
}
/* Split all insns in the function.  If UPD_LIFE, update life info after.  */
void
split_all_insns (int upd_life)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      for (insn = BB_HEAD (bb); !finish; insn = next)
	{
	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  finish = (insn == BB_END (bb));
	  if (INSN_P (insn))
	    {
	      rtx set = single_set (insn);

	      /* Don't split no-op move insns.  These should silently
		 disappear later in final.  Splitting such insns would
		 break the code that handles REG_NO_CONFLICT blocks.  */
	      if (set && set_noop_p (set))
		{
		  /* Nops get in the way while scheduling, so delete them
		     now if register allocation has already been done.  It
		     is too risky to try to do this before register
		     allocation, and there are unlikely to be very many
		     nops then anyways.  */
		  if (reload_completed)
		    {
		      /* If the no-op set has a REG_UNUSED note, we need
			 to update liveness information.  */
		      if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
			{
			  SET_BIT (blocks, bb->index);
			  changed = true;
			}
		      /* ??? Is life info affected by deleting edges?  */
		      delete_insn_and_edges (insn);
		    }
		}
	      else
		{
		  rtx last = split_insn (insn);
		  if (last)
		    {
		      /* The split sequence may include barrier, but the
			 BB boundary we are interested in will be set to
			 previous one.  */

		      while (BARRIER_P (last))
			last = PREV_INSN (last);
		      SET_BIT (blocks, bb->index);
		      changed = true;
		    }
		}
	    }
	}
    }

  if (changed)
    {
      int old_last_basic_block = last_basic_block;

      find_many_sub_basic_blocks (blocks);

      if (old_last_basic_block != last_basic_block && upd_life)
	blocks = sbitmap_resize (blocks, last_basic_block, 1);
    }

  if (changed && upd_life)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
		      PROP_DEATH_NOTES);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */
void
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
	{
	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles REG_NO_CONFLICT blocks.  */
	  rtx set = single_set (insn);
	  if (set && set_noop_p (set))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.

		 ??? Should we use delete_insn when the CFG isn't valid?  */
	      if (reload_completed)
		delete_insn_and_edges (insn);
	    }
	  else
	    split_insn (insn);
	}
    }
}
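
/* For illustration only: a machine-dependent reorg pass (hypothetical
   here) would use this entry point to re-split insns it has rewritten,
   at a stage where the CFG is no longer maintained:  */

static void ATTRIBUTE_UNUSED
example_machine_dependent_reorg (void)
{
  /* ... target-specific rewriting of the insn stream ... */
  split_all_insns_noflow ();
}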
#ifdef HAVE_peephole2

struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   global_live_at_end for the block.  */
#define PEEP2_EOB	pc_rtx
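
/* peep2_insn_data is used as a circular buffer of MAX_INSNS_PER_PEEP2 + 1
   slots, so every lookup below reduces an index modulo the buffer size.
   For illustration only, the recurring wrap idiom amounts to this
   hypothetical helper (the functions below keep the open-coded form):  */

static int ATTRIBUTE_UNUSED
example_peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}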
/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n < MAX_INSNS_PER_PEEP2 + 1);

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  if (peep2_insn_data[n].insn == PEEP2_EOB)
    return NULL_RTX;
  return peep2_insn_data[n].insn;
}
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}
/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
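
/* For illustration only: the two predicates above are intended for the C
   condition of a define_peephole2, where offsets are counted in insns from
   the start of the matched sequence.  A hypothetical condition testing
   that operand 0 (assumed to be a REG) is dead after the second matched
   insn would read:  */

static int ATTRIBUTE_UNUSED
example_peep2_condition (rtx *operands)
{
  return peep2_reg_dead_p (2, operands[0]);
}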
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */
rtx
peep2_find_free_register (int from, int to, const char *class_str,
			  enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class cl;
  HARD_REG_SET live;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
	from = 0;
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
	: REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
	raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
	continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
	continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
	continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
	continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
	  && (! reload_completed || frame_pointer_needed))
	continue;

      success = 1;
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
	{
	  if (TEST_HARD_REG_BIT (*reg_set, regno + j)
	      || TEST_HARD_REG_BIT (live, regno + j))
	    {
	      success = 0;
	      break;
	    }
	}
      if (success)
	{
	  for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
	    SET_HARD_REG_BIT (*reg_set, regno + j);

	  /* Start the next search with the next register.  */
	  if (++raw_regno >= FIRST_PSEUDO_REGISTER)
	    raw_regno = 0;
	  search_ofs = raw_regno;

	  return gen_rtx_REG (mode, regno);
	}
    }

  search_ofs = 0;
  return NULL_RTX;
}
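
/* For illustration only: from the preparation statements of a
   define_peephole2, a scratch register is typically requested as in this
   hypothetical fragment.  The caller clears USED beforehand so that
   repeated requests do not hand out the same register twice.  */

static rtx ATTRIBUTE_UNUSED
example_get_peep2_scratch (HARD_REG_SET *used, int seq_len)
{
  /* Ask for a general register in SImode that is free from the first
     matched insn through the last.  */
  return peep2_find_free_register (0, seq_len - 1, "r", SImode, used);
}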
/* Perform the peephole2 optimization pass.  */

void
peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
{
  rtx insn, prev;
  regset live;
  int i;
  basic_block bb;
#ifdef HAVE_conditional_execution
  sbitmap blocks;
  bool changed;
#endif
  bool do_cleanup_cfg = false;
  bool do_rebuild_jump_labels = false;

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = OBSTACK_ALLOC_REG_SET (&reg_obstack);
  live = OBSTACK_ALLOC_REG_SET (&reg_obstack);

#ifdef HAVE_conditional_execution
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;
#else
  count_or_remove_death_notes (NULL, 1);
#endif

  FOR_EACH_BB_REVERSE (bb)
    {
      struct propagate_block_info *pbi;

      /* Indicate that all slots except the last holds invalid data.  */
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
	peep2_insn_data[i].insn = NULL_RTX;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      COPY_REG_SET (live, bb->global_live_at_end);
      COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);

#ifdef HAVE_conditional_execution
      pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
#else
      pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
#endif

      for (insn = BB_END (bb); ; insn = prev)
	{
	  prev = PREV_INSN (insn);
	  if (INSN_P (insn))
	    {
	      rtx try, before_try, x;
	      int match_len;
	      rtx note;
	      bool was_call = false;

	      /* Record this insn.  */
	      if (--peep2_current < 0)
		peep2_current = MAX_INSNS_PER_PEEP2;
	      peep2_insn_data[peep2_current].insn = insn;
	      propagate_one_insn (pbi, insn);
	      COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

	      /* Match the peephole.  */
	      try = peephole2_insns (PATTERN (insn), insn, &match_len);
	      if (try != NULL)
		{
		  /* If we are splitting a CALL_INSN, look for the CALL_INSN
		     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
		     cfg-related call notes.  */
		  for (i = 0; i <= match_len; ++i)
		    {
		      int j;
		      rtx old_insn, new_insn, note;

		      j = i + peep2_current;
		      if (j >= MAX_INSNS_PER_PEEP2 + 1)
			j -= MAX_INSNS_PER_PEEP2 + 1;
		      old_insn = peep2_insn_data[j].insn;
		      if (!CALL_P (old_insn))
			continue;
		      was_call = true;

		      new_insn = try;
		      while (new_insn != NULL_RTX)
			{
			  if (CALL_P (new_insn))
			    break;
			  new_insn = NEXT_INSN (new_insn);
			}

		      gcc_assert (new_insn != NULL_RTX);

		      CALL_INSN_FUNCTION_USAGE (new_insn)
			= CALL_INSN_FUNCTION_USAGE (old_insn);

		      for (note = REG_NOTES (old_insn);
			   note;
			   note = XEXP (note, 1))
			switch (REG_NOTE_KIND (note))
			  {
			  case REG_NORETURN:
			  case REG_SETJMP:
			  case REG_ALWAYS_RETURN:
			    REG_NOTES (new_insn)
			      = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
						   XEXP (note, 0),
						   REG_NOTES (new_insn));
			  default:
			    /* Discard all other reg notes.  */
			    break;
			  }

		      /* Croak if there is another call in the sequence.  */
		      while (++i <= match_len)
			{
			  j = i + peep2_current;
			  if (j >= MAX_INSNS_PER_PEEP2 + 1)
			    j -= MAX_INSNS_PER_PEEP2 + 1;
			  old_insn = peep2_insn_data[j].insn;
			  gcc_assert (!CALL_P (old_insn));
			}
		      break;
		    }

		  i = match_len + peep2_current;
		  if (i >= MAX_INSNS_PER_PEEP2 + 1)
		    i -= MAX_INSNS_PER_PEEP2 + 1;

		  note = find_reg_note (peep2_insn_data[i].insn,
					REG_EH_REGION, NULL_RTX);

		  /* Replace the old sequence with the new.  */
		  try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
						INSN_LOCATOR (peep2_insn_data[i].insn));
		  before_try = PREV_INSN (insn);
		  delete_insn_chain (insn, peep2_insn_data[i].insn);

		  /* Re-insert the EH_REGION notes.  */
		  if (note || (was_call && nonlocal_goto_handler_labels))
		    {
		      edge eh_edge;
		      edge_iterator ei;

		      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
			if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
			  break;

		      for (x = try; x != before_try; x = PREV_INSN (x))
			if (CALL_P (x)
			    || (flag_non_call_exceptions
				&& may_trap_p (PATTERN (x))
				&& !find_reg_note (x, REG_EH_REGION, NULL)))
			  {
			    if (note)
			      REG_NOTES (x)
				= gen_rtx_EXPR_LIST (REG_EH_REGION,
						     XEXP (note, 0),
						     REG_NOTES (x));

			    if (x != BB_END (bb) && eh_edge)
			      {
				edge nfte, nehe;
				int flags;

				nfte = split_block (bb, x);
				flags = (eh_edge->flags
					 & (EDGE_EH | EDGE_ABNORMAL));
				if (CALL_P (x))
				  flags |= EDGE_ABNORMAL_CALL;
				nehe = make_edge (nfte->src, eh_edge->dest,
						  flags);

				nehe->probability = eh_edge->probability;
				nfte->probability
				  = REG_BR_PROB_BASE - nehe->probability;

				do_cleanup_cfg |= purge_dead_edges (nfte->dest);
#ifdef HAVE_conditional_execution
				SET_BIT (blocks, nfte->dest->index);
				changed = true;
#endif
				bb = nfte->src;
				eh_edge = nehe;
			      }
			  }

		      /* Converting possibly trapping insn to non-trapping is
			 possible.  Zap dummy outgoing edges.  */
		      do_cleanup_cfg |= purge_dead_edges (bb);
		    }

#ifdef HAVE_conditional_execution
		  /* With conditional execution, we cannot back up the
		     live information so easily, since the conditional
		     death data structures are not so self-contained.
		     So record that we've made a modification to this
		     block and update life information at the end.  */
		  SET_BIT (blocks, bb->index);
		  changed = true;

		  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
		    peep2_insn_data[i].insn = NULL_RTX;
		  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
#else
		  /* Back up lifetime information past the end of the
		     newly created sequence.  */
		  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
		    i = 0;
		  COPY_REG_SET (live, peep2_insn_data[i].live_before);

		  /* Update life information for the new sequence.  */
		  x = try;
		  do
		    {
		      if (INSN_P (x))
			{
			  if (--i < 0)
			    i = MAX_INSNS_PER_PEEP2;
			  peep2_insn_data[i].insn = x;
			  propagate_one_insn (pbi, x);
			  COPY_REG_SET (peep2_insn_data[i].live_before, live);
			}
		      x = PREV_INSN (x);
		    }
		  while (x != prev);

		  /* ??? Should verify that LIVE now matches what we
		     had before the new sequence.  */

		  peep2_current = i;
#endif

		  /* If we generated a jump instruction, it won't have
		     JUMP_LABEL set.  Recompute after we're done.  */
		  for (x = try; x != before_try; x = PREV_INSN (x))
		    if (JUMP_P (x))
		      {
			do_rebuild_jump_labels = true;
			break;
		      }
		}
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}

      free_propagate_block_info (pbi);
    }

  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    FREE_REG_SET (peep2_insn_data[i].live_before);
  FREE_REG_SET (live);

  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());

  /* If we eliminated EH edges, we may be able to merge blocks.  Further,
     we've changed global life since exception handlers are no longer
     reachable.  */
  if (do_cleanup_cfg)
    {
      cleanup_cfg (0);
      update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
    }
#ifdef HAVE_conditional_execution
  else
    {
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }
  sbitmap_free (blocks);
#endif
}
#endif /* HAVE_peephole2 */
/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN must be
   single_set.  OUT_INSN must be either a single_set or a PARALLEL with
   SETs inside.  */
int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  gcc_assert (in_set);

  if (!MEM_P (SET_DEST (in_set)))
    return false;

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
	    return false;
	}
    }

  return true;
}
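
/* For illustration only: a scheduler description would name the predicate
   above as the guard of a define_bypass, e.g. with hypothetical
   reservation names

       (define_bypass 1 "my_store" "my_load" "store_data_bypass_p")

   so that the shorter latency applies only when the dependence is on the
   stored data rather than on the address.  */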
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */
int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
	  || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
	return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
	{
	  rtx exp = XVECEXP (out_pat, 0, i);

	  if (GET_CODE (exp) == CLOBBER)
	    continue;

	  gcc_assert (GET_CODE (exp) == SET);

	  /* In the multiple-set case OUT_SET is null, so test each SET's
	     own destination rather than the (nonexistent) single set's.  */
	  if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
	      || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
	    return false;
	}
    }

  return true;
}