1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "hard-reg-set.h"
36 #include "basic-block.h"
/* Default the push/pop address codes from the direction of stack growth.
   A target may override either before including this logic.  */
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
56 static void validate_replace_rtx_1
PARAMS ((rtx
*, rtx
, rtx
, rtx
));
57 static rtx
*find_single_use_1
PARAMS ((rtx
, rtx
*));
58 static rtx
*find_constant_term_loc
PARAMS ((rtx
*));
59 static void validate_replace_src_1
PARAMS ((rtx
*, void *));
61 /* Nonzero means allow operands to be volatile.
62 This should be 0 if you are generating rtl, such as if you are calling
63 the functions in optabs.c and expmed.c (most of the time).
64 This should be 1 if all valid insns need to be recognized,
65 such as in regclass.c and final.c and reload.c.
67 init_recog and init_recog_no_volatile are responsible for setting this. */
71 struct recog_data recog_data
;
73 /* Contains a vector of operand_alternative structures for every operand.
74 Set up by preprocess_constraints. */
75 struct operand_alternative recog_op_alt
[MAX_RECOG_OPERANDS
][MAX_RECOG_ALTERNATIVES
];
77 /* On return from `constrain_operands', indicate which alternative
80 int which_alternative
;
82 /* Nonzero after end of reload pass.
83 Set to 1 or 0 by toplev.c.
84 Controls the significance of (SUBREG (MEM)). */
88 /* Initialize data used by the function `recog'.
89 This must be called once in the compilation of a function
90 before any insn recognition may be done in the function. */
93 init_recog_no_volatile ()
104 /* Try recognizing the instruction INSN,
105 and return the code number that results.
106 Remember the code so that repeated calls do not
107 need to spend the time for actual rerecognition.
109 This function is the normal interface to instruction recognition.
110 The automatically-generated function `recog' is normally called
111 through this one. (The only exception is in combine.c.) */
114 recog_memoized_1 (insn
)
117 if (INSN_CODE (insn
) < 0)
118 INSN_CODE (insn
) = recog (PATTERN (insn
), insn
, 0);
119 return INSN_CODE (insn
);
122 /* Check that X is an insn-body for an `asm' with operands
123 and that the operands mentioned in it are legitimate. */
126 check_asm_operands (x
)
131 const char **constraints
;
134 /* Post-reload, be more strict with things. */
135 if (reload_completed
)
137 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
138 extract_insn (make_insn_raw (x
));
139 constrain_operands (1);
140 return which_alternative
>= 0;
143 noperands
= asm_noperands (x
);
149 operands
= (rtx
*) alloca (noperands
* sizeof (rtx
));
150 constraints
= (const char **) alloca (noperands
* sizeof (char *));
152 decode_asm_operands (x
, operands
, NULL
, constraints
, NULL
);
154 for (i
= 0; i
< noperands
; i
++)
156 const char *c
= constraints
[i
];
159 if (ISDIGIT ((unsigned char)c
[0]) && c
[1] == '\0')
160 c
= constraints
[c
[0] - '0'];
162 if (! asm_operand_ok (operands
[i
], c
))
169 /* Static data for the next two routines. */
171 typedef struct change_t
179 static change_t
*changes
;
180 static int changes_allocated
;
182 static int num_changes
= 0;
184 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
185 at which NEW will be placed. If OBJECT is zero, no validation is done,
186 the change is simply made.
188 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
189 will be called with the address and mode as parameters. If OBJECT is
190 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
193 IN_GROUP is non-zero if this is part of a group of changes that must be
194 performed as a group. In that case, the changes will be stored. The
195 function `apply_change_group' will validate and apply the changes.
197 If IN_GROUP is zero, this is a single change. Try to recognize the insn
198 or validate the memory reference with the change applied. If the result
199 is not valid for the machine, suppress the change and return zero.
200 Otherwise, perform the change and return 1. */
203 validate_change (object
, loc
, new, in_group
)
211 if (old
== new || rtx_equal_p (old
, new))
214 if (in_group
== 0 && num_changes
!= 0)
219 /* Save the information describing this change. */
220 if (num_changes
>= changes_allocated
)
222 if (changes_allocated
== 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated
= MAX_RECOG_OPERANDS
* 5;
227 changes_allocated
*= 2;
230 (change_t
*) xrealloc (changes
,
231 sizeof (change_t
) * changes_allocated
);
234 changes
[num_changes
].object
= object
;
235 changes
[num_changes
].loc
= loc
;
236 changes
[num_changes
].old
= old
;
238 if (object
&& GET_CODE (object
) != MEM
)
240 /* Set INSN_CODE to force rerecognition of insn. Save old code in
242 changes
[num_changes
].old_code
= INSN_CODE (object
);
243 INSN_CODE (object
) = -1;
248 /* If we are making a group of changes, return 1. Otherwise, validate the
249 change group we made. */
254 return apply_change_group ();
257 /* This subroutine of apply_change_group verifies whether the changes to INSN
258 were valid; i.e. whether INSN can still be recognized. */
261 insn_invalid_p (insn
)
264 rtx pat
= PATTERN (insn
);
265 int num_clobbers
= 0;
266 /* If we are before reload and the pattern is a SET, see if we can add
268 int icode
= recog (pat
, insn
,
269 (GET_CODE (pat
) == SET
270 && ! reload_completed
&& ! reload_in_progress
)
271 ? &num_clobbers
: 0);
272 int is_asm
= icode
< 0 && asm_noperands (PATTERN (insn
)) >= 0;
275 /* If this is an asm and the operand aren't legal, then fail. Likewise if
276 this is not an asm and the insn wasn't recognized. */
277 if ((is_asm
&& ! check_asm_operands (PATTERN (insn
)))
278 || (!is_asm
&& icode
< 0))
281 /* If we have to add CLOBBERs, fail if we have to add ones that reference
282 hard registers since our callers can't know if they are live or not.
283 Otherwise, add them. */
284 if (num_clobbers
> 0)
288 if (added_clobbers_hard_reg_p (icode
))
291 newpat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num_clobbers
+ 1));
292 XVECEXP (newpat
, 0, 0) = pat
;
293 add_clobbers (newpat
, icode
);
294 PATTERN (insn
) = pat
= newpat
;
297 /* After reload, verify that all constraints are satisfied. */
298 if (reload_completed
)
302 if (! constrain_operands (1))
306 INSN_CODE (insn
) = icode
;
310 /* Apply a group of changes previously issued with `validate_change'.
311 Return 1 if all changes are valid, zero otherwise. */
314 apply_change_group ()
317 rtx last_validated
= NULL_RTX
;
319 /* The changes have been applied and all INSN_CODEs have been reset to force
322 The changes are valid if we aren't given an object, or if we are
323 given a MEM and it still is a valid address, or if this is in insn
324 and it is recognized. In the latter case, if reload has completed,
325 we also require that the operands meet the constraints for
328 for (i
= 0; i
< num_changes
; i
++)
330 rtx object
= changes
[i
].object
;
332 /* if there is no object to test or if it is the same as the one we
333 already tested, ignore it. */
334 if (object
== 0 || object
== last_validated
)
337 if (GET_CODE (object
) == MEM
)
339 if (! memory_address_p (GET_MODE (object
), XEXP (object
, 0)))
342 else if (insn_invalid_p (object
))
344 rtx pat
= PATTERN (object
);
346 /* Perhaps we couldn't recognize the insn because there were
347 extra CLOBBERs at the end. If so, try to re-recognize
348 without the last CLOBBER (later iterations will cause each of
349 them to be eliminated, in turn). But don't do this if we
350 have an ASM_OPERAND. */
351 if (GET_CODE (pat
) == PARALLEL
352 && GET_CODE (XVECEXP (pat
, 0, XVECLEN (pat
, 0) - 1)) == CLOBBER
353 && asm_noperands (PATTERN (object
)) < 0)
357 if (XVECLEN (pat
, 0) == 2)
358 newpat
= XVECEXP (pat
, 0, 0);
364 = gen_rtx_PARALLEL (VOIDmode
,
365 rtvec_alloc (XVECLEN (pat
, 0) - 1));
366 for (j
= 0; j
< XVECLEN (newpat
, 0); j
++)
367 XVECEXP (newpat
, 0, j
) = XVECEXP (pat
, 0, j
);
370 /* Add a new change to this group to replace the pattern
371 with this new pattern. Then consider this change
372 as having succeeded. The change we added will
373 cause the entire call to fail if things remain invalid.
375 Note that this can lose if a later change than the one
376 we are processing specified &XVECEXP (PATTERN (object), 0, X)
377 but this shouldn't occur. */
379 validate_change (object
, &PATTERN (object
), newpat
, 1);
382 else if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
383 /* If this insn is a CLOBBER or USE, it is always valid, but is
389 last_validated
= object
;
392 if (i
== num_changes
)
404 /* Return the number of changes so far in the current group. */
407 num_validated_changes ()
412 /* Retract the changes numbered NUM and up. */
420 /* Back out all the changes. Do this in the opposite order in which
422 for (i
= num_changes
- 1; i
>= num
; i
--)
424 *changes
[i
].loc
= changes
[i
].old
;
425 if (changes
[i
].object
&& GET_CODE (changes
[i
].object
) != MEM
)
426 INSN_CODE (changes
[i
].object
) = changes
[i
].old_code
;
431 /* Replace every occurrence of FROM in X with TO. Mark each change with
432 validate_change passing OBJECT. */
435 validate_replace_rtx_1 (loc
, from
, to
, object
)
437 rtx from
, to
, object
;
440 register const char *fmt
;
441 register rtx x
= *loc
;
447 /* X matches FROM if it is the same rtx or they are both referring to the
448 same register in the same mode. Avoid calling rtx_equal_p unless the
449 operands look similar. */
452 || (GET_CODE (x
) == REG
&& GET_CODE (from
) == REG
453 && GET_MODE (x
) == GET_MODE (from
)
454 && REGNO (x
) == REGNO (from
))
455 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
456 && rtx_equal_p (x
, from
)))
458 validate_change (object
, loc
, to
, 1);
462 /* For commutative or comparison operations, try replacing each argument
463 separately and seeing if we made any changes. If so, put a constant
465 if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == 'c')
467 int prev_changes
= num_changes
;
469 validate_replace_rtx_1 (&XEXP (x
, 0), from
, to
, object
);
470 validate_replace_rtx_1 (&XEXP (x
, 1), from
, to
, object
);
471 if (prev_changes
!= num_changes
&& CONSTANT_P (XEXP (x
, 0)))
473 validate_change (object
, loc
,
474 gen_rtx_fmt_ee (GET_RTX_CLASS (code
) == 'c' ? code
475 : swap_condition (code
),
476 GET_MODE (x
), XEXP (x
, 1),
484 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
485 done the substitution, otherwise we won't. */
490 /* If we have a PLUS whose second operand is now a CONST_INT, use
491 plus_constant to try to simplify it. */
492 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& XEXP (x
, 1) == to
)
493 validate_change (object
, loc
, plus_constant (XEXP (x
, 0), INTVAL (to
)),
498 if (GET_CODE (to
) == CONST_INT
&& XEXP (x
, 1) == from
)
500 validate_change (object
, loc
,
501 plus_constant (XEXP (x
, 0), - INTVAL (to
)),
509 /* In these cases, the operation to be performed depends on the mode
510 of the operand. If we are replacing the operand with a VOIDmode
511 constant, we lose the information. So try to simplify the operation
513 if (GET_MODE (to
) == VOIDmode
514 && (rtx_equal_p (XEXP (x
, 0), from
)
515 || (GET_CODE (XEXP (x
, 0)) == SUBREG
516 && rtx_equal_p (SUBREG_REG (XEXP (x
, 0)), from
))))
520 /* If there is a subreg involved, crop to the portion of the
521 constant that we are interested in. */
522 if (GET_CODE (XEXP (x
, 0)) == SUBREG
)
524 if (GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))) <= UNITS_PER_WORD
)
525 to
= operand_subword (to
,
526 (SUBREG_BYTE (XEXP (x
, 0))
529 else if (GET_MODE_CLASS (GET_MODE (from
)) == MODE_INT
530 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
531 <= HOST_BITS_PER_WIDE_INT
))
533 int i
= SUBREG_BYTE (XEXP (x
, 0)) * BITS_PER_UNIT
;
535 unsigned HOST_WIDE_INT vall
;
537 if (GET_CODE (to
) == CONST_INT
)
540 valh
= (HOST_WIDE_INT
) vall
< 0 ? ~0 : 0;
544 vall
= CONST_DOUBLE_LOW (to
);
545 valh
= CONST_DOUBLE_HIGH (to
);
548 if (WORDS_BIG_ENDIAN
)
549 i
= (GET_MODE_BITSIZE (GET_MODE (from
))
550 - GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - i
);
551 if (i
> 0 && i
< HOST_BITS_PER_WIDE_INT
)
552 vall
= vall
>> i
| valh
<< (HOST_BITS_PER_WIDE_INT
- i
);
553 else if (i
>= HOST_BITS_PER_WIDE_INT
)
554 vall
= valh
>> (i
- HOST_BITS_PER_WIDE_INT
);
555 to
= GEN_INT (trunc_int_for_mode (vall
,
556 GET_MODE (XEXP (x
, 0))));
559 to
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
562 /* If the above didn't fail, perform the extension from the
563 mode of the operand (and not the mode of FROM). */
565 new = simplify_unary_operation (code
, GET_MODE (x
), to
,
566 GET_MODE (XEXP (x
, 0)));
568 /* If any of the above failed, substitute in something that
569 we know won't be recognized. */
571 new = gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
573 validate_change (object
, loc
, new, 1);
579 /* In case we are replacing by constant, attempt to simplify it to
580 non-SUBREG expression. We can't do this later, since the information
581 about inner mode may be lost. */
582 if (rtx_equal_p (SUBREG_REG (x
), from
))
585 temp
= simplify_subreg (GET_MODE (x
), to
,
586 GET_MODE (to
) != VOIDmode
587 ? GET_MODE (to
) : GET_MODE (SUBREG_REG (x
)),
591 validate_change (object
, loc
, temp
, 1);
594 /* Avoid creating of invalid SUBREGS. */
595 if (GET_MODE (from
) == VOIDmode
)
597 /* Substitute in something that we know won't be
599 to
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
600 validate_change (object
, loc
, to
, 1);
608 /* If we are replacing a register with memory, try to change the memory
609 to be the mode required for memory in extract operations (this isn't
610 likely to be an insertion operation; if it was, nothing bad will
611 happen, we might just fail in some cases). */
613 if (GET_CODE (from
) == REG
&& GET_CODE (to
) == MEM
614 && rtx_equal_p (XEXP (x
, 0), from
)
615 && GET_CODE (XEXP (x
, 1)) == CONST_INT
616 && GET_CODE (XEXP (x
, 2)) == CONST_INT
617 && ! mode_dependent_address_p (XEXP (to
, 0))
618 && ! MEM_VOLATILE_P (to
))
620 enum machine_mode wanted_mode
= VOIDmode
;
621 enum machine_mode is_mode
= GET_MODE (to
);
622 int pos
= INTVAL (XEXP (x
, 2));
625 if (code
== ZERO_EXTRACT
)
627 wanted_mode
= insn_data
[(int) CODE_FOR_extzv
].operand
[1].mode
;
628 if (wanted_mode
== VOIDmode
)
629 wanted_mode
= word_mode
;
633 if (code
== SIGN_EXTRACT
)
635 wanted_mode
= insn_data
[(int) CODE_FOR_extv
].operand
[1].mode
;
636 if (wanted_mode
== VOIDmode
)
637 wanted_mode
= word_mode
;
641 /* If we have a narrower mode, we can do something. */
642 if (wanted_mode
!= VOIDmode
643 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
645 int offset
= pos
/ BITS_PER_UNIT
;
648 /* If the bytes and bits are counted differently, we
649 must adjust the offset. */
650 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
651 offset
= (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
)
654 pos
%= GET_MODE_BITSIZE (wanted_mode
);
656 newmem
= gen_rtx_MEM (wanted_mode
,
657 plus_constant (XEXP (to
, 0), offset
));
658 MEM_COPY_ATTRIBUTES (newmem
, to
);
660 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
661 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
671 /* For commutative or comparison operations we've already performed
672 replacements. Don't try to perform them again. */
673 if (GET_RTX_CLASS (code
) != '<' && GET_RTX_CLASS (code
) != 'c')
675 fmt
= GET_RTX_FORMAT (code
);
676 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
679 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
);
680 else if (fmt
[i
] == 'E')
681 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
682 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
);
687 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
688 with TO. After all changes have been made, validate by seeing
689 if INSN is still valid. */
692 validate_replace_rtx_subexp (from
, to
, insn
, loc
)
693 rtx from
, to
, insn
, *loc
;
695 validate_replace_rtx_1 (loc
, from
, to
, insn
);
696 return apply_change_group ();
699 /* Try replacing every occurrence of FROM in INSN with TO. After all
700 changes have been made, validate by seeing if INSN is still valid. */
703 validate_replace_rtx (from
, to
, insn
)
706 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
707 return apply_change_group ();
710 /* Try replacing every occurrence of FROM in INSN with TO. */
713 validate_replace_rtx_group (from
, to
, insn
)
716 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
719 /* Function called by note_uses to replace used subexpressions. */
720 struct validate_replace_src_data
722 rtx from
; /* Old RTX */
723 rtx to
; /* New RTX */
724 rtx insn
; /* Insn in which substitution is occurring. */
728 validate_replace_src_1 (x
, data
)
732 struct validate_replace_src_data
*d
733 = (struct validate_replace_src_data
*) data
;
735 validate_replace_rtx_1 (x
, d
->from
, d
->to
, d
->insn
);
738 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
739 SET_DESTs. After all changes have been made, validate by seeing if
740 INSN is still valid. */
743 validate_replace_src (from
, to
, insn
)
746 struct validate_replace_src_data d
;
751 note_uses (&PATTERN (insn
), validate_replace_src_1
, &d
);
752 return apply_change_group ();
756 /* Return 1 if the insn using CC0 set by INSN does not contain
757 any ordered tests applied to the condition codes.
758 EQ and NE tests do not count. */
761 next_insn_tests_no_inequality (insn
)
764 register rtx next
= next_cc0_user (insn
);
766 /* If there is no next insn, we have to take the conservative choice. */
770 return ((GET_CODE (next
) == JUMP_INSN
771 || GET_CODE (next
) == INSN
772 || GET_CODE (next
) == CALL_INSN
)
773 && ! inequality_comparisons_p (PATTERN (next
)));
#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
805 /* This is used by find_single_use to locate an rtx that contains exactly one
806 use of DEST, which is typically either a REG or CC0. It returns a
807 pointer to the innermost rtx expression containing DEST. Appearances of
808 DEST that are being used to totally replace it are not counted. */
811 find_single_use_1 (dest
, loc
)
816 enum rtx_code code
= GET_CODE (x
);
833 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
834 of a REG that occupies all of the REG, the insn uses DEST if
835 it is mentioned in the destination or the source. Otherwise, we
836 need just check the source. */
837 if (GET_CODE (SET_DEST (x
)) != CC0
838 && GET_CODE (SET_DEST (x
)) != PC
839 && GET_CODE (SET_DEST (x
)) != REG
840 && ! (GET_CODE (SET_DEST (x
)) == SUBREG
841 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
842 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x
))))
843 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
844 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
845 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))))
848 return find_single_use_1 (dest
, &SET_SRC (x
));
852 return find_single_use_1 (dest
, &XEXP (x
, 0));
858 /* If it wasn't one of the common cases above, check each expression and
859 vector of this code. Look for a unique usage of DEST. */
861 fmt
= GET_RTX_FORMAT (code
);
862 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
866 if (dest
== XEXP (x
, i
)
867 || (GET_CODE (dest
) == REG
&& GET_CODE (XEXP (x
, i
)) == REG
868 && REGNO (dest
) == REGNO (XEXP (x
, i
))))
871 this_result
= find_single_use_1 (dest
, &XEXP (x
, i
));
874 result
= this_result
;
875 else if (this_result
)
876 /* Duplicate usage. */
879 else if (fmt
[i
] == 'E')
883 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
885 if (XVECEXP (x
, i
, j
) == dest
886 || (GET_CODE (dest
) == REG
887 && GET_CODE (XVECEXP (x
, i
, j
)) == REG
888 && REGNO (XVECEXP (x
, i
, j
)) == REGNO (dest
)))
891 this_result
= find_single_use_1 (dest
, &XVECEXP (x
, i
, j
));
894 result
= this_result
;
895 else if (this_result
)
904 /* See if DEST, produced in INSN, is used only a single time in the
905 sequel. If so, return a pointer to the innermost rtx expression in which
908 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
910 This routine will return usually zero either before flow is called (because
911 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
912 note can't be trusted).
914 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
915 care about REG_DEAD notes or LOG_LINKS.
917 Otherwise, we find the single use by finding an insn that has a
918 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
919 only referenced once in that insn, we know that it must be the first
920 and last insn referencing DEST. */
923 find_single_use (dest
, insn
, ploc
)
935 next
= NEXT_INSN (insn
);
937 || (GET_CODE (next
) != INSN
&& GET_CODE (next
) != JUMP_INSN
))
940 result
= find_single_use_1 (dest
, &PATTERN (next
));
947 if (reload_completed
|| reload_in_progress
|| GET_CODE (dest
) != REG
)
950 for (next
= next_nonnote_insn (insn
);
951 next
!= 0 && GET_CODE (next
) != CODE_LABEL
;
952 next
= next_nonnote_insn (next
))
953 if (INSN_P (next
) && dead_or_set_p (next
, dest
))
955 for (link
= LOG_LINKS (next
); link
; link
= XEXP (link
, 1))
956 if (XEXP (link
, 0) == insn
)
961 result
= find_single_use_1 (dest
, &PATTERN (next
));
971 /* Return 1 if OP is a valid general operand for machine mode MODE.
972 This is either a register reference, a memory reference,
973 or a constant. In the case of a memory reference, the address
974 is checked for general validity for the target machine.
976 Register and memory references must have mode MODE in order to be valid,
977 but some constants have no machine mode and are valid for any mode.
979 If MODE is VOIDmode, OP is checked for validity for whatever mode
982 The main use of this function is as a predicate in match_operand
983 expressions in the machine description.
985 For an explanation of this function's behavior for registers of
986 class NO_REGS, see the comment for `register_operand'. */
989 general_operand (op
, mode
)
991 enum machine_mode mode
;
993 register enum rtx_code code
= GET_CODE (op
);
995 if (mode
== VOIDmode
)
996 mode
= GET_MODE (op
);
998 /* Don't accept CONST_INT or anything similar
999 if the caller wants something floating. */
1000 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1001 && GET_MODE_CLASS (mode
) != MODE_INT
1002 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1005 if (GET_CODE (op
) == CONST_INT
1006 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1009 if (CONSTANT_P (op
))
1010 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1011 || mode
== VOIDmode
)
1012 #ifdef LEGITIMATE_PIC_OPERAND_P
1013 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1015 && LEGITIMATE_CONSTANT_P (op
));
1017 /* Except for certain constants with VOIDmode, already checked for,
1018 OP's mode must match MODE if MODE specifies a mode. */
1020 if (GET_MODE (op
) != mode
)
1025 #ifdef INSN_SCHEDULING
1026 /* On machines that have insn scheduling, we want all memory
1027 reference to be explicit, so outlaw paradoxical SUBREGs. */
1028 if (GET_CODE (SUBREG_REG (op
)) == MEM
1029 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op
))))
1033 op
= SUBREG_REG (op
);
1034 code
= GET_CODE (op
);
1038 /* A register whose class is NO_REGS is not a general operand. */
1039 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1040 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
);
1044 register rtx y
= XEXP (op
, 0);
1046 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
1049 if (GET_CODE (y
) == ADDRESSOF
)
1052 /* Use the mem's mode, since it will be reloaded thus. */
1053 mode
= GET_MODE (op
);
1054 GO_IF_LEGITIMATE_ADDRESS (mode
, y
, win
);
1057 /* Pretend this is an operand for now; we'll run force_operand
1058 on its replacement in fixup_var_refs_1. */
1059 if (code
== ADDRESSOF
)
1068 /* Return 1 if OP is a valid memory address for a memory reference
1071 The main use of this function is as a predicate in match_operand
1072 expressions in the machine description. */
1075 address_operand (op
, mode
)
1077 enum machine_mode mode
;
1079 return memory_address_p (mode
, op
);
1082 /* Return 1 if OP is a register reference of mode MODE.
1083 If MODE is VOIDmode, accept a register in any mode.
1085 The main use of this function is as a predicate in match_operand
1086 expressions in the machine description.
1088 As a special exception, registers whose class is NO_REGS are
1089 not accepted by `register_operand'. The reason for this change
1090 is to allow the representation of special architecture artifacts
1091 (such as a condition code register) without extending the rtl
1092 definitions. Since registers of class NO_REGS cannot be used
1093 as registers in any case where register classes are examined,
1094 it is most consistent to keep this function from accepting them. */
1097 register_operand (op
, mode
)
1099 enum machine_mode mode
;
1101 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1104 if (GET_CODE (op
) == SUBREG
)
1106 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1107 because it is guaranteed to be reloaded into one.
1108 Just make sure the MEM is valid in itself.
1109 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1110 but currently it does result from (SUBREG (REG)...) where the
1111 reg went on the stack.) */
1112 if (! reload_completed
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1113 return general_operand (op
, mode
);
1115 #ifdef CLASS_CANNOT_CHANGE_MODE
1116 if (GET_CODE (SUBREG_REG (op
)) == REG
1117 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
1118 && (TEST_HARD_REG_BIT
1119 (reg_class_contents
[(int) CLASS_CANNOT_CHANGE_MODE
],
1120 REGNO (SUBREG_REG (op
))))
1121 && CLASS_CANNOT_CHANGE_MODE_P (mode
, GET_MODE (SUBREG_REG (op
)))
1122 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op
))) != MODE_COMPLEX_INT
1123 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op
))) != MODE_COMPLEX_FLOAT
)
1127 op
= SUBREG_REG (op
);
1130 /* If we have an ADDRESSOF, consider it valid since it will be
1131 converted into something that will not be a MEM. */
1132 if (GET_CODE (op
) == ADDRESSOF
)
1135 /* We don't consider registers whose class is NO_REGS
1136 to be a register operand. */
1137 return (GET_CODE (op
) == REG
1138 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1139 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1142 /* Return 1 for a register in Pmode; ignore the tested mode. */
1145 pmode_register_operand (op
, mode
)
1147 enum machine_mode mode ATTRIBUTE_UNUSED
;
1149 return register_operand (op
, Pmode
);
1152 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1153 or a hard register. */
1156 scratch_operand (op
, mode
)
1158 enum machine_mode mode
;
1160 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1163 return (GET_CODE (op
) == SCRATCH
1164 || (GET_CODE (op
) == REG
1165 && REGNO (op
) < FIRST_PSEUDO_REGISTER
));
1168 /* Return 1 if OP is a valid immediate operand for mode MODE.
1170 The main use of this function is as a predicate in match_operand
1171 expressions in the machine description. */
1174 immediate_operand (op
, mode
)
1176 enum machine_mode mode
;
1178 /* Don't accept CONST_INT or anything similar
1179 if the caller wants something floating. */
1180 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1181 && GET_MODE_CLASS (mode
) != MODE_INT
1182 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1185 if (GET_CODE (op
) == CONST_INT
1186 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1189 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1190 result in 0/1. It seems a safe assumption that this is
1191 in range for everyone. */
1192 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1195 return (CONSTANT_P (op
)
1196 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
1197 || GET_MODE (op
) == VOIDmode
)
1198 #ifdef LEGITIMATE_PIC_OPERAND_P
1199 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1201 && LEGITIMATE_CONSTANT_P (op
));
1204 /* Returns 1 if OP is an operand that is a CONST_INT. */
1207 const_int_operand (op
, mode
)
1209 enum machine_mode mode ATTRIBUTE_UNUSED
;
1211 return GET_CODE (op
) == CONST_INT
;
1214 /* Returns 1 if OP is an operand that is a constant integer or constant
1215 floating-point number. */
1218 const_double_operand (op
, mode
)
1220 enum machine_mode mode
;
1222 /* Don't accept CONST_INT or anything similar
1223 if the caller wants something floating. */
1224 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1225 && GET_MODE_CLASS (mode
) != MODE_INT
1226 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1229 return ((GET_CODE (op
) == CONST_DOUBLE
|| GET_CODE (op
) == CONST_INT
)
1230 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1231 || GET_MODE (op
) == VOIDmode
));
1234 /* Return 1 if OP is a general operand that is not an immediate operand. */
1237 nonimmediate_operand (op
, mode
)
1239 enum machine_mode mode
;
1241 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
1244 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1247 nonmemory_operand (op
, mode
)
1249 enum machine_mode mode
;
1251 if (CONSTANT_P (op
))
1253 /* Don't accept CONST_INT or anything similar
1254 if the caller wants something floating. */
1255 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1256 && GET_MODE_CLASS (mode
) != MODE_INT
1257 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1260 if (GET_CODE (op
) == CONST_INT
1261 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1264 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1265 || mode
== VOIDmode
)
1266 #ifdef LEGITIMATE_PIC_OPERAND_P
1267 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1269 && LEGITIMATE_CONSTANT_P (op
));
1272 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1275 if (GET_CODE (op
) == SUBREG
)
1277 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1278 because it is guaranteed to be reloaded into one.
1279 Just make sure the MEM is valid in itself.
1280 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1281 but currently it does result from (SUBREG (REG)...) where the
1282 reg went on the stack.) */
1283 if (! reload_completed
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1284 return general_operand (op
, mode
);
1285 op
= SUBREG_REG (op
);
1288 /* We don't consider registers whose class is NO_REGS
1289 to be a register operand. */
1290 return (GET_CODE (op
) == REG
1291 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1292 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1295 /* Return 1 if OP is a valid operand that stands for pushing a
1296 value of mode MODE onto the stack.
1298 The main use of this function is as a predicate in match_operand
1299 expressions in the machine description. */
1302 push_operand (op
, mode
)
1304 enum machine_mode mode
;
1306 unsigned int rounded_size
= GET_MODE_SIZE (mode
);
1308 #ifdef PUSH_ROUNDING
1309 rounded_size
= PUSH_ROUNDING (rounded_size
);
1312 if (GET_CODE (op
) != MEM
)
1315 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1320 if (rounded_size
== GET_MODE_SIZE (mode
))
1322 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1327 if (GET_CODE (op
) != PRE_MODIFY
1328 || GET_CODE (XEXP (op
, 1)) != PLUS
1329 || XEXP (XEXP (op
, 1), 0) != XEXP (op
, 0)
1330 || GET_CODE (XEXP (XEXP (op
, 1), 1)) != CONST_INT
1331 #ifdef STACK_GROWS_DOWNWARD
1332 || INTVAL (XEXP (XEXP (op
, 1), 1)) != - (int) rounded_size
1334 || INTVAL (XEXP (XEXP (op
, 1), 1)) != rounded_size
1340 return XEXP (op
, 0) == stack_pointer_rtx
;
1343 /* Return 1 if OP is a valid operand that stands for popping a
1344 value of mode MODE off the stack.
1346 The main use of this function is as a predicate in match_operand
1347 expressions in the machine description. */
1350 pop_operand (op
, mode
)
1352 enum machine_mode mode
;
1354 if (GET_CODE (op
) != MEM
)
1357 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1362 if (GET_CODE (op
) != STACK_POP_CODE
)
1365 return XEXP (op
, 0) == stack_pointer_rtx
;
1368 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1371 memory_address_p (mode
, addr
)
1372 enum machine_mode mode ATTRIBUTE_UNUSED
;
1375 if (GET_CODE (addr
) == ADDRESSOF
)
1378 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1385 /* Return 1 if OP is a valid memory reference with mode MODE,
1386 including a valid address.
1388 The main use of this function is as a predicate in match_operand
1389 expressions in the machine description. */
1392 memory_operand (op
, mode
)
1394 enum machine_mode mode
;
1398 if (! reload_completed
)
1399 /* Note that no SUBREG is a memory operand before end of reload pass,
1400 because (SUBREG (MEM...)) forces reloading into a register. */
1401 return GET_CODE (op
) == MEM
&& general_operand (op
, mode
);
1403 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1407 if (GET_CODE (inner
) == SUBREG
)
1408 inner
= SUBREG_REG (inner
);
1410 return (GET_CODE (inner
) == MEM
&& general_operand (op
, mode
));
1413 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1414 that is, a memory reference whose address is a general_operand. */
1417 indirect_operand (op
, mode
)
1419 enum machine_mode mode
;
1421 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1422 if (! reload_completed
1423 && GET_CODE (op
) == SUBREG
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1425 register int offset
= SUBREG_BYTE (op
);
1426 rtx inner
= SUBREG_REG (op
);
1428 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1431 /* The only way that we can have a general_operand as the resulting
1432 address is if OFFSET is zero and the address already is an operand
1433 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1436 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1437 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1438 && GET_CODE (XEXP (XEXP (inner
, 0), 1)) == CONST_INT
1439 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1440 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1443 return (GET_CODE (op
) == MEM
1444 && memory_operand (op
, mode
)
1445 && general_operand (XEXP (op
, 0), Pmode
));
1448 /* Return 1 if this is a comparison operator. This allows the use of
1449 MATCH_OPERATOR to recognize all the branch insns. */
1452 comparison_operator (op
, mode
)
1454 enum machine_mode mode
;
1456 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1457 && GET_RTX_CLASS (GET_CODE (op
)) == '<');
1460 /* If BODY is an insn body that uses ASM_OPERANDS,
1461 return the number of operands (both input and output) in the insn.
1462 Otherwise return -1. */
1465 asm_noperands (body
)
1468 switch (GET_CODE (body
))
1471 /* No output operands: return number of input operands. */
1472 return ASM_OPERANDS_INPUT_LENGTH (body
);
1474 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1475 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1476 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body
)) + 1;
1480 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
1481 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1483 /* Multiple output operands, or 1 output plus some clobbers:
1484 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1488 /* Count backwards through CLOBBERs to determine number of SETs. */
1489 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1491 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1493 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1497 /* N_SETS is now number of output operands. */
1500 /* Verify that all the SETs we have
1501 came from a single original asm_operands insn
1502 (so that invalid combinations are blocked). */
1503 for (i
= 0; i
< n_sets
; i
++)
1505 rtx elt
= XVECEXP (body
, 0, i
);
1506 if (GET_CODE (elt
) != SET
)
1508 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1510 /* If these ASM_OPERANDS rtx's came from different original insns
1511 then they aren't allowed together. */
1512 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1513 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body
, 0, 0))))
1516 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body
, 0, 0)))
1519 else if (GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1521 /* 0 outputs, but some clobbers:
1522 body is [(asm_operands ...) (clobber (reg ...))...]. */
1525 /* Make sure all the other parallel things really are clobbers. */
1526 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1527 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1530 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body
, 0, 0));
1539 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1540 copy its operands (both input and output) into the vector OPERANDS,
1541 the locations of the operands within the insn into the vector OPERAND_LOCS,
1542 and the constraints for the operands into CONSTRAINTS.
1543 Write the modes of the operands into MODES.
1544 Return the assembler-template.
1546 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1547 we don't store that info. */
1550 decode_asm_operands (body
, operands
, operand_locs
, constraints
, modes
)
1554 const char **constraints
;
1555 enum machine_mode
*modes
;
1559 const char *template = 0;
1561 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1563 rtx asmop
= SET_SRC (body
);
1564 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1566 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
) + 1;
1568 for (i
= 1; i
< noperands
; i
++)
1571 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
- 1);
1573 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
- 1);
1575 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
- 1);
1577 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
- 1);
1580 /* The output is in the SET.
1581 Its constraint is in the ASM_OPERANDS itself. */
1583 operands
[0] = SET_DEST (body
);
1585 operand_locs
[0] = &SET_DEST (body
);
1587 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1589 modes
[0] = GET_MODE (SET_DEST (body
));
1590 template = ASM_OPERANDS_TEMPLATE (asmop
);
1592 else if (GET_CODE (body
) == ASM_OPERANDS
)
1595 /* No output operands: BODY is (asm_operands ....). */
1597 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1599 /* The input operands are found in the 1st element vector. */
1600 /* Constraints for inputs are in the 2nd element vector. */
1601 for (i
= 0; i
< noperands
; i
++)
1604 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1606 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1608 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1610 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1612 template = ASM_OPERANDS_TEMPLATE (asmop
);
1614 else if (GET_CODE (body
) == PARALLEL
1615 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
)
1617 rtx asmop
= SET_SRC (XVECEXP (body
, 0, 0));
1618 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1619 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1620 int nout
= 0; /* Does not include CLOBBERs. */
1622 /* At least one output, plus some CLOBBERs. */
1624 /* The outputs are in the SETs.
1625 Their constraints are in the ASM_OPERANDS itself. */
1626 for (i
= 0; i
< nparallel
; i
++)
1628 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1629 break; /* Past last SET */
1632 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1634 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1636 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1638 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1642 for (i
= 0; i
< nin
; i
++)
1645 operand_locs
[i
+ nout
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1647 operands
[i
+ nout
] = ASM_OPERANDS_INPUT (asmop
, i
);
1649 constraints
[i
+ nout
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1651 modes
[i
+ nout
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1654 template = ASM_OPERANDS_TEMPLATE (asmop
);
1656 else if (GET_CODE (body
) == PARALLEL
1657 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1659 /* No outputs, but some CLOBBERs. */
1661 rtx asmop
= XVECEXP (body
, 0, 0);
1662 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1664 for (i
= 0; i
< nin
; i
++)
1667 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1669 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1671 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1673 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1676 template = ASM_OPERANDS_TEMPLATE (asmop
);
1682 /* Check if an asm_operand matches it's constraints.
1683 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1686 asm_operand_ok (op
, constraint
)
1688 const char *constraint
;
1692 /* Use constrain_operands after reload. */
1693 if (reload_completed
)
1698 char c
= *constraint
++;
1712 case '0': case '1': case '2': case '3': case '4':
1713 case '5': case '6': case '7': case '8': case '9':
1714 /* For best results, our caller should have given us the
1715 proper matching constraint, but we can't actually fail
1716 the check if they didn't. Indicate that results are
1722 if (address_operand (op
, VOIDmode
))
1727 case 'V': /* non-offsettable */
1728 if (memory_operand (op
, VOIDmode
))
1732 case 'o': /* offsettable */
1733 if (offsettable_nonstrict_memref_p (op
))
1738 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1739 excepting those that expand_call created. Further, on some
1740 machines which do not have generalized auto inc/dec, an inc/dec
1741 is not a memory_operand.
1743 Match any memory and hope things are resolved after reload. */
1745 if (GET_CODE (op
) == MEM
1747 || GET_CODE (XEXP (op
, 0)) == PRE_DEC
1748 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
1753 if (GET_CODE (op
) == MEM
1755 || GET_CODE (XEXP (op
, 0)) == PRE_INC
1756 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
1761 #ifndef REAL_ARITHMETIC
1762 /* Match any floating double constant, but only if
1763 we can examine the bits of it reliably. */
1764 if ((HOST_FLOAT_FORMAT
!= TARGET_FLOAT_FORMAT
1765 || HOST_BITS_PER_WIDE_INT
!= BITS_PER_WORD
)
1766 && GET_MODE (op
) != VOIDmode
&& ! flag_pretend_float
)
1772 if (GET_CODE (op
) == CONST_DOUBLE
)
1777 if (GET_CODE (op
) == CONST_DOUBLE
1778 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, 'G'))
1782 if (GET_CODE (op
) == CONST_DOUBLE
1783 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, 'H'))
1788 if (GET_CODE (op
) == CONST_INT
1789 || (GET_CODE (op
) == CONST_DOUBLE
1790 && GET_MODE (op
) == VOIDmode
))
1796 #ifdef LEGITIMATE_PIC_OPERAND_P
1797 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1804 if (GET_CODE (op
) == CONST_INT
1805 || (GET_CODE (op
) == CONST_DOUBLE
1806 && GET_MODE (op
) == VOIDmode
))
1811 if (GET_CODE (op
) == CONST_INT
1812 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'))
1816 if (GET_CODE (op
) == CONST_INT
1817 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'J'))
1821 if (GET_CODE (op
) == CONST_INT
1822 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'K'))
1826 if (GET_CODE (op
) == CONST_INT
1827 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'))
1831 if (GET_CODE (op
) == CONST_INT
1832 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'M'))
1836 if (GET_CODE (op
) == CONST_INT
1837 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'N'))
1841 if (GET_CODE (op
) == CONST_INT
1842 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'O'))
1846 if (GET_CODE (op
) == CONST_INT
1847 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P'))
1855 if (general_operand (op
, VOIDmode
))
1860 /* For all other letters, we first check for a register class,
1861 otherwise it is an EXTRA_CONSTRAINT. */
1862 if (REG_CLASS_FROM_LETTER (c
) != NO_REGS
)
1865 if (GET_MODE (op
) == BLKmode
)
1867 if (register_operand (op
, VOIDmode
))
1870 #ifdef EXTRA_CONSTRAINT
1871 if (EXTRA_CONSTRAINT (op
, c
))
1881 /* Given an rtx *P, if it is a sum containing an integer constant term,
1882 return the location (type rtx *) of the pointer to that constant term.
1883 Otherwise, return a null pointer. */
1886 find_constant_term_loc (p
)
1890 register enum rtx_code code
= GET_CODE (*p
);
1892 /* If *P IS such a constant term, P is its location. */
1894 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1898 /* Otherwise, if not a sum, it has no constant term. */
1900 if (GET_CODE (*p
) != PLUS
)
1903 /* If one of the summands is constant, return its location. */
1905 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1906 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1909 /* Otherwise, check each summand for containing a constant term. */
1911 if (XEXP (*p
, 0) != 0)
1913 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1918 if (XEXP (*p
, 1) != 0)
1920 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1928 /* Return 1 if OP is a memory reference
1929 whose address contains no side effects
1930 and remains valid after the addition
1931 of a positive integer less than the
1932 size of the object being referenced.
1934 We assume that the original address is valid and do not check it.
1936 This uses strict_memory_address_p as a subroutine, so
1937 don't use it before reload. */
1940 offsettable_memref_p (op
)
1943 return ((GET_CODE (op
) == MEM
)
1944 && offsettable_address_p (1, GET_MODE (op
), XEXP (op
, 0)));
1947 /* Similar, but don't require a strictly valid mem ref:
1948 consider pseudo-regs valid as index or base regs. */
1951 offsettable_nonstrict_memref_p (op
)
1954 return ((GET_CODE (op
) == MEM
)
1955 && offsettable_address_p (0, GET_MODE (op
), XEXP (op
, 0)));
1958 /* Return 1 if Y is a memory address which contains no side effects
1959 and would remain valid after the addition of a positive integer
1960 less than the size of that mode.
1962 We assume that the original address is valid and do not check it.
1963 We do check that it is valid for narrower modes.
1965 If STRICTP is nonzero, we require a strictly valid address,
1966 for the sake of use in reload.c. */
1969 offsettable_address_p (strictp
, mode
, y
)
1971 enum machine_mode mode
;
1974 register enum rtx_code ycode
= GET_CODE (y
);
1978 int (*addressp
) PARAMS ((enum machine_mode
, rtx
)) =
1979 (strictp
? strict_memory_address_p
: memory_address_p
);
1980 unsigned int mode_sz
= GET_MODE_SIZE (mode
);
1982 if (CONSTANT_ADDRESS_P (y
))
1985 /* Adjusting an offsettable address involves changing to a narrower mode.
1986 Make sure that's OK. */
1988 if (mode_dependent_address_p (y
))
1991 /* ??? How much offset does an offsettable BLKmode reference need?
1992 Clearly that depends on the situation in which it's being used.
1993 However, the current situation in which we test 0xffffffff is
1994 less than ideal. Caveat user. */
1996 mode_sz
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
1998 /* If the expression contains a constant term,
1999 see if it remains valid when max possible offset is added. */
2001 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
2006 *y2
= plus_constant (*y2
, mode_sz
- 1);
2007 /* Use QImode because an odd displacement may be automatically invalid
2008 for any wider mode. But it should be valid for a single byte. */
2009 good
= (*addressp
) (QImode
, y
);
2011 /* In any case, restore old contents of memory. */
2016 if (GET_RTX_CLASS (ycode
) == 'a')
2019 /* The offset added here is chosen as the maximum offset that
2020 any instruction could need to add when operating on something
2021 of the specified mode. We assume that if Y and Y+c are
2022 valid addresses then so is Y+d for all 0<d<c. */
2024 z
= plus_constant_for_output (y
, mode_sz
- 1);
2026 /* Use QImode because an odd displacement may be automatically invalid
2027 for any wider mode. But it should be valid for a single byte. */
2028 return (*addressp
) (QImode
, z
);
2031 /* Return 1 if ADDR is an address-expression whose effect depends
2032 on the mode of the memory reference it is used in.
2034 Autoincrement addressing is a typical example of mode-dependence
2035 because the amount of the increment depends on the mode. */
2038 mode_dependent_address_p (addr
)
2039 rtx addr ATTRIBUTE_UNUSED
; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2041 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, win
);
2043 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2044 win
: ATTRIBUTE_UNUSED_LABEL
2048 /* Return 1 if OP is a general operand
2049 other than a memory ref with a mode dependent address. */
2052 mode_independent_operand (op
, mode
)
2053 enum machine_mode mode
;
2058 if (! general_operand (op
, mode
))
2061 if (GET_CODE (op
) != MEM
)
2064 addr
= XEXP (op
, 0);
2065 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, lose
);
2067 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2068 lose
: ATTRIBUTE_UNUSED_LABEL
2072 /* Given an operand OP that is a valid memory reference which
2073 satisfies offsettable_memref_p, return a new memory reference whose
2074 address has been adjusted by OFFSET. OFFSET should be positive and
2075 less than the size of the object referenced. */
2078 adj_offsettable_operand (op
, offset
)
2082 register enum rtx_code code
= GET_CODE (op
);
2086 register rtx y
= XEXP (op
, 0);
2089 if (CONSTANT_ADDRESS_P (y
))
2091 new = gen_rtx_MEM (GET_MODE (op
),
2092 plus_constant_for_output (y
, offset
));
2093 MEM_COPY_ATTRIBUTES (new, op
);
2097 if (GET_CODE (y
) == PLUS
)
2100 register rtx
*const_loc
;
2104 const_loc
= find_constant_term_loc (&z
);
2107 *const_loc
= plus_constant_for_output (*const_loc
, offset
);
2112 new = gen_rtx_MEM (GET_MODE (op
), plus_constant_for_output (y
, offset
));
2113 MEM_COPY_ATTRIBUTES (new, op
);
2119 /* Like extract_insn, but save insn extracted and don't extract again, when
2120 called again for the same insn expecting that recog_data still contain the
2121 valid information. This is used primary by gen_attr infrastructure that
2122 often does extract insn again and again. */
2124 extract_insn_cached (insn
)
2127 if (recog_data
.insn
== insn
&& INSN_CODE (insn
) >= 0)
2129 extract_insn (insn
);
2130 recog_data
.insn
= insn
;
2132 /* Do cached extract_insn, constrain_operand and complain about failures.
2133 Used by insn_attrtab. */
2135 extract_constrain_insn_cached (insn
)
2138 extract_insn_cached (insn
);
2139 if (which_alternative
== -1
2140 && !constrain_operands (reload_completed
))
2141 fatal_insn_not_found (insn
);
2143 /* Do cached constrain_operand and complain about failures. */
2145 constrain_operands_cached (strict
)
2148 if (which_alternative
== -1)
2149 return constrain_operands (strict
);
2154 /* Analyze INSN and fill in recog_data. */
2163 rtx body
= PATTERN (insn
);
2165 recog_data
.insn
= NULL
;
2166 recog_data
.n_operands
= 0;
2167 recog_data
.n_alternatives
= 0;
2168 recog_data
.n_dups
= 0;
2169 which_alternative
= -1;
2171 switch (GET_CODE (body
))
2181 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
2186 if ((GET_CODE (XVECEXP (body
, 0, 0)) == SET
2187 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
2188 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
2194 recog_data
.n_operands
= noperands
= asm_noperands (body
);
2197 /* This insn is an `asm' with operands. */
2199 /* expand_asm_operands makes sure there aren't too many operands. */
2200 if (noperands
> MAX_RECOG_OPERANDS
)
2203 /* Now get the operand values and constraints out of the insn. */
2204 decode_asm_operands (body
, recog_data
.operand
,
2205 recog_data
.operand_loc
,
2206 recog_data
.constraints
,
2207 recog_data
.operand_mode
);
2210 const char *p
= recog_data
.constraints
[0];
2211 recog_data
.n_alternatives
= 1;
2213 recog_data
.n_alternatives
+= (*p
++ == ',');
2217 fatal_insn_not_found (insn
);
2221 /* Ordinary insn: recognize it, get the operands via insn_extract
2222 and get the constraints. */
2224 icode
= recog_memoized (insn
);
2226 fatal_insn_not_found (insn
);
2228 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
2229 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
2230 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
2232 insn_extract (insn
);
2234 for (i
= 0; i
< noperands
; i
++)
2236 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2237 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
2238 /* VOIDmode match_operands gets mode from their real operand. */
2239 if (recog_data
.operand_mode
[i
] == VOIDmode
)
2240 recog_data
.operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2243 for (i
= 0; i
< noperands
; i
++)
2244 recog_data
.operand_type
[i
]
2245 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
2246 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
2249 if (recog_data
.n_alternatives
> MAX_RECOG_ALTERNATIVES
)
2253 /* After calling extract_insn, you can use this function to extract some
2254 information from the constraint strings into a more usable form.
2255 The collected data is stored in recog_op_alt. */
2257 preprocess_constraints ()
2261 memset (recog_op_alt
, 0, sizeof recog_op_alt
);
2262 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2265 struct operand_alternative
*op_alt
;
2266 const char *p
= recog_data
.constraints
[i
];
2268 op_alt
= recog_op_alt
[i
];
2270 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
2272 op_alt
[j
].class = NO_REGS
;
2273 op_alt
[j
].constraint
= p
;
2274 op_alt
[j
].matches
= -1;
2275 op_alt
[j
].matched
= -1;
2277 if (*p
== '\0' || *p
== ',')
2279 op_alt
[j
].anything_ok
= 1;
2289 while (c
!= ',' && c
!= '\0');
2290 if (c
== ',' || c
== '\0')
2295 case '=': case '+': case '*': case '%':
2296 case 'E': case 'F': case 'G': case 'H':
2297 case 's': case 'i': case 'n':
2298 case 'I': case 'J': case 'K': case 'L':
2299 case 'M': case 'N': case 'O': case 'P':
2300 /* These don't say anything we care about. */
2304 op_alt
[j
].reject
+= 6;
2307 op_alt
[j
].reject
+= 600;
2310 op_alt
[j
].earlyclobber
= 1;
2313 case '0': case '1': case '2': case '3': case '4':
2314 case '5': case '6': case '7': case '8': case '9':
2315 op_alt
[j
].matches
= c
- '0';
2316 recog_op_alt
[op_alt
[j
].matches
][j
].matched
= i
;
2320 op_alt
[j
].memory_ok
= 1;
2323 op_alt
[j
].decmem_ok
= 1;
2326 op_alt
[j
].incmem_ok
= 1;
2329 op_alt
[j
].nonoffmem_ok
= 1;
2332 op_alt
[j
].offmem_ok
= 1;
2335 op_alt
[j
].anything_ok
= 1;
2339 op_alt
[j
].is_address
= 1;
2340 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) BASE_REG_CLASS
];
2344 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) GENERAL_REGS
];
2348 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c
)];
2356 /* Check the operands of an insn against the insn's operand constraints
2357 and return 1 if they are valid.
2358 The information about the insn's operands, constraints, operand modes
2359 etc. is obtained from the global variables set up by extract_insn.
2361 WHICH_ALTERNATIVE is set to a number which indicates which
2362 alternative of constraints was matched: 0 for the first alternative,
2363 1 for the next, etc.
2365 In addition, when two operands are match
2366 and it happens that the output operand is (reg) while the
2367 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2368 make the output operand look like the input.
2369 This is because the output operand is the one the template will print.
2371 This is used in final, just before printing the assembler code and by
2372 the routines that determine an insn's attribute.
2374 If STRICT is a positive non-zero value, it means that we have been
2375 called after reload has been completed. In that case, we must
2376 do all checks strictly. If it is zero, it means that we have been called
2377 before reload has completed. In that case, we first try to see if we can
2378 find an alternative that matches strictly. If not, we try again, this
2379 time assuming that reload will fix up the insn. This provides a "best
2380 guess" for the alternative and is used to compute attributes of insns prior
2381 to reload. A negative value of STRICT is used for this internal call. */
2389 constrain_operands (strict
)
2392 const char *constraints
[MAX_RECOG_OPERANDS
];
2393 int matching_operands
[MAX_RECOG_OPERANDS
];
2394 int earlyclobber
[MAX_RECOG_OPERANDS
];
2397 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2398 int funny_match_index
;
2400 which_alternative
= 0;
2401 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2404 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2406 constraints
[c
] = recog_data
.constraints
[c
];
2407 matching_operands
[c
] = -1;
2414 funny_match_index
= 0;
2416 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2418 register rtx op
= recog_data
.operand
[opno
];
2419 enum machine_mode mode
= GET_MODE (op
);
2420 register const char *p
= constraints
[opno
];
2425 earlyclobber
[opno
] = 0;
2427 /* A unary operator may be accepted by the predicate, but it
2428 is irrelevant for matching constraints. */
2429 if (GET_RTX_CLASS (GET_CODE (op
)) == '1')
2432 if (GET_CODE (op
) == SUBREG
)
2434 if (GET_CODE (SUBREG_REG (op
)) == REG
2435 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2436 offset
= subreg_regno_offset (REGNO (SUBREG_REG (op
)),
2437 GET_MODE (SUBREG_REG (op
)),
2440 op
= SUBREG_REG (op
);
2443 /* An empty constraint or empty alternative
2444 allows anything which matched the pattern. */
2445 if (*p
== 0 || *p
== ',')
2448 while (*p
&& (c
= *p
++) != ',')
2451 case '?': case '!': case '*': case '%':
2456 /* Ignore rest of this alternative as far as
2457 constraint checking is concerned. */
2458 while (*p
&& *p
!= ',')
2463 earlyclobber
[opno
] = 1;
2466 case '0': case '1': case '2': case '3': case '4':
2467 case '5': case '6': case '7': case '8': case '9':
2469 /* This operand must be the same as a previous one.
2470 This kind of constraint is used for instructions such
2471 as add when they take only two operands.
2473 Note that the lower-numbered operand is passed first.
2475 If we are not testing strictly, assume that this constraint
2476 will be satisfied. */
2481 rtx op1
= recog_data
.operand
[c
- '0'];
2482 rtx op2
= recog_data
.operand
[opno
];
2484 /* A unary operator may be accepted by the predicate,
2485 but it is irrelevant for matching constraints. */
2486 if (GET_RTX_CLASS (GET_CODE (op1
)) == '1')
2487 op1
= XEXP (op1
, 0);
2488 if (GET_RTX_CLASS (GET_CODE (op2
)) == '1')
2489 op2
= XEXP (op2
, 0);
2491 val
= operands_match_p (op1
, op2
);
2494 matching_operands
[opno
] = c
- '0';
2495 matching_operands
[c
- '0'] = opno
;
2499 /* If output is *x and input is *--x,
2500 arrange later to change the output to *--x as well,
2501 since the output op is the one that will be printed. */
2502 if (val
== 2 && strict
> 0)
2504 funny_match
[funny_match_index
].this = opno
;
2505 funny_match
[funny_match_index
++].other
= c
- '0';
2510 /* p is used for address_operands. When we are called by
2511 gen_reload, no one will have checked that the address is
2512 strictly valid, i.e., that all pseudos requiring hard regs
2513 have gotten them. */
2515 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2520 /* No need to check general_operand again;
2521 it was done in insn-recog.c. */
2523 /* Anything goes unless it is a REG and really has a hard reg
2524 but the hard reg is not in the class GENERAL_REGS. */
2526 || GENERAL_REGS
== ALL_REGS
2527 || GET_CODE (op
) != REG
2528 || (reload_in_progress
2529 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2530 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2535 /* This is used for a MATCH_SCRATCH in the cases when
2536 we don't actually need anything. So anything goes
2542 if (GET_CODE (op
) == MEM
2543 /* Before reload, accept what reload can turn into mem. */
2544 || (strict
< 0 && CONSTANT_P (op
))
2545 /* During reload, accept a pseudo */
2546 || (reload_in_progress
&& GET_CODE (op
) == REG
2547 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2552 if (GET_CODE (op
) == MEM
2553 && (GET_CODE (XEXP (op
, 0)) == PRE_DEC
2554 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
2559 if (GET_CODE (op
) == MEM
2560 && (GET_CODE (XEXP (op
, 0)) == PRE_INC
2561 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
2566 #ifndef REAL_ARITHMETIC
2567 /* Match any CONST_DOUBLE, but only if
2568 we can examine the bits of it reliably. */
2569 if ((HOST_FLOAT_FORMAT
!= TARGET_FLOAT_FORMAT
2570 || HOST_BITS_PER_WIDE_INT
!= BITS_PER_WORD
)
2571 && GET_MODE (op
) != VOIDmode
&& ! flag_pretend_float
)
2574 if (GET_CODE (op
) == CONST_DOUBLE
)
2579 if (GET_CODE (op
) == CONST_DOUBLE
)
2585 if (GET_CODE (op
) == CONST_DOUBLE
2586 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, c
))
2591 if (GET_CODE (op
) == CONST_INT
2592 || (GET_CODE (op
) == CONST_DOUBLE
2593 && GET_MODE (op
) == VOIDmode
))
2596 if (CONSTANT_P (op
))
2601 if (GET_CODE (op
) == CONST_INT
2602 || (GET_CODE (op
) == CONST_DOUBLE
2603 && GET_MODE (op
) == VOIDmode
))
2615 if (GET_CODE (op
) == CONST_INT
2616 && CONST_OK_FOR_LETTER_P (INTVAL (op
), c
))
2621 if (GET_CODE (op
) == MEM
2622 && ((strict
> 0 && ! offsettable_memref_p (op
))
2624 && !(CONSTANT_P (op
) || GET_CODE (op
) == MEM
))
2625 || (reload_in_progress
2626 && !(GET_CODE (op
) == REG
2627 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))))
2632 if ((strict
> 0 && offsettable_memref_p (op
))
2633 || (strict
== 0 && offsettable_nonstrict_memref_p (op
))
2634 /* Before reload, accept what reload can handle. */
2636 && (CONSTANT_P (op
) || GET_CODE (op
) == MEM
))
2637 /* During reload, accept a pseudo */
2638 || (reload_in_progress
&& GET_CODE (op
) == REG
2639 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2645 enum reg_class
class;
2647 class = (c
== 'r' ? GENERAL_REGS
: REG_CLASS_FROM_LETTER (c
));
2648 if (class != NO_REGS
)
2652 && GET_CODE (op
) == REG
2653 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2654 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2655 || (GET_CODE (op
) == REG
2656 && reg_fits_class_p (op
, class, offset
, mode
)))
2659 #ifdef EXTRA_CONSTRAINT
2660 else if (EXTRA_CONSTRAINT (op
, c
))
2667 constraints
[opno
] = p
;
2668 /* If this operand did not win somehow,
2669 this alternative loses. */
2673 /* This alternative won; the operands are ok.
2674 Change whichever operands this alternative says to change. */
2679 /* See if any earlyclobber operand conflicts with some other
2683 for (eopno
= 0; eopno
< recog_data
.n_operands
; eopno
++)
2684 /* Ignore earlyclobber operands now in memory,
2685 because we would often report failure when we have
2686 two memory operands, one of which was formerly a REG. */
2687 if (earlyclobber
[eopno
]
2688 && GET_CODE (recog_data
.operand
[eopno
]) == REG
)
2689 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2690 if ((GET_CODE (recog_data
.operand
[opno
]) == MEM
2691 || recog_data
.operand_type
[opno
] != OP_OUT
)
2693 /* Ignore things like match_operator operands. */
2694 && *recog_data
.constraints
[opno
] != 0
2695 && ! (matching_operands
[opno
] == eopno
2696 && operands_match_p (recog_data
.operand
[opno
],
2697 recog_data
.operand
[eopno
]))
2698 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2699 recog_data
.operand
[eopno
]))
2704 while (--funny_match_index
>= 0)
2706 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2707 = recog_data
.operand
[funny_match
[funny_match_index
].this];
2714 which_alternative
++;
2716 while (which_alternative
< recog_data
.n_alternatives
);
2718 which_alternative
= -1;
2719 /* If we are about to reject this, but we are not to test strictly,
2720 try a very loose test. Only return failure if it fails also. */
2722 return constrain_operands (-1);
2727 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2728 is a hard reg in class CLASS when its regno is offset by OFFSET
2729 and changed to mode MODE.
2730 If REG occupies multiple hard regs, all of them must be in CLASS. */
/* NOTE(review): the fragments below are an extraction-garbled K&R
   definition.  Several original lines (the OPERAND/OFFSET parameter
   declarations, return statements, and closing braces) are missing
   from this chunk; recover them from the upstream file before editing. */
2733 reg_fits_class_p (operand
, class, offset
, mode
)
2735 register enum reg_class
class;
2737 enum machine_mode mode
;
/* Cache the register number; only hard registers (regno below
   FIRST_PSEUDO_REGISTER) can be members of a hard register class. */
2739 register int regno
= REGNO (operand
);
/* The (offset) starting hard reg must be in CLASS's bit set.  The
   offset term itself is in a missing line -- TODO confirm upstream. */
2740 if (regno
< FIRST_PSEUDO_REGISTER
2741 && TEST_HARD_REG_BIT (reg_class_contents
[(int) class],
/* A MODE value may span HARD_REGNO_NREGS consecutive hard regs; the
   loop re-tests CLASS membership for each of them (body truncated). */
2746 for (sr
= HARD_REGNO_NREGS (regno
, mode
) - 1;
2748 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) class],
2757 /* Split all insns in the function. If UPD_LIFE, update life info after. */
/* NOTE(review): garbled K&R definition; local declarations, braces and
   several statements are missing from this extraction (gaps in the
   embedded numbering, e.g. 2761-2766, 2803-2806, 2838-2844). */
2760 split_all_insns (upd_life
)
/* Track which basic blocks actually had an insn split, so life info
   can be recomputed only where needed. */
2767 blocks
= sbitmap_alloc (n_basic_blocks
);
2768 sbitmap_zero (blocks
);
/* Walk the blocks in reverse order. */
2771 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
2773 basic_block bb
= BASIC_BLOCK (i
);
/* Walk every insn in the block; NEXT is captured before any
   splitting mutates the chain. */
2776 for (insn
= bb
->head
; insn
; insn
= next
)
2780 /* Can't use `next_real_insn' because that might go across
2781 CODE_LABELS and short-out basic blocks. */
2782 next
= NEXT_INSN (insn
);
2783 if (! INSN_P (insn
))
2786 /* Don't split no-op move insns. These should silently
2787 disappear later in final. Splitting such insns would
2788 break the code that handles REG_NO_CONFLICT blocks. */
2790 else if ((set
= single_set (insn
)) != NULL
2791 && set_noop_p (set
))
2793 /* Nops get in the way while scheduling, so delete them
2794 now if register allocation has already been done. It
2795 is too risky to try to do this before register
2796 allocation, and there are unlikely to be very many
2797 nops then anyways. */
2798 if (reload_completed
)
/* "Delete" by demoting the insn to a NOTE_INSN_DELETED note. */
2800 PUT_CODE (insn
, NOTE
);
2801 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2802 NOTE_SOURCE_FILE (insn
) = 0;
2807 /* Split insns here to get max fine-grain parallelism. */
2808 rtx first
= PREV_INSN (insn
);
2809 rtx last
= try_split (PATTERN (insn
), insn
, 1);
/* Record that this block changed (condition guarding this is in
   a missing line -- presumably "last != insn"; confirm upstream). */
2813 SET_BIT (blocks
, i
);
2816 /* try_split returns the NOTE that INSN became. */
2817 PUT_CODE (insn
, NOTE
);
2818 NOTE_SOURCE_FILE (insn
) = 0;
2819 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2821 /* ??? Coddle to md files that generate subregs in post-
2822 reload splitters instead of computing the proper
/* Clean up (subreg (reg)) operands in each insn of the newly
   emitted split sequence, but only after reload. */
2824 if (reload_completed
&& first
!= last
)
2826 first
= NEXT_INSN (first
);
2830 cleanup_subreg_operands (first
);
2833 first
= NEXT_INSN (first
);
/* Fix up bb->end if the split replaced the block's last insn. */
2837 if (insn
== bb
->end
)
2845 if (insn
== bb
->end
)
2849 /* ??? When we're called from just after reload, the CFG is in bad
2850 shape, and we may have fallen off the end. This could be fixed
2851 by having reload not try to delete unreachable code. Otherwise
2852 assert we found the end insn. */
2853 if (insn
== NULL
&& upd_life
)
/* If anything was split and the caller asked for it, recompute
   life information for exactly the blocks marked above. */
2857 if (changed
&& upd_life
)
2859 compute_bb_for_insn (get_max_uid ());
2860 count_or_remove_death_notes (blocks
, 1);
2861 update_life_info (blocks
, UPDATE_LIFE_LOCAL
, PROP_DEATH_NOTES
);
2864 sbitmap_free (blocks
);
/* Everything below is the peephole2 pass and is compiled only when the
   target's machine description defines define_peephole2 patterns. */
2867 #ifdef HAVE_peephole2
/* Per-slot state for the peephole2 sliding window: one insn plus the
   registers live before it.  NOTE(review): the struct's field
   declarations (original lines 2869-2873) are missing from this
   extraction -- the fields .insn and .live_before are used below. */
2868 struct peep2_insn_data
/* Circular buffer of the last MAX_INSNS_PER_PEEP2 insns seen, plus one
   extra slot for end-of-block data; peep2_current indexes the newest. */
2874 static struct peep2_insn_data peep2_insn_data
[MAX_INSNS_PER_PEEP2
+ 1];
2875 static int peep2_current
;
2877 /* A non-insn marker indicating the last insn of the block.
2878 The live_before regset for this element is correct, indicating
2879 global_live_at_end for the block. */
2880 #define PEEP2_EOB pc_rtx
2882 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2883 does not exist. Used by the recognizer to find the next insn to match
2884 in a multi-insn pattern. */
/* NOTE(review): the function signature (original lines 2885-2889,
   presumably "rtx peep2_next_insn (n) int n;") and the out-of-range
   abort path are missing from this extraction. */
2890 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
/* Convert the relative offset N into a circular-buffer index
   (the "n += peep2_current" step is in a missing line). */
2894 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
2895 n
-= MAX_INSNS_PER_PEEP2
+ 1;
/* The end-of-block marker is not a real insn; report "no such insn". */
2897 if (peep2_insn_data
[n
].insn
== PEEP2_EOB
)
2899 return peep2_insn_data
[n
].insn
;
2902 /* Return true if REGNO is dead before the Nth non-note insn
/* NOTE(review): the rest of the header comment and the K&R signature
   lines (2903-2909) are missing from this extraction. */
2906 peep2_regno_dead_p (ofs
, regno
)
/* OFS is an offset relative to peep2_current; reject out-of-window
   requests (the abort on the next missing line, presumably). */
2910 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
/* Translate OFS into a circular-buffer index. */
2913 ofs
+= peep2_current
;
2914 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2915 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
/* Slot must contain recorded data. */
2917 if (peep2_insn_data
[ofs
].insn
== NULL_RTX
)
/* Dead == not in the live-before regset for that insn. */
2920 return ! REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
);
2923 /* Similarly for a REG. */
/* Like peep2_regno_dead_p, but for a REG rtx: every hard register the
   REG occupies must be dead.  NOTE(review): K&R parameter declarations
   and local declarations (lines 2927-2931) missing from extraction. */
2926 peep2_reg_dead_p (ofs
, reg
)
2932 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
/* Translate OFS into a circular-buffer index. */
2935 ofs
+= peep2_current
;
2936 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2937 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2939 if (peep2_insn_data
[ofs
].insn
== NULL_RTX
)
/* REG spans HARD_REGNO_NREGS consecutive hard regs in its mode. */
2942 regno
= REGNO (reg
);
2943 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
/* Check each covered regno against the live-before set (the loop
   header decrementing N is in a missing line -- confirm upstream);
   any live member means the REG is not dead. */
2945 if (REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
+ n
))
2950 /* Try to find a hard register of mode MODE, matching the register class in
2951 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2952 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2953 in which case the only condition is that the register must be available
2954 before CURRENT_INSN.
2955 Registers that already have bits set in REG_SET will not be considered.
2957 If an appropriate register is available, it will be returned and the
2958 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
/* NOTE(review): garbled K&R definition; FROM/TO parameter declarations,
   local declarations (i, live), braces and the failure-return are on
   lines missing from this extraction. */
2962 peep2_find_free_register (from
, to
, class_str
, mode
, reg_set
)
2964 const char *class_str
;
2965 enum machine_mode mode
;
2966 HARD_REG_SET
*reg_set
;
/* Static so successive calls rotate through the register file instead
   of always handing out the lowest-numbered free register. */
2968 static int search_ofs
;
2969 enum reg_class
class;
/* FROM/TO are window-relative offsets; bounds-check then wrap them
   into circular-buffer indices. */
2973 if (from
>= MAX_INSNS_PER_PEEP2
+ 1 || to
>= MAX_INSNS_PER_PEEP2
+ 1)
2976 from
+= peep2_current
;
2977 if (from
>= MAX_INSNS_PER_PEEP2
+ 1)
2978 from
-= MAX_INSNS_PER_PEEP2
+ 1;
2979 to
+= peep2_current
;
2980 if (to
>= MAX_INSNS_PER_PEEP2
+ 1)
2981 to
-= MAX_INSNS_PER_PEEP2
+ 1;
2983 if (peep2_insn_data
[from
].insn
== NULL_RTX
)
/* Seed LIVE with the registers live before insn FROM ... */
2985 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
/* ... then OR in the live-before sets of every insn up to TO, so the
   chosen register is free across the whole range (loop header is on a
   missing line). */
2989 HARD_REG_SET this_live
;
2991 if (++from
>= MAX_INSNS_PER_PEEP2
+ 1)
2993 if (peep2_insn_data
[from
].insn
== NULL_RTX
)
2995 REG_SET_TO_HARD_REG_SET (this_live
, peep2_insn_data
[from
].live_before
);
2996 IOR_HARD_REG_SET (live
, this_live
);
/* Map the constraint letter to a register class; 'r' is handled
   specially since REG_CLASS_FROM_LETTER need not define it. */
2999 class = (class_str
[0] == 'r' ? GENERAL_REGS
3000 : REG_CLASS_FROM_LETTER (class_str
[0]));
3002 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3004 int raw_regno
, regno
, success
, j
;
3006 /* Distribute the free registers as much as possible. */
3007 raw_regno
= search_ofs
+ i
;
3008 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
3009 raw_regno
-= FIRST_PSEUDO_REGISTER
;
/* Honor the target's preferred allocation order when it has one
   (the #else arm "regno = raw_regno;" is on a missing line). */
3010 #ifdef REG_ALLOC_ORDER
3011 regno
= reg_alloc_order
[raw_regno
];
3016 /* Don't allocate fixed registers. */
3017 if (fixed_regs
[regno
])
3019 /* Make sure the register is of the right class. */
3020 if (! TEST_HARD_REG_BIT (reg_class_contents
[class], regno
))
3022 /* And can support the mode we need. */
3023 if (! HARD_REGNO_MODE_OK (regno
, mode
))
3025 /* And that we don't create an extra save/restore. */
3026 if (! call_used_regs
[regno
] && ! regs_ever_live
[regno
])
3028 /* And we don't clobber traceback for noreturn functions. */
3029 if ((regno
== FRAME_POINTER_REGNUM
|| regno
== HARD_FRAME_POINTER_REGNUM
)
3030 && (! reload_completed
|| frame_pointer_needed
))
/* Every hard reg the mode occupies must be free: neither already
   claimed by the caller (REG_SET) nor live in the range. */
3034 for (j
= HARD_REGNO_NREGS (regno
, mode
) - 1; j
>= 0; j
--)
3036 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
3037 || TEST_HARD_REG_BIT (live
, regno
+ j
))
/* Success: claim the register(s) in the caller's set ... */
3045 for (j
= HARD_REGNO_NREGS (regno
, mode
) - 1; j
>= 0; j
--)
3046 SET_HARD_REG_BIT (*reg_set
, regno
+ j
);
3048 /* Start the next search with the next register. */
3049 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
3051 search_ofs
= raw_regno
;
/* ... and hand back a fresh REG rtx for it. */
3053 return gen_rtx_REG (mode
, regno
);
3061 /* Perform the peephole2 optimization pass. */
/* Runs the machine-description define_peephole2 patterns over every
   basic block, maintaining a sliding window of insns with per-insn
   life information computed by backward propagation.
   NOTE(review): garbled definition; many local declarations, braces
   and statements are on lines missing from this extraction. */
3064 peephole2_optimize (dump_file
)
3065 FILE *dump_file ATTRIBUTE_UNUSED
;
/* Stack-allocated regset heads: one per window slot plus one for the
   running LIVE set. */
3067 regset_head rs_heads
[MAX_INSNS_PER_PEEP2
+ 2];
3071 #ifdef HAVE_conditional_execution
3076 /* Initialize the regsets we're going to use. */
3077 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3078 peep2_insn_data
[i
].live_before
= INITIALIZE_REG_SET (rs_heads
[i
]);
3079 live
= INITIALIZE_REG_SET (rs_heads
[i
]);
/* With conditional execution, death notes cannot be kept incrementally
   correct, so strip them now and rebuild at the end for changed blocks. */
3081 #ifdef HAVE_conditional_execution
3082 blocks
= sbitmap_alloc (n_basic_blocks
);
3083 sbitmap_zero (blocks
);
3086 count_or_remove_death_notes (NULL
, 1);
3089 for (b
= n_basic_blocks
- 1; b
>= 0; --b
)
3091 basic_block bb
= BASIC_BLOCK (b
);
3092 struct propagate_block_info
*pbi
;
3094 /* Indicate that all slots except the last holds invalid data. */
3095 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
; ++i
)
3096 peep2_insn_data
[i
].insn
= NULL_RTX
;
3098 /* Indicate that the last slot contains live_after data. */
3099 peep2_insn_data
[MAX_INSNS_PER_PEEP2
].insn
= PEEP2_EOB
;
3100 peep2_current
= MAX_INSNS_PER_PEEP2
;
3102 /* Start up propagation. */
3103 COPY_REG_SET (live
, bb
->global_live_at_end
);
3104 COPY_REG_SET (peep2_insn_data
[MAX_INSNS_PER_PEEP2
].live_before
, live
);
3106 #ifdef HAVE_conditional_execution
3107 pbi
= init_propagate_block_info (bb
, live
, NULL
, NULL
, 0);
3109 pbi
= init_propagate_block_info (bb
, live
, NULL
, NULL
, PROP_DEATH_NOTES
);
/* Scan the block backwards so that LIVE always holds the registers
   live before the insn being recorded. */
3112 for (insn
= bb
->end
; ; insn
= prev
)
3114 prev
= PREV_INSN (insn
);
3120 /* Record this insn. */
3121 if (--peep2_current
< 0)
3122 peep2_current
= MAX_INSNS_PER_PEEP2
;
3123 peep2_insn_data
[peep2_current
].insn
= insn
;
3124 propagate_one_insn (pbi
, insn
);
3125 COPY_REG_SET (peep2_insn_data
[peep2_current
].live_before
, live
);
3127 /* Match the peephole. */
3128 try = peephole2_insns (PATTERN (insn
), insn
, &match_len
);
/* On a match, TRY is the replacement sequence and MATCH_LEN the
   number of extra insns consumed; find the window index of the
   last matched insn. */
3131 i
= match_len
+ peep2_current
;
3132 if (i
>= MAX_INSNS_PER_PEEP2
+ 1)
3133 i
-= MAX_INSNS_PER_PEEP2
+ 1;
3135 /* Replace the old sequence with the new. */
3136 flow_delete_insn_chain (insn
, peep2_insn_data
[i
].insn
);
3137 try = emit_insn_after (try, prev
);
3139 /* Adjust the basic block boundaries. */
3140 if (peep2_insn_data
[i
].insn
== bb
->end
)
3142 if (insn
== bb
->head
)
3143 bb
->head
= NEXT_INSN (prev
);
3145 #ifdef HAVE_conditional_execution
3146 /* With conditional execution, we cannot back up the
3147 live information so easily, since the conditional
3148 death data structures are not so self-contained.
3149 So record that we've made a modification to this
3150 block and update life information at the end. */
3151 SET_BIT (blocks
, b
);
/* Invalidate the whole window: the replaced insns' recorded
   life data no longer applies. */
3154 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3155 peep2_insn_data
[i
].insn
= NULL_RTX
;
3156 peep2_insn_data
[peep2_current
].insn
= PEEP2_EOB
;
3158 /* Back up lifetime information past the end of the
3159 newly created sequence. */
3160 if (++i
>= MAX_INSNS_PER_PEEP2
+ 1)
3162 COPY_REG_SET (live
, peep2_insn_data
[i
].live_before
);
3164 /* Update life information for the new sequence. */
/* Walk the emitted replacement insns backwards, recording each
   one in the window and re-propagating LIVE across it. */
3170 i
= MAX_INSNS_PER_PEEP2
;
3171 peep2_insn_data
[i
].insn
= try;
3172 propagate_one_insn (pbi
, try);
3173 COPY_REG_SET (peep2_insn_data
[i
].live_before
, live
);
3175 try = PREV_INSN (try);
3177 while (try != prev
);
3179 /* ??? Should verify that LIVE now matches what we
3180 had before the new sequence. */
/* Loop exit: stop once the block head has been processed. */
3187 if (insn
== bb
->head
)
3191 free_propagate_block_info (pbi
);
/* Release the per-slot regsets and the running LIVE set. */
3194 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3195 FREE_REG_SET (peep2_insn_data
[i
].live_before
);
3196 FREE_REG_SET (live
);
/* Rebuild death notes only for the blocks we actually changed. */
3198 #ifdef HAVE_conditional_execution
3199 count_or_remove_death_notes (blocks
, 1);
3200 update_life_info (blocks
, UPDATE_LIFE_LOCAL
, PROP_DEATH_NOTES
);
3201 sbitmap_free (blocks
);
3204 #endif /* HAVE_peephole2 */