/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "hard-reg-set.h"
36 #include "basic-block.h"
40 #ifndef STACK_PUSH_CODE
41 #ifdef STACK_GROWS_DOWNWARD
42 #define STACK_PUSH_CODE PRE_DEC
44 #define STACK_PUSH_CODE PRE_INC
48 #ifndef STACK_POP_CODE
49 #ifdef STACK_GROWS_DOWNWARD
50 #define STACK_POP_CODE POST_INC
52 #define STACK_POP_CODE POST_DEC
56 static void validate_replace_rtx_1
PARAMS ((rtx
*, rtx
, rtx
, rtx
));
57 static rtx
*find_single_use_1
PARAMS ((rtx
, rtx
*));
58 static rtx
*find_constant_term_loc
PARAMS ((rtx
*));
59 static int insn_invalid_p
PARAMS ((rtx
));
61 /* Nonzero means allow operands to be volatile.
62 This should be 0 if you are generating rtl, such as if you are calling
63 the functions in optabs.c and expmed.c (most of the time).
64 This should be 1 if all valid insns need to be recognized,
65 such as in regclass.c and final.c and reload.c.
67 init_recog and init_recog_no_volatile are responsible for setting this. */
71 struct recog_data recog_data
;
73 /* Contains a vector of operand_alternative structures for every operand.
74 Set up by preprocess_constraints. */
75 struct operand_alternative recog_op_alt
[MAX_RECOG_OPERANDS
][MAX_RECOG_ALTERNATIVES
];
77 /* On return from `constrain_operands', indicate which alternative
80 int which_alternative
;
82 /* Nonzero after end of reload pass.
83 Set to 1 or 0 by toplev.c.
84 Controls the significance of (SUBREG (MEM)). */
88 /* Initialize data used by the function `recog'.
89 This must be called once in the compilation of a function
90 before any insn recognition may be done in the function. */
93 init_recog_no_volatile ()
104 /* Try recognizing the instruction INSN,
105 and return the code number that results.
106 Remember the code so that repeated calls do not
107 need to spend the time for actual rerecognition.
109 This function is the normal interface to instruction recognition.
110 The automatically-generated function `recog' is normally called
111 through this one. (The only exception is in combine.c.) */
114 recog_memoized_1 (insn
)
117 if (INSN_CODE (insn
) < 0)
118 INSN_CODE (insn
) = recog (PATTERN (insn
), insn
, NULL_PTR
);
119 return INSN_CODE (insn
);
122 /* Check that X is an insn-body for an `asm' with operands
123 and that the operands mentioned in it are legitimate. */
126 check_asm_operands (x
)
131 const char **constraints
;
134 /* Post-reload, be more strict with things. */
135 if (reload_completed
)
137 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
138 extract_insn (make_insn_raw (x
));
139 constrain_operands (1);
140 return which_alternative
>= 0;
143 noperands
= asm_noperands (x
);
149 operands
= (rtx
*) alloca (noperands
* sizeof (rtx
));
150 constraints
= (const char **) alloca (noperands
* sizeof (char *));
152 decode_asm_operands (x
, operands
, NULL_PTR
, constraints
, NULL_PTR
);
154 for (i
= 0; i
< noperands
; i
++)
156 const char *c
= constraints
[i
];
159 if (ISDIGIT ((unsigned char)c
[0]) && c
[1] == '\0')
160 c
= constraints
[c
[0] - '0'];
162 if (! asm_operand_ok (operands
[i
], c
))
169 /* Static data for the next two routines. */
171 typedef struct change_t
179 static change_t
*changes
;
180 static int changes_allocated
;
182 static int num_changes
= 0;
184 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
185 at which NEW will be placed. If OBJECT is zero, no validation is done,
186 the change is simply made.
188 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
189 will be called with the address and mode as parameters. If OBJECT is
190 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
193 IN_GROUP is non-zero if this is part of a group of changes that must be
194 performed as a group. In that case, the changes will be stored. The
195 function `apply_change_group' will validate and apply the changes.
197 If IN_GROUP is zero, this is a single change. Try to recognize the insn
198 or validate the memory reference with the change applied. If the result
199 is not valid for the machine, suppress the change and return zero.
200 Otherwise, perform the change and return 1. */
203 validate_change (object
, loc
, new, in_group
)
211 if (old
== new || rtx_equal_p (old
, new))
214 if (in_group
== 0 && num_changes
!= 0)
219 /* Save the information describing this change. */
220 if (num_changes
>= changes_allocated
)
222 if (changes_allocated
== 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated
= MAX_RECOG_OPERANDS
* 5;
227 changes_allocated
*= 2;
230 (change_t
*) xrealloc (changes
,
231 sizeof (change_t
) * changes_allocated
);
234 changes
[num_changes
].object
= object
;
235 changes
[num_changes
].loc
= loc
;
236 changes
[num_changes
].old
= old
;
238 if (object
&& GET_CODE (object
) != MEM
)
240 /* Set INSN_CODE to force rerecognition of insn. Save old code in
242 changes
[num_changes
].old_code
= INSN_CODE (object
);
243 INSN_CODE (object
) = -1;
248 /* If we are making a group of changes, return 1. Otherwise, validate the
249 change group we made. */
254 return apply_change_group ();
257 /* This subroutine of apply_change_group verifies whether the changes to INSN
258 were valid; i.e. whether INSN can still be recognized. */
261 insn_invalid_p (insn
)
264 int icode
= recog_memoized (insn
);
265 int is_asm
= icode
< 0 && asm_noperands (PATTERN (insn
)) >= 0;
267 if (is_asm
&& ! check_asm_operands (PATTERN (insn
)))
269 if (! is_asm
&& icode
< 0)
272 /* After reload, verify that all constraints are satisfied. */
273 if (reload_completed
)
277 if (! constrain_operands (1))
284 /* Apply a group of changes previously issued with `validate_change'.
285 Return 1 if all changes are valid, zero otherwise. */
288 apply_change_group ()
292 /* The changes have been applied and all INSN_CODEs have been reset to force
295 The changes are valid if we aren't given an object, or if we are
296 given a MEM and it still is a valid address, or if this is in insn
297 and it is recognized. In the latter case, if reload has completed,
298 we also require that the operands meet the constraints for
301 for (i
= 0; i
< num_changes
; i
++)
303 rtx object
= changes
[i
].object
;
308 if (GET_CODE (object
) == MEM
)
310 if (! memory_address_p (GET_MODE (object
), XEXP (object
, 0)))
313 else if (insn_invalid_p (object
))
315 rtx pat
= PATTERN (object
);
317 /* Perhaps we couldn't recognize the insn because there were
318 extra CLOBBERs at the end. If so, try to re-recognize
319 without the last CLOBBER (later iterations will cause each of
320 them to be eliminated, in turn). But don't do this if we
321 have an ASM_OPERAND. */
322 if (GET_CODE (pat
) == PARALLEL
323 && GET_CODE (XVECEXP (pat
, 0, XVECLEN (pat
, 0) - 1)) == CLOBBER
324 && asm_noperands (PATTERN (object
)) < 0)
328 if (XVECLEN (pat
, 0) == 2)
329 newpat
= XVECEXP (pat
, 0, 0);
335 = gen_rtx_PARALLEL (VOIDmode
,
336 rtvec_alloc (XVECLEN (pat
, 0) - 1));
337 for (j
= 0; j
< XVECLEN (newpat
, 0); j
++)
338 XVECEXP (newpat
, 0, j
) = XVECEXP (pat
, 0, j
);
341 /* Add a new change to this group to replace the pattern
342 with this new pattern. Then consider this change
343 as having succeeded. The change we added will
344 cause the entire call to fail if things remain invalid.
346 Note that this can lose if a later change than the one
347 we are processing specified &XVECEXP (PATTERN (object), 0, X)
348 but this shouldn't occur. */
350 validate_change (object
, &PATTERN (object
), newpat
, 1);
352 else if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
353 /* If this insn is a CLOBBER or USE, it is always valid, but is
361 if (i
== num_changes
)
373 /* Return the number of changes so far in the current group. */
376 num_validated_changes ()
381 /* Retract the changes numbered NUM and up. */
389 /* Back out all the changes. Do this in the opposite order in which
391 for (i
= num_changes
- 1; i
>= num
; i
--)
393 *changes
[i
].loc
= changes
[i
].old
;
394 if (changes
[i
].object
&& GET_CODE (changes
[i
].object
) != MEM
)
395 INSN_CODE (changes
[i
].object
) = changes
[i
].old_code
;
400 /* Replace every occurrence of FROM in X with TO. Mark each change with
401 validate_change passing OBJECT. */
404 validate_replace_rtx_1 (loc
, from
, to
, object
)
406 rtx from
, to
, object
;
409 register const char *fmt
;
410 register rtx x
= *loc
;
416 /* X matches FROM if it is the same rtx or they are both referring to the
417 same register in the same mode. Avoid calling rtx_equal_p unless the
418 operands look similar. */
421 || (GET_CODE (x
) == REG
&& GET_CODE (from
) == REG
422 && GET_MODE (x
) == GET_MODE (from
)
423 && REGNO (x
) == REGNO (from
))
424 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
425 && rtx_equal_p (x
, from
)))
427 validate_change (object
, loc
, to
, 1);
431 /* For commutative or comparison operations, try replacing each argument
432 separately and seeing if we made any changes. If so, put a constant
434 if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == 'c')
436 int prev_changes
= num_changes
;
438 validate_replace_rtx_1 (&XEXP (x
, 0), from
, to
, object
);
439 validate_replace_rtx_1 (&XEXP (x
, 1), from
, to
, object
);
440 /* If nothing changed, we can exit now. In fact, continuing on
441 into the switch statement below can be wrong, eg. turning
442 (plus (symbol_ref) (const_int)) into
443 (const (plus (symbol_ref) (const_int))). This might not seem
444 so bad, but the first rtx is already enclosed in `const', so
445 we get a string of (const (const (const...))). */
446 if (prev_changes
== num_changes
)
448 if (CONSTANT_P (XEXP (x
, 0)))
450 validate_change (object
, loc
,
451 gen_rtx_fmt_ee (GET_RTX_CLASS (code
) == 'c' ? code
452 : swap_condition (code
),
453 GET_MODE (x
), XEXP (x
, 1),
461 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
462 done the substitution, otherwise we won't. */
467 /* If we have a PLUS whose second operand is now a CONST_INT, use
468 plus_constant to try to simplify it. */
469 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& XEXP (x
, 1) == to
)
470 validate_change (object
, loc
, plus_constant (XEXP (x
, 0), INTVAL (to
)),
475 if (GET_CODE (to
) == CONST_INT
&& XEXP (x
, 1) == from
)
477 validate_change (object
, loc
,
478 plus_constant (XEXP (x
, 0), - INTVAL (to
)),
486 /* In these cases, the operation to be performed depends on the mode
487 of the operand. If we are replacing the operand with a VOIDmode
488 constant, we lose the information. So try to simplify the operation
490 if (GET_MODE (to
) == VOIDmode
491 && (rtx_equal_p (XEXP (x
, 0), from
)
492 || (GET_CODE (XEXP (x
, 0)) == SUBREG
493 && rtx_equal_p (SUBREG_REG (XEXP (x
, 0)), from
))))
497 /* If there is a subreg involved, crop to the portion of the
498 constant that we are interested in. */
499 if (GET_CODE (XEXP (x
, 0)) == SUBREG
)
501 if (GET_MODE_SIZE (GET_MODE (XEXP (x
, 0))) <= UNITS_PER_WORD
)
502 to
= operand_subword (to
, SUBREG_WORD (XEXP (x
, 0)),
504 else if (GET_MODE_CLASS (GET_MODE (from
)) == MODE_INT
505 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
506 <= HOST_BITS_PER_WIDE_INT
))
508 int i
= SUBREG_WORD (XEXP (x
, 0)) * BITS_PER_WORD
;
510 unsigned HOST_WIDE_INT vall
;
512 if (GET_CODE (to
) == CONST_INT
)
515 valh
= (HOST_WIDE_INT
) vall
< 0 ? ~0 : 0;
519 vall
= CONST_DOUBLE_LOW (to
);
520 valh
= CONST_DOUBLE_HIGH (to
);
523 if (WORDS_BIG_ENDIAN
)
524 i
= (GET_MODE_BITSIZE (GET_MODE (from
))
525 - GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - i
);
526 if (i
> 0 && i
< HOST_BITS_PER_WIDE_INT
)
527 vall
= vall
>> i
| valh
<< (HOST_BITS_PER_WIDE_INT
- i
);
528 else if (i
>= HOST_BITS_PER_WIDE_INT
)
529 vall
= valh
>> (i
- HOST_BITS_PER_WIDE_INT
);
530 to
= GEN_INT (trunc_int_for_mode (vall
,
531 GET_MODE (XEXP (x
, 0))));
534 to
= gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
537 /* If the above didn't fail, perform the extension from the
538 mode of the operand (and not the mode of FROM). */
540 new = simplify_unary_operation (code
, GET_MODE (x
), to
,
541 GET_MODE (XEXP (x
, 0)));
543 /* If any of the above failed, substitute in something that
544 we know won't be recognized. */
546 new = gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
548 validate_change (object
, loc
, new, 1);
554 /* In case we are replacing by constant, attempt to simplify it to non-SUBREG
555 expression. We can't do this later, since the information about inner mode
557 if (CONSTANT_P (to
) && rtx_equal_p (SUBREG_REG (x
), from
))
559 if (GET_MODE_SIZE (GET_MODE (x
)) == UNITS_PER_WORD
560 && GET_MODE_SIZE (GET_MODE (from
)) > UNITS_PER_WORD
561 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
)
563 rtx temp
= operand_subword (to
, SUBREG_WORD (x
),
567 validate_change (object
, loc
, temp
, 1);
571 if (subreg_lowpart_p (x
))
573 rtx
new = gen_lowpart_if_possible (GET_MODE (x
), to
);
576 validate_change (object
, loc
, new, 1);
581 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
582 since we are saying that the high bits don't matter. */
583 if (GET_MODE (to
) == VOIDmode
584 && GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (from
)))
586 validate_change (object
, loc
, to
, 1);
591 /* Changing mode twice with SUBREG => just change it once,
592 or not at all if changing back to starting mode. */
593 if (GET_CODE (to
) == SUBREG
594 && rtx_equal_p (SUBREG_REG (x
), from
))
596 if (GET_MODE (x
) == GET_MODE (SUBREG_REG (to
))
597 && SUBREG_WORD (x
) == 0 && SUBREG_WORD (to
) == 0)
599 validate_change (object
, loc
, SUBREG_REG (to
), 1);
603 validate_change (object
, loc
,
604 gen_rtx_SUBREG (GET_MODE (x
), SUBREG_REG (to
),
605 SUBREG_WORD (x
) + SUBREG_WORD (to
)), 1);
609 /* If we have a SUBREG of a register that we are replacing and we are
610 replacing it with a MEM, make a new MEM and try replacing the
611 SUBREG with it. Don't do this if the MEM has a mode-dependent address
612 or if we would be widening it. */
614 if (GET_CODE (from
) == REG
615 && GET_CODE (to
) == MEM
616 && rtx_equal_p (SUBREG_REG (x
), from
)
617 && ! mode_dependent_address_p (XEXP (to
, 0))
618 && ! MEM_VOLATILE_P (to
)
619 && GET_MODE_SIZE (GET_MODE (x
)) <= GET_MODE_SIZE (GET_MODE (to
)))
621 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
622 enum machine_mode mode
= GET_MODE (x
);
625 if (BYTES_BIG_ENDIAN
)
626 offset
+= (MIN (UNITS_PER_WORD
,
627 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
628 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
630 new = gen_rtx_MEM (mode
, plus_constant (XEXP (to
, 0), offset
));
631 MEM_COPY_ATTRIBUTES (new, to
);
632 validate_change (object
, loc
, new, 1);
639 /* If we are replacing a register with memory, try to change the memory
640 to be the mode required for memory in extract operations (this isn't
641 likely to be an insertion operation; if it was, nothing bad will
642 happen, we might just fail in some cases). */
644 if (GET_CODE (from
) == REG
&& GET_CODE (to
) == MEM
645 && rtx_equal_p (XEXP (x
, 0), from
)
646 && GET_CODE (XEXP (x
, 1)) == CONST_INT
647 && GET_CODE (XEXP (x
, 2)) == CONST_INT
648 && ! mode_dependent_address_p (XEXP (to
, 0))
649 && ! MEM_VOLATILE_P (to
))
651 enum machine_mode wanted_mode
= VOIDmode
;
652 enum machine_mode is_mode
= GET_MODE (to
);
653 int pos
= INTVAL (XEXP (x
, 2));
656 if (code
== ZERO_EXTRACT
)
658 wanted_mode
= insn_data
[(int) CODE_FOR_extzv
].operand
[1].mode
;
659 if (wanted_mode
== VOIDmode
)
660 wanted_mode
= word_mode
;
664 if (code
== SIGN_EXTRACT
)
666 wanted_mode
= insn_data
[(int) CODE_FOR_extv
].operand
[1].mode
;
667 if (wanted_mode
== VOIDmode
)
668 wanted_mode
= word_mode
;
672 /* If we have a narrower mode, we can do something. */
673 if (wanted_mode
!= VOIDmode
674 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
676 int offset
= pos
/ BITS_PER_UNIT
;
679 /* If the bytes and bits are counted differently, we
680 must adjust the offset. */
681 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
682 offset
= (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
)
685 pos
%= GET_MODE_BITSIZE (wanted_mode
);
687 newmem
= gen_rtx_MEM (wanted_mode
,
688 plus_constant (XEXP (to
, 0), offset
));
689 MEM_COPY_ATTRIBUTES (newmem
, to
);
691 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
692 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
702 /* For commutative or comparison operations we've already performed
703 replacements. Don't try to perform them again. */
704 if (GET_RTX_CLASS (code
) != '<' && GET_RTX_CLASS (code
) != 'c')
706 fmt
= GET_RTX_FORMAT (code
);
707 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
710 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
);
711 else if (fmt
[i
] == 'E')
712 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
713 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
);
718 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
719 with TO. After all changes have been made, validate by seeing
720 if INSN is still valid. */
723 validate_replace_rtx_subexp (from
, to
, insn
, loc
)
724 rtx from
, to
, insn
, *loc
;
726 validate_replace_rtx_1 (loc
, from
, to
, insn
);
727 return apply_change_group ();
730 /* Try replacing every occurrence of FROM in INSN with TO. After all
731 changes have been made, validate by seeing if INSN is still valid. */
734 validate_replace_rtx (from
, to
, insn
)
737 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
738 return apply_change_group ();
741 /* Try replacing every occurrence of FROM in INSN with TO. After all
742 changes have been made, validate by seeing if INSN is still valid. */
745 validate_replace_rtx_group (from
, to
, insn
)
748 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
751 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
752 SET_DESTs. After all changes have been made, validate by seeing if
753 INSN is still valid. */
756 validate_replace_src (from
, to
, insn
)
759 if ((GET_CODE (insn
) != INSN
&& GET_CODE (insn
) != JUMP_INSN
)
760 || GET_CODE (PATTERN (insn
)) != SET
)
763 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn
)), from
, to
, insn
);
764 if (GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
)
765 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn
)), 0),
767 return apply_change_group ();
771 /* Return 1 if the insn using CC0 set by INSN does not contain
772 any ordered tests applied to the condition codes.
773 EQ and NE tests do not count. */
776 next_insn_tests_no_inequality (insn
)
779 register rtx next
= next_cc0_user (insn
);
781 /* If there is no next insn, we have to take the conservative choice. */
785 return ((GET_CODE (next
) == JUMP_INSN
786 || GET_CODE (next
) == INSN
787 || GET_CODE (next
) == CALL_INSN
)
788 && ! inequality_comparisons_p (PATTERN (next
)));
791 #if 0 /* This is useless since the insn that sets the cc's
792 must be followed immediately by the use of them. */
793 /* Return 1 if the CC value set up by INSN is not used. */
796 next_insns_test_no_inequality (insn
)
799 register rtx next
= NEXT_INSN (insn
);
801 for (; next
!= 0; next
= NEXT_INSN (next
))
803 if (GET_CODE (next
) == CODE_LABEL
804 || GET_CODE (next
) == BARRIER
)
806 if (GET_CODE (next
) == NOTE
)
808 if (inequality_comparisons_p (PATTERN (next
)))
810 if (sets_cc0_p (PATTERN (next
)) == 1)
812 if (! reg_mentioned_p (cc0_rtx
, PATTERN (next
)))
820 /* This is used by find_single_use to locate an rtx that contains exactly one
821 use of DEST, which is typically either a REG or CC0. It returns a
822 pointer to the innermost rtx expression containing DEST. Appearances of
823 DEST that are being used to totally replace it are not counted. */
826 find_single_use_1 (dest
, loc
)
831 enum rtx_code code
= GET_CODE (x
);
848 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
849 of a REG that occupies all of the REG, the insn uses DEST if
850 it is mentioned in the destination or the source. Otherwise, we
851 need just check the source. */
852 if (GET_CODE (SET_DEST (x
)) != CC0
853 && GET_CODE (SET_DEST (x
)) != PC
854 && GET_CODE (SET_DEST (x
)) != REG
855 && ! (GET_CODE (SET_DEST (x
)) == SUBREG
856 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
857 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x
))))
858 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
859 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
860 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))))
863 return find_single_use_1 (dest
, &SET_SRC (x
));
867 return find_single_use_1 (dest
, &XEXP (x
, 0));
873 /* If it wasn't one of the common cases above, check each expression and
874 vector of this code. Look for a unique usage of DEST. */
876 fmt
= GET_RTX_FORMAT (code
);
877 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
881 if (dest
== XEXP (x
, i
)
882 || (GET_CODE (dest
) == REG
&& GET_CODE (XEXP (x
, i
)) == REG
883 && REGNO (dest
) == REGNO (XEXP (x
, i
))))
886 this_result
= find_single_use_1 (dest
, &XEXP (x
, i
));
889 result
= this_result
;
890 else if (this_result
)
891 /* Duplicate usage. */
894 else if (fmt
[i
] == 'E')
898 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
900 if (XVECEXP (x
, i
, j
) == dest
901 || (GET_CODE (dest
) == REG
902 && GET_CODE (XVECEXP (x
, i
, j
)) == REG
903 && REGNO (XVECEXP (x
, i
, j
)) == REGNO (dest
)))
906 this_result
= find_single_use_1 (dest
, &XVECEXP (x
, i
, j
));
909 result
= this_result
;
910 else if (this_result
)
919 /* See if DEST, produced in INSN, is used only a single time in the
920 sequel. If so, return a pointer to the innermost rtx expression in which
923 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
925 This routine will return usually zero either before flow is called (because
926 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
927 note can't be trusted).
929 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
930 care about REG_DEAD notes or LOG_LINKS.
932 Otherwise, we find the single use by finding an insn that has a
933 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
934 only referenced once in that insn, we know that it must be the first
935 and last insn referencing DEST. */
938 find_single_use (dest
, insn
, ploc
)
950 next
= NEXT_INSN (insn
);
952 || (GET_CODE (next
) != INSN
&& GET_CODE (next
) != JUMP_INSN
))
955 result
= find_single_use_1 (dest
, &PATTERN (next
));
962 if (reload_completed
|| reload_in_progress
|| GET_CODE (dest
) != REG
)
965 for (next
= next_nonnote_insn (insn
);
966 next
!= 0 && GET_CODE (next
) != CODE_LABEL
;
967 next
= next_nonnote_insn (next
))
968 if (INSN_P (next
) && dead_or_set_p (next
, dest
))
970 for (link
= LOG_LINKS (next
); link
; link
= XEXP (link
, 1))
971 if (XEXP (link
, 0) == insn
)
976 result
= find_single_use_1 (dest
, &PATTERN (next
));
986 /* Return 1 if OP is a valid general operand for machine mode MODE.
987 This is either a register reference, a memory reference,
988 or a constant. In the case of a memory reference, the address
989 is checked for general validity for the target machine.
991 Register and memory references must have mode MODE in order to be valid,
992 but some constants have no machine mode and are valid for any mode.
994 If MODE is VOIDmode, OP is checked for validity for whatever mode
997 The main use of this function is as a predicate in match_operand
998 expressions in the machine description.
1000 For an explanation of this function's behavior for registers of
1001 class NO_REGS, see the comment for `register_operand'. */
1004 general_operand (op
, mode
)
1006 enum machine_mode mode
;
1008 register enum rtx_code code
= GET_CODE (op
);
1009 int mode_altering_drug
= 0;
1011 if (mode
== VOIDmode
)
1012 mode
= GET_MODE (op
);
1014 /* Don't accept CONST_INT or anything similar
1015 if the caller wants something floating. */
1016 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1017 && GET_MODE_CLASS (mode
) != MODE_INT
1018 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1021 if (CONSTANT_P (op
))
1022 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1023 || mode
== VOIDmode
)
1024 #ifdef LEGITIMATE_PIC_OPERAND_P
1025 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1027 && LEGITIMATE_CONSTANT_P (op
));
1029 /* Except for certain constants with VOIDmode, already checked for,
1030 OP's mode must match MODE if MODE specifies a mode. */
1032 if (GET_MODE (op
) != mode
)
1037 #ifdef INSN_SCHEDULING
1038 /* On machines that have insn scheduling, we want all memory
1039 reference to be explicit, so outlaw paradoxical SUBREGs. */
1040 if (GET_CODE (SUBREG_REG (op
)) == MEM
1041 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op
))))
1045 op
= SUBREG_REG (op
);
1046 code
= GET_CODE (op
);
1048 /* No longer needed, since (SUBREG (MEM...))
1049 will load the MEM into a reload reg in the MEM's own mode. */
1050 mode_altering_drug
= 1;
1055 /* A register whose class is NO_REGS is not a general operand. */
1056 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1057 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
);
1061 register rtx y
= XEXP (op
, 0);
1063 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
1066 if (GET_CODE (y
) == ADDRESSOF
)
1069 /* Use the mem's mode, since it will be reloaded thus. */
1070 mode
= GET_MODE (op
);
1071 GO_IF_LEGITIMATE_ADDRESS (mode
, y
, win
);
1074 /* Pretend this is an operand for now; we'll run force_operand
1075 on its replacement in fixup_var_refs_1. */
1076 if (code
== ADDRESSOF
)
1082 if (mode_altering_drug
)
1083 return ! mode_dependent_address_p (XEXP (op
, 0));
1087 /* Return 1 if OP is a valid memory address for a memory reference
1090 The main use of this function is as a predicate in match_operand
1091 expressions in the machine description. */
1094 address_operand (op
, mode
)
1096 enum machine_mode mode
;
1098 return memory_address_p (mode
, op
);
1101 /* Return 1 if OP is a register reference of mode MODE.
1102 If MODE is VOIDmode, accept a register in any mode.
1104 The main use of this function is as a predicate in match_operand
1105 expressions in the machine description.
1107 As a special exception, registers whose class is NO_REGS are
1108 not accepted by `register_operand'. The reason for this change
1109 is to allow the representation of special architecture artifacts
1110 (such as a condition code register) without extending the rtl
1111 definitions. Since registers of class NO_REGS cannot be used
1112 as registers in any case where register classes are examined,
1113 it is most consistent to keep this function from accepting them. */
1116 register_operand (op
, mode
)
1118 enum machine_mode mode
;
1120 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1123 if (GET_CODE (op
) == SUBREG
)
1125 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1126 because it is guaranteed to be reloaded into one.
1127 Just make sure the MEM is valid in itself.
1128 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1129 but currently it does result from (SUBREG (REG)...) where the
1130 reg went on the stack.) */
1131 if (! reload_completed
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1132 return general_operand (op
, mode
);
1134 #ifdef CLASS_CANNOT_CHANGE_MODE
1135 if (GET_CODE (SUBREG_REG (op
)) == REG
1136 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
1137 && (TEST_HARD_REG_BIT
1138 (reg_class_contents
[(int) CLASS_CANNOT_CHANGE_MODE
],
1139 REGNO (SUBREG_REG (op
))))
1140 && CLASS_CANNOT_CHANGE_MODE_P (mode
, GET_MODE (SUBREG_REG (op
)))
1141 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op
))) != MODE_COMPLEX_INT
1142 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op
))) != MODE_COMPLEX_FLOAT
)
1146 op
= SUBREG_REG (op
);
1149 /* If we have an ADDRESSOF, consider it valid since it will be
1150 converted into something that will not be a MEM. */
1151 if (GET_CODE (op
) == ADDRESSOF
)
1154 /* We don't consider registers whose class is NO_REGS
1155 to be a register operand. */
1156 return (GET_CODE (op
) == REG
1157 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1158 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1161 /* Return 1 for a register in Pmode; ignore the tested mode. */
1164 pmode_register_operand (op
, mode
)
1166 enum machine_mode mode ATTRIBUTE_UNUSED
;
1168 return register_operand (op
, Pmode
);
1171 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1172 or a hard register. */
1175 scratch_operand (op
, mode
)
1177 enum machine_mode mode
;
1179 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1182 return (GET_CODE (op
) == SCRATCH
1183 || (GET_CODE (op
) == REG
1184 && REGNO (op
) < FIRST_PSEUDO_REGISTER
));
1187 /* Return 1 if OP is a valid immediate operand for mode MODE.
1189 The main use of this function is as a predicate in match_operand
1190 expressions in the machine description. */
1193 immediate_operand (op
, mode
)
1195 enum machine_mode mode
;
1197 /* Don't accept CONST_INT or anything similar
1198 if the caller wants something floating. */
1199 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1200 && GET_MODE_CLASS (mode
) != MODE_INT
1201 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1204 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1205 result in 0/1. It seems a safe assumption that this is
1206 in range for everyone. */
1207 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1210 return (CONSTANT_P (op
)
1211 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
1212 || GET_MODE (op
) == VOIDmode
)
1213 #ifdef LEGITIMATE_PIC_OPERAND_P
1214 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1216 && LEGITIMATE_CONSTANT_P (op
));
1219 /* Returns 1 if OP is an operand that is a CONST_INT. */
1222 const_int_operand (op
, mode
)
1224 enum machine_mode mode ATTRIBUTE_UNUSED
;
1226 return GET_CODE (op
) == CONST_INT
;
1229 /* Returns 1 if OP is an operand that is a constant integer or constant
1230 floating-point number. */
1233 const_double_operand (op
, mode
)
1235 enum machine_mode mode
;
1237 /* Don't accept CONST_INT or anything similar
1238 if the caller wants something floating. */
1239 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1240 && GET_MODE_CLASS (mode
) != MODE_INT
1241 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1244 return ((GET_CODE (op
) == CONST_DOUBLE
|| GET_CODE (op
) == CONST_INT
)
1245 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1246 || GET_MODE (op
) == VOIDmode
));
1249 /* Return 1 if OP is a general operand that is not an immediate operand. */
1252 nonimmediate_operand (op
, mode
)
1254 enum machine_mode mode
;
1256 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
1259 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1262 nonmemory_operand (op
, mode
)
1264 enum machine_mode mode
;
1266 if (CONSTANT_P (op
))
1268 /* Don't accept CONST_INT or anything similar
1269 if the caller wants something floating. */
1270 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1271 && GET_MODE_CLASS (mode
) != MODE_INT
1272 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1275 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1276 || mode
== VOIDmode
)
1277 #ifdef LEGITIMATE_PIC_OPERAND_P
1278 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1280 && LEGITIMATE_CONSTANT_P (op
));
1283 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1286 if (GET_CODE (op
) == SUBREG
)
1288 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1289 because it is guaranteed to be reloaded into one.
1290 Just make sure the MEM is valid in itself.
1291 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1292 but currently it does result from (SUBREG (REG)...) where the
1293 reg went on the stack.) */
1294 if (! reload_completed
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1295 return general_operand (op
, mode
);
1296 op
= SUBREG_REG (op
);
1299 /* We don't consider registers whose class is NO_REGS
1300 to be a register operand. */
1301 return (GET_CODE (op
) == REG
1302 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1303 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1306 /* Return 1 if OP is a valid operand that stands for pushing a
1307 value of mode MODE onto the stack.
1309 The main use of this function is as a predicate in match_operand
1310 expressions in the machine description. */
1313 push_operand (op
, mode
)
1315 enum machine_mode mode
;
1317 if (GET_CODE (op
) != MEM
)
1320 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1325 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1328 return XEXP (op
, 0) == stack_pointer_rtx
;
1331 /* Return 1 if OP is a valid operand that stands for popping a
1332 value of mode MODE off the stack.
1334 The main use of this function is as a predicate in match_operand
1335 expressions in the machine description. */
1338 pop_operand (op
, mode
)
1340 enum machine_mode mode
;
1342 if (GET_CODE (op
) != MEM
)
1345 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1350 if (GET_CODE (op
) != STACK_POP_CODE
)
1353 return XEXP (op
, 0) == stack_pointer_rtx
;
1356 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1359 memory_address_p (mode
, addr
)
1360 enum machine_mode mode ATTRIBUTE_UNUSED
;
1363 if (GET_CODE (addr
) == ADDRESSOF
)
1366 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1373 /* Return 1 if OP is a valid memory reference with mode MODE,
1374 including a valid address.
1376 The main use of this function is as a predicate in match_operand
1377 expressions in the machine description. */
1380 memory_operand (op
, mode
)
1382 enum machine_mode mode
;
1386 if (! reload_completed
)
1387 /* Note that no SUBREG is a memory operand before end of reload pass,
1388 because (SUBREG (MEM...)) forces reloading into a register. */
1389 return GET_CODE (op
) == MEM
&& general_operand (op
, mode
);
1391 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1395 if (GET_CODE (inner
) == SUBREG
)
1396 inner
= SUBREG_REG (inner
);
1398 return (GET_CODE (inner
) == MEM
&& general_operand (op
, mode
));
1401 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1402 that is, a memory reference whose address is a general_operand. */
1405 indirect_operand (op
, mode
)
1407 enum machine_mode mode
;
1409 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1410 if (! reload_completed
1411 && GET_CODE (op
) == SUBREG
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1413 register int offset
= SUBREG_WORD (op
) * UNITS_PER_WORD
;
1414 rtx inner
= SUBREG_REG (op
);
1416 if (BYTES_BIG_ENDIAN
)
1417 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (op
)))
1418 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (inner
))));
1420 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1423 /* The only way that we can have a general_operand as the resulting
1424 address is if OFFSET is zero and the address already is an operand
1425 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1428 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1429 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1430 && GET_CODE (XEXP (XEXP (inner
, 0), 1)) == CONST_INT
1431 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1432 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1435 return (GET_CODE (op
) == MEM
1436 && memory_operand (op
, mode
)
1437 && general_operand (XEXP (op
, 0), Pmode
));
1440 /* Return 1 if this is a comparison operator. This allows the use of
1441 MATCH_OPERATOR to recognize all the branch insns. */
1444 comparison_operator (op
, mode
)
1446 enum machine_mode mode
;
1448 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1449 && GET_RTX_CLASS (GET_CODE (op
)) == '<');
1452 /* If BODY is an insn body that uses ASM_OPERANDS,
1453 return the number of operands (both input and output) in the insn.
1454 Otherwise return -1. */
1457 asm_noperands (body
)
1460 switch (GET_CODE (body
))
1463 /* No output operands: return number of input operands. */
1464 return ASM_OPERANDS_INPUT_LENGTH (body
);
1466 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1467 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1468 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body
)) + 1;
1472 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
1473 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1475 /* Multiple output operands, or 1 output plus some clobbers:
1476 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1480 /* Count backwards through CLOBBERs to determine number of SETs. */
1481 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1483 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1485 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1489 /* N_SETS is now number of output operands. */
1492 /* Verify that all the SETs we have
1493 came from a single original asm_operands insn
1494 (so that invalid combinations are blocked). */
1495 for (i
= 0; i
< n_sets
; i
++)
1497 rtx elt
= XVECEXP (body
, 0, i
);
1498 if (GET_CODE (elt
) != SET
)
1500 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1502 /* If these ASM_OPERANDS rtx's came from different original insns
1503 then they aren't allowed together. */
1504 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1505 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body
, 0, 0))))
1508 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body
, 0, 0)))
1511 else if (GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1513 /* 0 outputs, but some clobbers:
1514 body is [(asm_operands ...) (clobber (reg ...))...]. */
1517 /* Make sure all the other parallel things really are clobbers. */
1518 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1519 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1522 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body
, 0, 0));
1531 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1532 copy its operands (both input and output) into the vector OPERANDS,
1533 the locations of the operands within the insn into the vector OPERAND_LOCS,
1534 and the constraints for the operands into CONSTRAINTS.
1535 Write the modes of the operands into MODES.
1536 Return the assembler-template.
1538 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1539 we don't store that info. */
1542 decode_asm_operands (body
, operands
, operand_locs
, constraints
, modes
)
1546 const char **constraints
;
1547 enum machine_mode
*modes
;
1551 const char *template = 0;
1553 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1555 rtx asmop
= SET_SRC (body
);
1556 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1558 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
) + 1;
1560 for (i
= 1; i
< noperands
; i
++)
1563 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
- 1);
1565 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
- 1);
1567 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
- 1);
1569 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
- 1);
1572 /* The output is in the SET.
1573 Its constraint is in the ASM_OPERANDS itself. */
1575 operands
[0] = SET_DEST (body
);
1577 operand_locs
[0] = &SET_DEST (body
);
1579 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1581 modes
[0] = GET_MODE (SET_DEST (body
));
1582 template = ASM_OPERANDS_TEMPLATE (asmop
);
1584 else if (GET_CODE (body
) == ASM_OPERANDS
)
1587 /* No output operands: BODY is (asm_operands ....). */
1589 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1591 /* The input operands are found in the 1st element vector. */
1592 /* Constraints for inputs are in the 2nd element vector. */
1593 for (i
= 0; i
< noperands
; i
++)
1596 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1598 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1600 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1602 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1604 template = ASM_OPERANDS_TEMPLATE (asmop
);
1606 else if (GET_CODE (body
) == PARALLEL
1607 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
)
1609 rtx asmop
= SET_SRC (XVECEXP (body
, 0, 0));
1610 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1611 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1612 int nout
= 0; /* Does not include CLOBBERs. */
1614 /* At least one output, plus some CLOBBERs. */
1616 /* The outputs are in the SETs.
1617 Their constraints are in the ASM_OPERANDS itself. */
1618 for (i
= 0; i
< nparallel
; i
++)
1620 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1621 break; /* Past last SET */
1624 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1626 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1628 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1630 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1634 for (i
= 0; i
< nin
; i
++)
1637 operand_locs
[i
+ nout
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1639 operands
[i
+ nout
] = ASM_OPERANDS_INPUT (asmop
, i
);
1641 constraints
[i
+ nout
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1643 modes
[i
+ nout
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1646 template = ASM_OPERANDS_TEMPLATE (asmop
);
1648 else if (GET_CODE (body
) == PARALLEL
1649 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1651 /* No outputs, but some CLOBBERs. */
1653 rtx asmop
= XVECEXP (body
, 0, 0);
1654 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1656 for (i
= 0; i
< nin
; i
++)
1659 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1661 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1663 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1665 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1668 template = ASM_OPERANDS_TEMPLATE (asmop
);
1674 /* Check if an asm_operand matches it's constraints.
1675 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1678 asm_operand_ok (op
, constraint
)
1680 const char *constraint
;
1684 /* Use constrain_operands after reload. */
1685 if (reload_completed
)
1690 char c
= *constraint
++;
1704 case '0': case '1': case '2': case '3': case '4':
1705 case '5': case '6': case '7': case '8': case '9':
1706 /* For best results, our caller should have given us the
1707 proper matching constraint, but we can't actually fail
1708 the check if they didn't. Indicate that results are
1714 if (address_operand (op
, VOIDmode
))
1719 case 'V': /* non-offsettable */
1720 if (memory_operand (op
, VOIDmode
))
1724 case 'o': /* offsettable */
1725 if (offsettable_nonstrict_memref_p (op
))
1730 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1731 excepting those that expand_call created. Further, on some
1732 machines which do not have generalized auto inc/dec, an inc/dec
1733 is not a memory_operand.
1735 Match any memory and hope things are resolved after reload. */
1737 if (GET_CODE (op
) == MEM
1739 || GET_CODE (XEXP (op
, 0)) == PRE_DEC
1740 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
1745 if (GET_CODE (op
) == MEM
1747 || GET_CODE (XEXP (op
, 0)) == PRE_INC
1748 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
1753 #ifndef REAL_ARITHMETIC
1754 /* Match any floating double constant, but only if
1755 we can examine the bits of it reliably. */
1756 if ((HOST_FLOAT_FORMAT
!= TARGET_FLOAT_FORMAT
1757 || HOST_BITS_PER_WIDE_INT
!= BITS_PER_WORD
)
1758 && GET_MODE (op
) != VOIDmode
&& ! flag_pretend_float
)
1764 if (GET_CODE (op
) == CONST_DOUBLE
)
1769 if (GET_CODE (op
) == CONST_DOUBLE
1770 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, 'G'))
1774 if (GET_CODE (op
) == CONST_DOUBLE
1775 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, 'H'))
1780 if (GET_CODE (op
) == CONST_INT
1781 || (GET_CODE (op
) == CONST_DOUBLE
1782 && GET_MODE (op
) == VOIDmode
))
1788 #ifdef LEGITIMATE_PIC_OPERAND_P
1789 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1796 if (GET_CODE (op
) == CONST_INT
1797 || (GET_CODE (op
) == CONST_DOUBLE
1798 && GET_MODE (op
) == VOIDmode
))
1803 if (GET_CODE (op
) == CONST_INT
1804 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'))
1808 if (GET_CODE (op
) == CONST_INT
1809 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'J'))
1813 if (GET_CODE (op
) == CONST_INT
1814 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'K'))
1818 if (GET_CODE (op
) == CONST_INT
1819 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'))
1823 if (GET_CODE (op
) == CONST_INT
1824 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'M'))
1828 if (GET_CODE (op
) == CONST_INT
1829 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'N'))
1833 if (GET_CODE (op
) == CONST_INT
1834 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'O'))
1838 if (GET_CODE (op
) == CONST_INT
1839 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P'))
1847 if (general_operand (op
, VOIDmode
))
1852 /* For all other letters, we first check for a register class,
1853 otherwise it is an EXTRA_CONSTRAINT. */
1854 if (REG_CLASS_FROM_LETTER (c
) != NO_REGS
)
1857 if (GET_MODE (op
) == BLKmode
)
1859 if (register_operand (op
, VOIDmode
))
1862 #ifdef EXTRA_CONSTRAINT
1863 if (EXTRA_CONSTRAINT (op
, c
))
1873 /* Given an rtx *P, if it is a sum containing an integer constant term,
1874 return the location (type rtx *) of the pointer to that constant term.
1875 Otherwise, return a null pointer. */
1878 find_constant_term_loc (p
)
1882 register enum rtx_code code
= GET_CODE (*p
);
1884 /* If *P IS such a constant term, P is its location. */
1886 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1890 /* Otherwise, if not a sum, it has no constant term. */
1892 if (GET_CODE (*p
) != PLUS
)
1895 /* If one of the summands is constant, return its location. */
1897 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1898 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1901 /* Otherwise, check each summand for containing a constant term. */
1903 if (XEXP (*p
, 0) != 0)
1905 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1910 if (XEXP (*p
, 1) != 0)
1912 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1920 /* Return 1 if OP is a memory reference
1921 whose address contains no side effects
1922 and remains valid after the addition
1923 of a positive integer less than the
1924 size of the object being referenced.
1926 We assume that the original address is valid and do not check it.
1928 This uses strict_memory_address_p as a subroutine, so
1929 don't use it before reload. */
1932 offsettable_memref_p (op
)
1935 return ((GET_CODE (op
) == MEM
)
1936 && offsettable_address_p (1, GET_MODE (op
), XEXP (op
, 0)));
1939 /* Similar, but don't require a strictly valid mem ref:
1940 consider pseudo-regs valid as index or base regs. */
1943 offsettable_nonstrict_memref_p (op
)
1946 return ((GET_CODE (op
) == MEM
)
1947 && offsettable_address_p (0, GET_MODE (op
), XEXP (op
, 0)));
1950 /* Return 1 if Y is a memory address which contains no side effects
1951 and would remain valid after the addition of a positive integer
1952 less than the size of that mode.
1954 We assume that the original address is valid and do not check it.
1955 We do check that it is valid for narrower modes.
1957 If STRICTP is nonzero, we require a strictly valid address,
1958 for the sake of use in reload.c. */
1961 offsettable_address_p (strictp
, mode
, y
)
1963 enum machine_mode mode
;
1966 register enum rtx_code ycode
= GET_CODE (y
);
1970 int (*addressp
) PARAMS ((enum machine_mode
, rtx
)) =
1971 (strictp
? strict_memory_address_p
: memory_address_p
);
1972 unsigned int mode_sz
= GET_MODE_SIZE (mode
);
1974 if (CONSTANT_ADDRESS_P (y
))
1977 /* Adjusting an offsettable address involves changing to a narrower mode.
1978 Make sure that's OK. */
1980 if (mode_dependent_address_p (y
))
1983 /* ??? How much offset does an offsettable BLKmode reference need?
1984 Clearly that depends on the situation in which it's being used.
1985 However, the current situation in which we test 0xffffffff is
1986 less than ideal. Caveat user. */
1988 mode_sz
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
1990 /* If the expression contains a constant term,
1991 see if it remains valid when max possible offset is added. */
1993 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
1998 *y2
= plus_constant (*y2
, mode_sz
- 1);
1999 /* Use QImode because an odd displacement may be automatically invalid
2000 for any wider mode. But it should be valid for a single byte. */
2001 good
= (*addressp
) (QImode
, y
);
2003 /* In any case, restore old contents of memory. */
2008 if (GET_RTX_CLASS (ycode
) == 'a')
2011 /* The offset added here is chosen as the maximum offset that
2012 any instruction could need to add when operating on something
2013 of the specified mode. We assume that if Y and Y+c are
2014 valid addresses then so is Y+d for all 0<d<c. */
2016 z
= plus_constant_for_output (y
, mode_sz
- 1);
2018 /* Use QImode because an odd displacement may be automatically invalid
2019 for any wider mode. But it should be valid for a single byte. */
2020 return (*addressp
) (QImode
, z
);
2023 /* Return 1 if ADDR is an address-expression whose effect depends
2024 on the mode of the memory reference it is used in.
2026 Autoincrement addressing is a typical example of mode-dependence
2027 because the amount of the increment depends on the mode. */
2030 mode_dependent_address_p (addr
)
2031 rtx addr ATTRIBUTE_UNUSED
; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2033 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, win
);
2035 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2036 win
: ATTRIBUTE_UNUSED_LABEL
2040 /* Return 1 if OP is a general operand
2041 other than a memory ref with a mode dependent address. */
2044 mode_independent_operand (op
, mode
)
2045 enum machine_mode mode
;
2050 if (! general_operand (op
, mode
))
2053 if (GET_CODE (op
) != MEM
)
2056 addr
= XEXP (op
, 0);
2057 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, lose
);
2059 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2060 lose
: ATTRIBUTE_UNUSED_LABEL
2064 /* Given an operand OP that is a valid memory reference which
2065 satisfies offsettable_memref_p, return a new memory reference whose
2066 address has been adjusted by OFFSET. OFFSET should be positive and
2067 less than the size of the object referenced. */
2070 adj_offsettable_operand (op
, offset
)
2074 register enum rtx_code code
= GET_CODE (op
);
2078 register rtx y
= XEXP (op
, 0);
2081 if (CONSTANT_ADDRESS_P (y
))
2083 new = gen_rtx_MEM (GET_MODE (op
),
2084 plus_constant_for_output (y
, offset
));
2085 MEM_COPY_ATTRIBUTES (new, op
);
2089 if (GET_CODE (y
) == PLUS
)
2092 register rtx
*const_loc
;
2096 const_loc
= find_constant_term_loc (&z
);
2099 *const_loc
= plus_constant_for_output (*const_loc
, offset
);
2104 new = gen_rtx_MEM (GET_MODE (op
), plus_constant_for_output (y
, offset
));
2105 MEM_COPY_ATTRIBUTES (new, op
);
2111 /* Like extract_insn, but save insn extracted and don't extract again, when
2112 called again for the same insn expecting that recog_data still contain the
2113 valid information. This is used primary by gen_attr infrastructure that
2114 often does extract insn again and again. */
2116 extract_insn_cached (insn
)
2119 if (recog_data
.insn
== insn
&& INSN_CODE (insn
) >= 0)
2121 extract_insn (insn
);
2122 recog_data
.insn
= insn
;
2124 /* Do cached extract_insn, constrain_operand and complain about failures.
2125 Used by insn_attrtab. */
2127 extract_constrain_insn_cached (insn
)
2130 extract_insn_cached (insn
);
2131 if (which_alternative
== -1
2132 && !constrain_operands (reload_completed
))
2133 fatal_insn_not_found (insn
);
2135 /* Do cached constrain_operand and complain about failures. */
2137 constrain_operands_cached (strict
)
2140 if (which_alternative
== -1)
2141 return constrain_operands (strict
);
2146 /* Analyze INSN and fill in recog_data. */
2155 rtx body
= PATTERN (insn
);
2157 recog_data
.insn
= NULL
;
2158 recog_data
.n_operands
= 0;
2159 recog_data
.n_alternatives
= 0;
2160 recog_data
.n_dups
= 0;
2161 which_alternative
= -1;
2163 switch (GET_CODE (body
))
2173 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
2178 if ((GET_CODE (XVECEXP (body
, 0, 0)) == SET
2179 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
2180 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
2186 recog_data
.n_operands
= noperands
= asm_noperands (body
);
2189 /* This insn is an `asm' with operands. */
2191 /* expand_asm_operands makes sure there aren't too many operands. */
2192 if (noperands
> MAX_RECOG_OPERANDS
)
2195 /* Now get the operand values and constraints out of the insn. */
2196 decode_asm_operands (body
, recog_data
.operand
,
2197 recog_data
.operand_loc
,
2198 recog_data
.constraints
,
2199 recog_data
.operand_mode
);
2202 const char *p
= recog_data
.constraints
[0];
2203 recog_data
.n_alternatives
= 1;
2205 recog_data
.n_alternatives
+= (*p
++ == ',');
2209 fatal_insn_not_found (insn
);
2213 /* Ordinary insn: recognize it, get the operands via insn_extract
2214 and get the constraints. */
2216 icode
= recog_memoized (insn
);
2218 fatal_insn_not_found (insn
);
2220 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
2221 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
2222 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
2224 insn_extract (insn
);
2226 for (i
= 0; i
< noperands
; i
++)
2228 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2229 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
2230 /* VOIDmode match_operands gets mode from their real operand. */
2231 if (recog_data
.operand_mode
[i
] == VOIDmode
)
2232 recog_data
.operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2235 for (i
= 0; i
< noperands
; i
++)
2236 recog_data
.operand_type
[i
]
2237 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
2238 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
2241 if (recog_data
.n_alternatives
> MAX_RECOG_ALTERNATIVES
)
2245 /* After calling extract_insn, you can use this function to extract some
2246 information from the constraint strings into a more usable form.
2247 The collected data is stored in recog_op_alt. */
2249 preprocess_constraints ()
2253 memset (recog_op_alt
, 0, sizeof recog_op_alt
);
2254 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2257 struct operand_alternative
*op_alt
;
2258 const char *p
= recog_data
.constraints
[i
];
2260 op_alt
= recog_op_alt
[i
];
2262 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
2264 op_alt
[j
].class = NO_REGS
;
2265 op_alt
[j
].constraint
= p
;
2266 op_alt
[j
].matches
= -1;
2267 op_alt
[j
].matched
= -1;
2269 if (*p
== '\0' || *p
== ',')
2271 op_alt
[j
].anything_ok
= 1;
2281 while (c
!= ',' && c
!= '\0');
2282 if (c
== ',' || c
== '\0')
2287 case '=': case '+': case '*': case '%':
2288 case 'E': case 'F': case 'G': case 'H':
2289 case 's': case 'i': case 'n':
2290 case 'I': case 'J': case 'K': case 'L':
2291 case 'M': case 'N': case 'O': case 'P':
2292 /* These don't say anything we care about. */
2296 op_alt
[j
].reject
+= 6;
2299 op_alt
[j
].reject
+= 600;
2302 op_alt
[j
].earlyclobber
= 1;
2305 case '0': case '1': case '2': case '3': case '4':
2306 case '5': case '6': case '7': case '8': case '9':
2307 op_alt
[j
].matches
= c
- '0';
2308 recog_op_alt
[op_alt
[j
].matches
][j
].matched
= i
;
2312 op_alt
[j
].memory_ok
= 1;
2315 op_alt
[j
].decmem_ok
= 1;
2318 op_alt
[j
].incmem_ok
= 1;
2321 op_alt
[j
].nonoffmem_ok
= 1;
2324 op_alt
[j
].offmem_ok
= 1;
2327 op_alt
[j
].anything_ok
= 1;
2331 op_alt
[j
].is_address
= 1;
2332 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) BASE_REG_CLASS
];
2336 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) GENERAL_REGS
];
2340 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c
)];
2348 /* Check the operands of an insn against the insn's operand constraints
2349 and return 1 if they are valid.
2350 The information about the insn's operands, constraints, operand modes
2351 etc. is obtained from the global variables set up by extract_insn.
2353 WHICH_ALTERNATIVE is set to a number which indicates which
2354 alternative of constraints was matched: 0 for the first alternative,
2355 1 for the next, etc.
2357 In addition, when two operands are match
2358 and it happens that the output operand is (reg) while the
2359 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2360 make the output operand look like the input.
2361 This is because the output operand is the one the template will print.
2363 This is used in final, just before printing the assembler code and by
2364 the routines that determine an insn's attribute.
2366 If STRICT is a positive non-zero value, it means that we have been
2367 called after reload has been completed. In that case, we must
2368 do all checks strictly. If it is zero, it means that we have been called
2369 before reload has completed. In that case, we first try to see if we can
2370 find an alternative that matches strictly. If not, we try again, this
2371 time assuming that reload will fix up the insn. This provides a "best
2372 guess" for the alternative and is used to compute attributes of insns prior
2373 to reload. A negative value of STRICT is used for this internal call. */
2381 constrain_operands (strict
)
2384 const char *constraints
[MAX_RECOG_OPERANDS
];
2385 int matching_operands
[MAX_RECOG_OPERANDS
];
2386 int earlyclobber
[MAX_RECOG_OPERANDS
];
2389 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2390 int funny_match_index
;
2392 which_alternative
= 0;
2393 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2396 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2398 constraints
[c
] = recog_data
.constraints
[c
];
2399 matching_operands
[c
] = -1;
2406 funny_match_index
= 0;
2408 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2410 register rtx op
= recog_data
.operand
[opno
];
2411 enum machine_mode mode
= GET_MODE (op
);
2412 register const char *p
= constraints
[opno
];
2417 earlyclobber
[opno
] = 0;
2419 /* A unary operator may be accepted by the predicate, but it
2420 is irrelevant for matching constraints. */
2421 if (GET_RTX_CLASS (GET_CODE (op
)) == '1')
2424 if (GET_CODE (op
) == SUBREG
)
2426 if (GET_CODE (SUBREG_REG (op
)) == REG
2427 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2428 offset
= SUBREG_WORD (op
);
2429 op
= SUBREG_REG (op
);
2432 /* An empty constraint or empty alternative
2433 allows anything which matched the pattern. */
2434 if (*p
== 0 || *p
== ',')
2437 while (*p
&& (c
= *p
++) != ',')
2440 case '?': case '!': case '*': case '%':
2445 /* Ignore rest of this alternative as far as
2446 constraint checking is concerned. */
2447 while (*p
&& *p
!= ',')
2452 earlyclobber
[opno
] = 1;
2455 case '0': case '1': case '2': case '3': case '4':
2456 case '5': case '6': case '7': case '8': case '9':
2458 /* This operand must be the same as a previous one.
2459 This kind of constraint is used for instructions such
2460 as add when they take only two operands.
2462 Note that the lower-numbered operand is passed first.
2464 If we are not testing strictly, assume that this constraint
2465 will be satisfied. */
2470 rtx op1
= recog_data
.operand
[c
- '0'];
2471 rtx op2
= recog_data
.operand
[opno
];
2473 /* A unary operator may be accepted by the predicate,
2474 but it is irrelevant for matching constraints. */
2475 if (GET_RTX_CLASS (GET_CODE (op1
)) == '1')
2476 op1
= XEXP (op1
, 0);
2477 if (GET_RTX_CLASS (GET_CODE (op2
)) == '1')
2478 op2
= XEXP (op2
, 0);
2480 val
= operands_match_p (op1
, op2
);
2483 matching_operands
[opno
] = c
- '0';
2484 matching_operands
[c
- '0'] = opno
;
2488 /* If output is *x and input is *--x,
2489 arrange later to change the output to *--x as well,
2490 since the output op is the one that will be printed. */
2491 if (val
== 2 && strict
> 0)
2493 funny_match
[funny_match_index
].this = opno
;
2494 funny_match
[funny_match_index
++].other
= c
- '0';
2499 /* p is used for address_operands. When we are called by
2500 gen_reload, no one will have checked that the address is
2501 strictly valid, i.e., that all pseudos requiring hard regs
2502 have gotten them. */
2504 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2509 /* No need to check general_operand again;
2510 it was done in insn-recog.c. */
2512 /* Anything goes unless it is a REG and really has a hard reg
2513 but the hard reg is not in the class GENERAL_REGS. */
2515 || GENERAL_REGS
== ALL_REGS
2516 || GET_CODE (op
) != REG
2517 || (reload_in_progress
2518 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2519 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2524 /* This is used for a MATCH_SCRATCH in the cases when
2525 we don't actually need anything. So anything goes
2531 if (GET_CODE (op
) == MEM
2532 /* Before reload, accept what reload can turn into mem. */
2533 || (strict
< 0 && CONSTANT_P (op
))
2534 /* During reload, accept a pseudo */
2535 || (reload_in_progress
&& GET_CODE (op
) == REG
2536 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2541 if (GET_CODE (op
) == MEM
2542 && (GET_CODE (XEXP (op
, 0)) == PRE_DEC
2543 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
2548 if (GET_CODE (op
) == MEM
2549 && (GET_CODE (XEXP (op
, 0)) == PRE_INC
2550 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
2555 #ifndef REAL_ARITHMETIC
2556 /* Match any CONST_DOUBLE, but only if
2557 we can examine the bits of it reliably. */
2558 if ((HOST_FLOAT_FORMAT
!= TARGET_FLOAT_FORMAT
2559 || HOST_BITS_PER_WIDE_INT
!= BITS_PER_WORD
)
2560 && GET_MODE (op
) != VOIDmode
&& ! flag_pretend_float
)
2563 if (GET_CODE (op
) == CONST_DOUBLE
)
2568 if (GET_CODE (op
) == CONST_DOUBLE
)
2574 if (GET_CODE (op
) == CONST_DOUBLE
2575 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, c
))
2580 if (GET_CODE (op
) == CONST_INT
2581 || (GET_CODE (op
) == CONST_DOUBLE
2582 && GET_MODE (op
) == VOIDmode
))
2585 if (CONSTANT_P (op
))
2590 if (GET_CODE (op
) == CONST_INT
2591 || (GET_CODE (op
) == CONST_DOUBLE
2592 && GET_MODE (op
) == VOIDmode
))
2604 if (GET_CODE (op
) == CONST_INT
2605 && CONST_OK_FOR_LETTER_P (INTVAL (op
), c
))
2610 if (GET_CODE (op
) == MEM
2611 && ((strict
> 0 && ! offsettable_memref_p (op
))
2613 && !(CONSTANT_P (op
) || GET_CODE (op
) == MEM
))
2614 || (reload_in_progress
2615 && !(GET_CODE (op
) == REG
2616 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))))
2621 if ((strict
> 0 && offsettable_memref_p (op
))
2622 || (strict
== 0 && offsettable_nonstrict_memref_p (op
))
2623 /* Before reload, accept what reload can handle. */
2625 && (CONSTANT_P (op
) || GET_CODE (op
) == MEM
))
2626 /* During reload, accept a pseudo */
2627 || (reload_in_progress
&& GET_CODE (op
) == REG
2628 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2634 enum reg_class
class;
2636 class = (c
== 'r' ? GENERAL_REGS
: REG_CLASS_FROM_LETTER (c
));
2637 if (class != NO_REGS
)
2641 && GET_CODE (op
) == REG
2642 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2643 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2644 || (GET_CODE (op
) == REG
2645 && reg_fits_class_p (op
, class, offset
, mode
)))
2648 #ifdef EXTRA_CONSTRAINT
2649 else if (EXTRA_CONSTRAINT (op
, c
))
2656 constraints
[opno
] = p
;
2657 /* If this operand did not win somehow,
2658 this alternative loses. */
2662 /* This alternative won; the operands are ok.
2663 Change whichever operands this alternative says to change. */
2668 /* See if any earlyclobber operand conflicts with some other
2672 for (eopno
= 0; eopno
< recog_data
.n_operands
; eopno
++)
2673 /* Ignore earlyclobber operands now in memory,
2674 because we would often report failure when we have
2675 two memory operands, one of which was formerly a REG. */
2676 if (earlyclobber
[eopno
]
2677 && GET_CODE (recog_data
.operand
[eopno
]) == REG
)
2678 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2679 if ((GET_CODE (recog_data
.operand
[opno
]) == MEM
2680 || recog_data
.operand_type
[opno
] != OP_OUT
)
2682 /* Ignore things like match_operator operands. */
2683 && *recog_data
.constraints
[opno
] != 0
2684 && ! (matching_operands
[opno
] == eopno
2685 && operands_match_p (recog_data
.operand
[opno
],
2686 recog_data
.operand
[eopno
]))
2687 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2688 recog_data
.operand
[eopno
]))
2693 while (--funny_match_index
>= 0)
2695 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2696 = recog_data
.operand
[funny_match
[funny_match_index
].this];
2703 which_alternative
++;
2705 while (which_alternative
< recog_data
.n_alternatives
);
2707 which_alternative
= -1;
2708 /* If we are about to reject this, but we are not to test strictly,
2709 try a very loose test. Only return failure if it fails also. */
2711 return constrain_operands (-1);
2716 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2717 is a hard reg in class CLASS when its regno is offset by OFFSET
2718 and changed to mode MODE.
2719 If REG occupies multiple hard regs, all of them must be in CLASS. */
2722 reg_fits_class_p (operand
, class, offset
, mode
)
2724 register enum reg_class
class;
2726 enum machine_mode mode
;
2728 register int regno
= REGNO (operand
);
2729 if (regno
< FIRST_PSEUDO_REGISTER
2730 && TEST_HARD_REG_BIT (reg_class_contents
[(int) class],
2735 for (sr
= HARD_REGNO_NREGS (regno
, mode
) - 1;
2737 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) class],
2746 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2749 split_all_insns (upd_life
)
2756 blocks
= sbitmap_alloc (n_basic_blocks
);
2757 sbitmap_zero (blocks
);
2760 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
2762 basic_block bb
= BASIC_BLOCK (i
);
2765 for (insn
= bb
->head
; insn
; insn
= next
)
2769 /* Can't use `next_real_insn' because that might go across
2770 CODE_LABELS and short-out basic blocks. */
2771 next
= NEXT_INSN (insn
);
2772 if (! INSN_P (insn
))
2775 /* Don't split no-op move insns. These should silently
2776 disappear later in final. Splitting such insns would
2777 break the code that handles REG_NO_CONFLICT blocks. */
2779 else if ((set
= single_set (insn
)) != NULL
2780 && rtx_equal_p (SET_SRC (set
), SET_DEST (set
)))
2782 /* Nops get in the way while scheduling, so delete them
2783 now if register allocation has already been done. It
2784 is too risky to try to do this before register
2785 allocation, and there are unlikely to be very many
2786 nops then anyways. */
2787 if (reload_completed
)
2789 PUT_CODE (insn
, NOTE
);
2790 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2791 NOTE_SOURCE_FILE (insn
) = 0;
2796 /* Split insns here to get max fine-grain parallelism. */
2797 rtx first
= PREV_INSN (insn
);
2798 rtx last
= try_split (PATTERN (insn
), insn
, 1);
2802 SET_BIT (blocks
, i
);
2805 /* try_split returns the NOTE that INSN became. */
2806 PUT_CODE (insn
, NOTE
);
2807 NOTE_SOURCE_FILE (insn
) = 0;
2808 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2810 /* ??? Coddle to md files that generate subregs in post-
2811 reload splitters instead of computing the proper
2813 if (reload_completed
&& first
!= last
)
2815 first
= NEXT_INSN (first
);
2819 cleanup_subreg_operands (first
);
2822 first
= NEXT_INSN (first
);
2826 if (insn
== bb
->end
)
2834 if (insn
== bb
->end
)
2838 /* ??? When we're called from just after reload, the CFG is in bad
2839 shape, and we may have fallen off the end. This could be fixed
2840 by having reload not try to delete unreachable code. Otherwise
2841 assert we found the end insn. */
2842 if (insn
== NULL
&& upd_life
)
2846 if (changed
&& upd_life
)
2848 compute_bb_for_insn (get_max_uid ());
2849 count_or_remove_death_notes (blocks
, 1);
2850 update_life_info (blocks
, UPDATE_LIFE_LOCAL
, PROP_DEATH_NOTES
);
2853 sbitmap_free (blocks
);
2856 #ifdef HAVE_peephole2
2857 struct peep2_insn_data
2863 static struct peep2_insn_data peep2_insn_data
[MAX_INSNS_PER_PEEP2
+ 1];
2864 static int peep2_current
;
2866 /* A non-insn marker indicating the last insn of the block.
2867 The live_before regset for this element is correct, indicating
2868 global_live_at_end for the block. */
2869 #define PEEP2_EOB pc_rtx
2871 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2872 does not exist. Used by the recognizer to find the next insn to match
2873 in a multi-insn pattern. */
2879 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
2883 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
2884 n
-= MAX_INSNS_PER_PEEP2
+ 1;
2886 if (peep2_insn_data
[n
].insn
== PEEP2_EOB
)
2888 return peep2_insn_data
[n
].insn
;
2891 /* Return true if REGNO is dead before the Nth non-note insn
2895 peep2_regno_dead_p (ofs
, regno
)
2899 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2902 ofs
+= peep2_current
;
2903 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2904 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2906 if (peep2_insn_data
[ofs
].insn
== NULL_RTX
)
2909 return ! REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
);
2912 /* Similarly for a REG. */
2915 peep2_reg_dead_p (ofs
, reg
)
2921 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2924 ofs
+= peep2_current
;
2925 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2926 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2928 if (peep2_insn_data
[ofs
].insn
== NULL_RTX
)
2931 regno
= REGNO (reg
);
2932 n
= HARD_REGNO_NREGS (regno
, GET_MODE (reg
));
2934 if (REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
+ n
))
2939 /* Try to find a hard register of mode MODE, matching the register class in
2940 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2941 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2942 in which case the only condition is that the register must be available
2943 before CURRENT_INSN.
2944 Registers that already have bits set in REG_SET will not be considered.
2946 If an appropriate register is available, it will be returned and the
2947 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2951 peep2_find_free_register (from
, to
, class_str
, mode
, reg_set
)
2953 const char *class_str
;
2954 enum machine_mode mode
;
2955 HARD_REG_SET
*reg_set
;
2957 static int search_ofs
;
2958 enum reg_class
class;
2962 if (from
>= MAX_INSNS_PER_PEEP2
+ 1 || to
>= MAX_INSNS_PER_PEEP2
+ 1)
2965 from
+= peep2_current
;
2966 if (from
>= MAX_INSNS_PER_PEEP2
+ 1)
2967 from
-= MAX_INSNS_PER_PEEP2
+ 1;
2968 to
+= peep2_current
;
2969 if (to
>= MAX_INSNS_PER_PEEP2
+ 1)
2970 to
-= MAX_INSNS_PER_PEEP2
+ 1;
2972 if (peep2_insn_data
[from
].insn
== NULL_RTX
)
2974 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
2978 HARD_REG_SET this_live
;
2980 if (++from
>= MAX_INSNS_PER_PEEP2
+ 1)
2982 if (peep2_insn_data
[from
].insn
== NULL_RTX
)
2984 REG_SET_TO_HARD_REG_SET (this_live
, peep2_insn_data
[from
].live_before
);
2985 IOR_HARD_REG_SET (live
, this_live
);
2988 class = (class_str
[0] == 'r' ? GENERAL_REGS
2989 : REG_CLASS_FROM_LETTER (class_str
[0]));
2991 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2993 int raw_regno
, regno
, success
, j
;
2995 /* Distribute the free registers as much as possible. */
2996 raw_regno
= search_ofs
+ i
;
2997 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
2998 raw_regno
-= FIRST_PSEUDO_REGISTER
;
2999 #ifdef REG_ALLOC_ORDER
3000 regno
= reg_alloc_order
[raw_regno
];
3005 /* Don't allocate fixed registers. */
3006 if (fixed_regs
[regno
])
3008 /* Make sure the register is of the right class. */
3009 if (! TEST_HARD_REG_BIT (reg_class_contents
[class], regno
))
3011 /* And can support the mode we need. */
3012 if (! HARD_REGNO_MODE_OK (regno
, mode
))
3014 /* And that we don't create an extra save/restore. */
3015 if (! call_used_regs
[regno
] && ! regs_ever_live
[regno
])
3017 /* And we don't clobber traceback for noreturn functions. */
3018 if ((regno
== FRAME_POINTER_REGNUM
|| regno
== HARD_FRAME_POINTER_REGNUM
)
3019 && (! reload_completed
|| frame_pointer_needed
))
3023 for (j
= HARD_REGNO_NREGS (regno
, mode
) - 1; j
>= 0; j
--)
3025 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
3026 || TEST_HARD_REG_BIT (live
, regno
+ j
))
3034 for (j
= HARD_REGNO_NREGS (regno
, mode
) - 1; j
>= 0; j
--)
3035 SET_HARD_REG_BIT (*reg_set
, regno
+ j
);
3037 /* Start the next search with the next register. */
3038 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
3040 search_ofs
= raw_regno
;
3042 return gen_rtx_REG (mode
, regno
);
3050 /* Perform the peephole2 optimization pass. */
3053 peephole2_optimize (dump_file
)
3054 FILE *dump_file ATTRIBUTE_UNUSED
;
3056 regset_head rs_heads
[MAX_INSNS_PER_PEEP2
+ 2];
3060 #ifdef HAVE_conditional_execution
3065 /* Initialize the regsets we're going to use. */
3066 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3067 peep2_insn_data
[i
].live_before
= INITIALIZE_REG_SET (rs_heads
[i
]);
3068 live
= INITIALIZE_REG_SET (rs_heads
[i
]);
3070 #ifdef HAVE_conditional_execution
3071 blocks
= sbitmap_alloc (n_basic_blocks
);
3072 sbitmap_zero (blocks
);
3075 count_or_remove_death_notes (NULL
, 1);
3078 for (b
= n_basic_blocks
- 1; b
>= 0; --b
)
3080 basic_block bb
= BASIC_BLOCK (b
);
3081 struct propagate_block_info
*pbi
;
3083 /* Indicate that all slots except the last holds invalid data. */
3084 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
; ++i
)
3085 peep2_insn_data
[i
].insn
= NULL_RTX
;
3087 /* Indicate that the last slot contains live_after data. */
3088 peep2_insn_data
[MAX_INSNS_PER_PEEP2
].insn
= PEEP2_EOB
;
3089 peep2_current
= MAX_INSNS_PER_PEEP2
;
3091 /* Start up propagation. */
3092 COPY_REG_SET (live
, bb
->global_live_at_end
);
3093 COPY_REG_SET (peep2_insn_data
[MAX_INSNS_PER_PEEP2
].live_before
, live
);
3095 #ifdef HAVE_conditional_execution
3096 pbi
= init_propagate_block_info (bb
, live
, NULL
, NULL
, 0);
3098 pbi
= init_propagate_block_info (bb
, live
, NULL
, NULL
, PROP_DEATH_NOTES
);
3101 for (insn
= bb
->end
; ; insn
= prev
)
3103 prev
= PREV_INSN (insn
);
3109 /* Record this insn. */
3110 if (--peep2_current
< 0)
3111 peep2_current
= MAX_INSNS_PER_PEEP2
;
3112 peep2_insn_data
[peep2_current
].insn
= insn
;
3113 propagate_one_insn (pbi
, insn
);
3114 COPY_REG_SET (peep2_insn_data
[peep2_current
].live_before
, live
);
3116 /* Match the peephole. */
3117 try = peephole2_insns (PATTERN (insn
), insn
, &match_len
);
3120 i
= match_len
+ peep2_current
;
3121 if (i
>= MAX_INSNS_PER_PEEP2
+ 1)
3122 i
-= MAX_INSNS_PER_PEEP2
+ 1;
3124 /* Replace the old sequence with the new. */
3125 flow_delete_insn_chain (insn
, peep2_insn_data
[i
].insn
);
3126 try = emit_insn_after (try, prev
);
3128 /* Adjust the basic block boundaries. */
3129 if (peep2_insn_data
[i
].insn
== bb
->end
)
3131 if (insn
== bb
->head
)
3132 bb
->head
= NEXT_INSN (prev
);
3134 #ifdef HAVE_conditional_execution
3135 /* With conditional execution, we cannot back up the
3136 live information so easily, since the conditional
3137 death data structures are not so self-contained.
3138 So record that we've made a modification to this
3139 block and update life information at the end. */
3140 SET_BIT (blocks
, b
);
3143 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3144 peep2_insn_data
[i
].insn
= NULL_RTX
;
3145 peep2_insn_data
[peep2_current
].insn
= PEEP2_EOB
;
3147 /* Back up lifetime information past the end of the
3148 newly created sequence. */
3149 if (++i
>= MAX_INSNS_PER_PEEP2
+ 1)
3151 COPY_REG_SET (live
, peep2_insn_data
[i
].live_before
);
3153 /* Update life information for the new sequence. */
3159 i
= MAX_INSNS_PER_PEEP2
;
3160 peep2_insn_data
[i
].insn
= try;
3161 propagate_one_insn (pbi
, try);
3162 COPY_REG_SET (peep2_insn_data
[i
].live_before
, live
);
3164 try = PREV_INSN (try);
3166 while (try != prev
);
3168 /* ??? Should verify that LIVE now matches what we
3169 had before the new sequence. */
3176 if (insn
== bb
->head
)
3180 free_propagate_block_info (pbi
);
3183 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3184 FREE_REG_SET (peep2_insn_data
[i
].live_before
);
3185 FREE_REG_SET (live
);
3187 #ifdef HAVE_conditional_execution
3188 count_or_remove_death_notes (blocks
, 1);
3189 update_life_info (blocks
, UPDATE_LIFE_LOCAL
, PROP_DEATH_NOTES
);
3190 sbitmap_free (blocks
);
3193 #endif /* HAVE_peephole2 */