1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
26 #include "insn-config.h"
27 #include "insn-attr.h"
28 #include "insn-flags.h"
29 #include "insn-codes.h"
32 #include "hard-reg-set.h"
37 #include "basic-block.h"
/* Default addressing codes used when pushing to or popping from the
   stack, derived from the stack growth direction when the target does
   not define them explicitly.  The #else/#endif lines lost in the
   mangled extraction are restored here.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
57 static void validate_replace_rtx_1
PROTO((rtx
*, rtx
, rtx
, rtx
));
58 static rtx
*find_single_use_1
PROTO((rtx
, rtx
*));
59 static rtx
*find_constant_term_loc
PROTO((rtx
*));
60 static int insn_invalid_p
PROTO((rtx
));
62 /* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.c and expmed.c (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in regclass.c and final.c and reload.c.
68 init_recog and init_recog_no_volatile are responsible for setting this. */
72 struct recog_data recog_data
;
74 /* Contains a vector of operand_alternative structures for every operand.
75 Set up by preprocess_constraints. */
76 struct operand_alternative recog_op_alt
[MAX_RECOG_OPERANDS
][MAX_RECOG_ALTERNATIVES
];
78 /* On return from `constrain_operands', indicate which alternative
81 int which_alternative
;
83 /* Nonzero after end of reload pass.
84 Set to 1 or 0 by toplev.c.
85 Controls the significance of (SUBREG (MEM)). */
89 /* Initialize data used by the function `recog'.
90 This must be called once in the compilation of a function
91 before any insn recognition may be done in the function. */
94 init_recog_no_volatile ()
105 /* Try recognizing the instruction INSN,
106 and return the code number that results.
107 Remember the code so that repeated calls do not
108 need to spend the time for actual rerecognition.
110 This function is the normal interface to instruction recognition.
111 The automatically-generated function `recog' is normally called
112 through this one. (The only exception is in combine.c.) */
115 recog_memoized (insn
)
118 if (INSN_CODE (insn
) < 0)
119 INSN_CODE (insn
) = recog (PATTERN (insn
), insn
, NULL_PTR
);
120 return INSN_CODE (insn
);
123 /* Check that X is an insn-body for an `asm' with operands
124 and that the operands mentioned in it are legitimate. */
127 check_asm_operands (x
)
132 const char **constraints
;
135 /* Post-reload, be more strict with things. */
136 if (reload_completed
)
138 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
139 extract_insn (make_insn_raw (x
));
140 constrain_operands (1);
141 return which_alternative
>= 0;
144 noperands
= asm_noperands (x
);
150 operands
= (rtx
*) alloca (noperands
* sizeof (rtx
));
151 constraints
= (const char **) alloca (noperands
* sizeof (char *));
153 decode_asm_operands (x
, operands
, NULL_PTR
, constraints
, NULL_PTR
);
155 for (i
= 0; i
< noperands
; i
++)
157 const char *c
= constraints
[i
];
160 if (ISDIGIT ((unsigned char)c
[0]) && c
[1] == '\0')
161 c
= constraints
[c
[0] - '0'];
163 if (! asm_operand_ok (operands
[i
], c
))
170 /* Static data for the next two routines. */
172 typedef struct change_t
180 static change_t
*changes
;
181 static int changes_allocated
;
183 static int num_changes
= 0;
185 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
186 at which NEW will be placed. If OBJECT is zero, no validation is done,
187 the change is simply made.
189 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
190 will be called with the address and mode as parameters. If OBJECT is
191 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
194 IN_GROUP is non-zero if this is part of a group of changes that must be
195 performed as a group. In that case, the changes will be stored. The
196 function `apply_change_group' will validate and apply the changes.
198 If IN_GROUP is zero, this is a single change. Try to recognize the insn
199 or validate the memory reference with the change applied. If the result
200 is not valid for the machine, suppress the change and return zero.
201 Otherwise, perform the change and return 1. */
204 validate_change (object
, loc
, new, in_group
)
212 if (old
== new || rtx_equal_p (old
, new))
215 if (in_group
== 0 && num_changes
!= 0)
220 /* Save the information describing this change. */
221 if (num_changes
>= changes_allocated
)
223 if (changes_allocated
== 0)
224 /* This value allows for repeated substitutions inside complex
225 indexed addresses, or changes in up to 5 insns. */
226 changes_allocated
= MAX_RECOG_OPERANDS
* 5;
228 changes_allocated
*= 2;
231 (change_t
*) xrealloc (changes
,
232 sizeof (change_t
) * changes_allocated
);
235 changes
[num_changes
].object
= object
;
236 changes
[num_changes
].loc
= loc
;
237 changes
[num_changes
].old
= old
;
239 if (object
&& GET_CODE (object
) != MEM
)
241 /* Set INSN_CODE to force rerecognition of insn. Save old code in
243 changes
[num_changes
].old_code
= INSN_CODE (object
);
244 INSN_CODE (object
) = -1;
249 /* If we are making a group of changes, return 1. Otherwise, validate the
250 change group we made. */
255 return apply_change_group ();
258 /* This subroutine of apply_change_group verifies whether the changes to INSN
259 were valid; i.e. whether INSN can still be recognized. */
262 insn_invalid_p (insn
)
265 int icode
= recog_memoized (insn
);
266 int is_asm
= icode
< 0 && asm_noperands (PATTERN (insn
)) >= 0;
268 if (is_asm
&& ! check_asm_operands (PATTERN (insn
)))
270 if (! is_asm
&& icode
< 0)
273 /* After reload, verify that all constraints are satisfied. */
274 if (reload_completed
)
278 if (! constrain_operands (1))
285 /* Apply a group of changes previously issued with `validate_change'.
286 Return 1 if all changes are valid, zero otherwise. */
289 apply_change_group ()
293 /* The changes have been applied and all INSN_CODEs have been reset to force
296 The changes are valid if we aren't given an object, or if we are
297 given a MEM and it still is a valid address, or if this is in insn
298 and it is recognized. In the latter case, if reload has completed,
299 we also require that the operands meet the constraints for
302 for (i
= 0; i
< num_changes
; i
++)
304 rtx object
= changes
[i
].object
;
309 if (GET_CODE (object
) == MEM
)
311 if (! memory_address_p (GET_MODE (object
), XEXP (object
, 0)))
314 else if (insn_invalid_p (object
))
316 rtx pat
= PATTERN (object
);
318 /* Perhaps we couldn't recognize the insn because there were
319 extra CLOBBERs at the end. If so, try to re-recognize
320 without the last CLOBBER (later iterations will cause each of
321 them to be eliminated, in turn). But don't do this if we
322 have an ASM_OPERAND. */
323 if (GET_CODE (pat
) == PARALLEL
324 && GET_CODE (XVECEXP (pat
, 0, XVECLEN (pat
, 0) - 1)) == CLOBBER
325 && asm_noperands (PATTERN (object
)) < 0)
329 if (XVECLEN (pat
, 0) == 2)
330 newpat
= XVECEXP (pat
, 0, 0);
336 = gen_rtx_PARALLEL (VOIDmode
,
337 gen_rtvec (XVECLEN (pat
, 0) - 1));
338 for (j
= 0; j
< XVECLEN (newpat
, 0); j
++)
339 XVECEXP (newpat
, 0, j
) = XVECEXP (pat
, 0, j
);
342 /* Add a new change to this group to replace the pattern
343 with this new pattern. Then consider this change
344 as having succeeded. The change we added will
345 cause the entire call to fail if things remain invalid.
347 Note that this can lose if a later change than the one
348 we are processing specified &XVECEXP (PATTERN (object), 0, X)
349 but this shouldn't occur. */
351 validate_change (object
, &PATTERN (object
), newpat
, 1);
353 else if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
354 /* If this insn is a CLOBBER or USE, it is always valid, but is
362 if (i
== num_changes
)
374 /* Return the number of changes so far in the current group. */
377 num_validated_changes ()
382 /* Retract the changes numbered NUM and up. */
390 /* Back out all the changes. Do this in the opposite order in which
392 for (i
= num_changes
- 1; i
>= num
; i
--)
394 *changes
[i
].loc
= changes
[i
].old
;
395 if (changes
[i
].object
&& GET_CODE (changes
[i
].object
) != MEM
)
396 INSN_CODE (changes
[i
].object
) = changes
[i
].old_code
;
401 /* Replace every occurrence of FROM in X with TO. Mark each change with
402 validate_change passing OBJECT. */
405 validate_replace_rtx_1 (loc
, from
, to
, object
)
407 rtx from
, to
, object
;
410 register const char *fmt
;
411 register rtx x
= *loc
;
412 enum rtx_code code
= GET_CODE (x
);
414 /* X matches FROM if it is the same rtx or they are both referring to the
415 same register in the same mode. Avoid calling rtx_equal_p unless the
416 operands look similar. */
419 || (GET_CODE (x
) == REG
&& GET_CODE (from
) == REG
420 && GET_MODE (x
) == GET_MODE (from
)
421 && REGNO (x
) == REGNO (from
))
422 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
423 && rtx_equal_p (x
, from
)))
425 validate_change (object
, loc
, to
, 1);
429 /* For commutative or comparison operations, try replacing each argument
430 separately and seeing if we made any changes. If so, put a constant
432 if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == 'c')
434 int prev_changes
= num_changes
;
436 validate_replace_rtx_1 (&XEXP (x
, 0), from
, to
, object
);
437 validate_replace_rtx_1 (&XEXP (x
, 1), from
, to
, object
);
438 if (prev_changes
!= num_changes
&& CONSTANT_P (XEXP (x
, 0)))
440 validate_change (object
, loc
,
441 gen_rtx_fmt_ee (GET_RTX_CLASS (code
) == 'c' ? code
442 : swap_condition (code
),
443 GET_MODE (x
), XEXP (x
, 1),
451 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
452 done the substitution, otherwise we won't. */
457 /* If we have a PLUS whose second operand is now a CONST_INT, use
458 plus_constant to try to simplify it. */
459 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& XEXP (x
, 1) == to
)
460 validate_change (object
, loc
, plus_constant (XEXP (x
, 0), INTVAL (to
)),
465 if (GET_CODE (to
) == CONST_INT
&& XEXP (x
, 1) == from
)
467 validate_change (object
, loc
,
468 plus_constant (XEXP (x
, 0), - INTVAL (to
)),
476 /* In these cases, the operation to be performed depends on the mode
477 of the operand. If we are replacing the operand with a VOIDmode
478 constant, we lose the information. So try to simplify the operation
479 in that case. If it fails, substitute in something that we know
480 won't be recognized. */
481 if (GET_MODE (to
) == VOIDmode
482 && (XEXP (x
, 0) == from
483 || (GET_CODE (XEXP (x
, 0)) == REG
&& GET_CODE (from
) == REG
484 && GET_MODE (XEXP (x
, 0)) == GET_MODE (from
)
485 && REGNO (XEXP (x
, 0)) == REGNO (from
))))
487 rtx
new = simplify_unary_operation (code
, GET_MODE (x
), to
,
490 new = gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
492 validate_change (object
, loc
, new, 1);
498 /* If we have a SUBREG of a register that we are replacing and we are
499 replacing it with a MEM, make a new MEM and try replacing the
500 SUBREG with it. Don't do this if the MEM has a mode-dependent address
501 or if we would be widening it. */
503 if (SUBREG_REG (x
) == from
504 && GET_CODE (from
) == REG
505 && GET_CODE (to
) == MEM
506 && ! mode_dependent_address_p (XEXP (to
, 0))
507 && ! MEM_VOLATILE_P (to
)
508 && GET_MODE_SIZE (GET_MODE (x
)) <= GET_MODE_SIZE (GET_MODE (to
)))
510 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
511 enum machine_mode mode
= GET_MODE (x
);
514 if (BYTES_BIG_ENDIAN
)
515 offset
+= (MIN (UNITS_PER_WORD
,
516 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
517 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
519 new = gen_rtx_MEM (mode
, plus_constant (XEXP (to
, 0), offset
));
520 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to
);
521 MEM_COPY_ATTRIBUTES (new, to
);
522 validate_change (object
, loc
, new, 1);
529 /* If we are replacing a register with memory, try to change the memory
530 to be the mode required for memory in extract operations (this isn't
531 likely to be an insertion operation; if it was, nothing bad will
532 happen, we might just fail in some cases). */
534 if (XEXP (x
, 0) == from
&& GET_CODE (from
) == REG
&& GET_CODE (to
) == MEM
535 && GET_CODE (XEXP (x
, 1)) == CONST_INT
536 && GET_CODE (XEXP (x
, 2)) == CONST_INT
537 && ! mode_dependent_address_p (XEXP (to
, 0))
538 && ! MEM_VOLATILE_P (to
))
540 enum machine_mode wanted_mode
= VOIDmode
;
541 enum machine_mode is_mode
= GET_MODE (to
);
542 int pos
= INTVAL (XEXP (x
, 2));
545 if (code
== ZERO_EXTRACT
)
547 wanted_mode
= insn_data
[(int) CODE_FOR_extzv
].operand
[1].mode
;
548 if (wanted_mode
== VOIDmode
)
549 wanted_mode
= word_mode
;
553 if (code
== SIGN_EXTRACT
)
555 wanted_mode
= insn_data
[(int) CODE_FOR_extv
].operand
[1].mode
;
556 if (wanted_mode
== VOIDmode
)
557 wanted_mode
= word_mode
;
561 /* If we have a narrower mode, we can do something. */
562 if (wanted_mode
!= VOIDmode
563 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
565 int offset
= pos
/ BITS_PER_UNIT
;
568 /* If the bytes and bits are counted differently, we
569 must adjust the offset. */
570 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
571 offset
= (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
)
574 pos
%= GET_MODE_BITSIZE (wanted_mode
);
576 newmem
= gen_rtx_MEM (wanted_mode
,
577 plus_constant (XEXP (to
, 0), offset
));
578 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (to
);
579 MEM_COPY_ATTRIBUTES (newmem
, to
);
581 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
582 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
592 /* For commutative or comparison operations we've already performed
593 replacements. Don't try to perform them again. */
594 if (GET_RTX_CLASS (code
) != '<' && GET_RTX_CLASS (code
) != 'c')
596 fmt
= GET_RTX_FORMAT (code
);
597 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
600 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
);
601 else if (fmt
[i
] == 'E')
602 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
603 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
);
608 /* Try replacing every occurrence of FROM in INSN with TO. After all
609 changes have been made, validate by seeing if INSN is still valid. */
612 validate_replace_rtx (from
, to
, insn
)
615 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
616 return apply_change_group ();
619 /* Try replacing every occurrence of FROM in INSN with TO. After all
620 changes have been made, validate by seeing if INSN is still valid. */
623 validate_replace_rtx_group (from
, to
, insn
)
626 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
629 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
630 SET_DESTs. After all changes have been made, validate by seeing if
631 INSN is still valid. */
634 validate_replace_src (from
, to
, insn
)
637 if ((GET_CODE (insn
) != INSN
&& GET_CODE (insn
) != JUMP_INSN
)
638 || GET_CODE (PATTERN (insn
)) != SET
)
641 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn
)), from
, to
, insn
);
642 if (GET_CODE (SET_DEST (PATTERN (insn
))) == MEM
)
643 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn
)), 0),
645 return apply_change_group ();
649 /* Return 1 if the insn using CC0 set by INSN does not contain
650 any ordered tests applied to the condition codes.
651 EQ and NE tests do not count. */
654 next_insn_tests_no_inequality (insn
)
657 register rtx next
= next_cc0_user (insn
);
659 /* If there is no next insn, we have to take the conservative choice. */
663 return ((GET_CODE (next
) == JUMP_INSN
664 || GET_CODE (next
) == INSN
665 || GET_CODE (next
) == CALL_INSN
)
666 && ! inequality_comparisons_p (PATTERN (next
)));
#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
698 /* This is used by find_single_use to locate an rtx that contains exactly one
699 use of DEST, which is typically either a REG or CC0. It returns a
700 pointer to the innermost rtx expression containing DEST. Appearances of
701 DEST that are being used to totally replace it are not counted. */
704 find_single_use_1 (dest
, loc
)
709 enum rtx_code code
= GET_CODE (x
);
726 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
727 of a REG that occupies all of the REG, the insn uses DEST if
728 it is mentioned in the destination or the source. Otherwise, we
729 need just check the source. */
730 if (GET_CODE (SET_DEST (x
)) != CC0
731 && GET_CODE (SET_DEST (x
)) != PC
732 && GET_CODE (SET_DEST (x
)) != REG
733 && ! (GET_CODE (SET_DEST (x
)) == SUBREG
734 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
735 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x
))))
736 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
737 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
738 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))))
741 return find_single_use_1 (dest
, &SET_SRC (x
));
745 return find_single_use_1 (dest
, &XEXP (x
, 0));
751 /* If it wasn't one of the common cases above, check each expression and
752 vector of this code. Look for a unique usage of DEST. */
754 fmt
= GET_RTX_FORMAT (code
);
755 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
759 if (dest
== XEXP (x
, i
)
760 || (GET_CODE (dest
) == REG
&& GET_CODE (XEXP (x
, i
)) == REG
761 && REGNO (dest
) == REGNO (XEXP (x
, i
))))
764 this_result
= find_single_use_1 (dest
, &XEXP (x
, i
));
767 result
= this_result
;
768 else if (this_result
)
769 /* Duplicate usage. */
772 else if (fmt
[i
] == 'E')
776 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
778 if (XVECEXP (x
, i
, j
) == dest
779 || (GET_CODE (dest
) == REG
780 && GET_CODE (XVECEXP (x
, i
, j
)) == REG
781 && REGNO (XVECEXP (x
, i
, j
)) == REGNO (dest
)))
784 this_result
= find_single_use_1 (dest
, &XVECEXP (x
, i
, j
));
787 result
= this_result
;
788 else if (this_result
)
797 /* See if DEST, produced in INSN, is used only a single time in the
798 sequel. If so, return a pointer to the innermost rtx expression in which
801 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
803 This routine will return usually zero either before flow is called (because
804 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
805 note can't be trusted).
807 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
808 care about REG_DEAD notes or LOG_LINKS.
810 Otherwise, we find the single use by finding an insn that has a
811 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
812 only referenced once in that insn, we know that it must be the first
813 and last insn referencing DEST. */
816 find_single_use (dest
, insn
, ploc
)
828 next
= NEXT_INSN (insn
);
830 || (GET_CODE (next
) != INSN
&& GET_CODE (next
) != JUMP_INSN
))
833 result
= find_single_use_1 (dest
, &PATTERN (next
));
840 if (reload_completed
|| reload_in_progress
|| GET_CODE (dest
) != REG
)
843 for (next
= next_nonnote_insn (insn
);
844 next
!= 0 && GET_CODE (next
) != CODE_LABEL
;
845 next
= next_nonnote_insn (next
))
846 if (GET_RTX_CLASS (GET_CODE (next
)) == 'i' && dead_or_set_p (next
, dest
))
848 for (link
= LOG_LINKS (next
); link
; link
= XEXP (link
, 1))
849 if (XEXP (link
, 0) == insn
)
854 result
= find_single_use_1 (dest
, &PATTERN (next
));
864 /* Return 1 if OP is a valid general operand for machine mode MODE.
865 This is either a register reference, a memory reference,
866 or a constant. In the case of a memory reference, the address
867 is checked for general validity for the target machine.
869 Register and memory references must have mode MODE in order to be valid,
870 but some constants have no machine mode and are valid for any mode.
872 If MODE is VOIDmode, OP is checked for validity for whatever mode
875 The main use of this function is as a predicate in match_operand
876 expressions in the machine description.
878 For an explanation of this function's behavior for registers of
879 class NO_REGS, see the comment for `register_operand'. */
882 general_operand (op
, mode
)
884 enum machine_mode mode
;
886 register enum rtx_code code
= GET_CODE (op
);
887 int mode_altering_drug
= 0;
889 if (mode
== VOIDmode
)
890 mode
= GET_MODE (op
);
892 /* Don't accept CONST_INT or anything similar
893 if the caller wants something floating. */
894 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
895 && GET_MODE_CLASS (mode
) != MODE_INT
896 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
900 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
)
901 #ifdef LEGITIMATE_PIC_OPERAND_P
902 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
904 && LEGITIMATE_CONSTANT_P (op
));
906 /* Except for certain constants with VOIDmode, already checked for,
907 OP's mode must match MODE if MODE specifies a mode. */
909 if (GET_MODE (op
) != mode
)
914 #ifdef INSN_SCHEDULING
915 /* On machines that have insn scheduling, we want all memory
916 reference to be explicit, so outlaw paradoxical SUBREGs. */
917 if (GET_CODE (SUBREG_REG (op
)) == MEM
918 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op
))))
922 op
= SUBREG_REG (op
);
923 code
= GET_CODE (op
);
925 /* No longer needed, since (SUBREG (MEM...))
926 will load the MEM into a reload reg in the MEM's own mode. */
927 mode_altering_drug
= 1;
932 /* A register whose class is NO_REGS is not a general operand. */
933 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
934 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
);
938 register rtx y
= XEXP (op
, 0);
939 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
941 if (GET_CODE (y
) == ADDRESSOF
)
943 /* Use the mem's mode, since it will be reloaded thus. */
944 mode
= GET_MODE (op
);
945 GO_IF_LEGITIMATE_ADDRESS (mode
, y
, win
);
948 /* Pretend this is an operand for now; we'll run force_operand
949 on its replacement in fixup_var_refs_1. */
950 if (code
== ADDRESSOF
)
956 if (mode_altering_drug
)
957 return ! mode_dependent_address_p (XEXP (op
, 0));
961 /* Return 1 if OP is a valid memory address for a memory reference
964 The main use of this function is as a predicate in match_operand
965 expressions in the machine description. */
968 address_operand (op
, mode
)
970 enum machine_mode mode
;
972 return memory_address_p (mode
, op
);
975 /* Return 1 if OP is a register reference of mode MODE.
976 If MODE is VOIDmode, accept a register in any mode.
978 The main use of this function is as a predicate in match_operand
979 expressions in the machine description.
981 As a special exception, registers whose class is NO_REGS are
982 not accepted by `register_operand'. The reason for this change
983 is to allow the representation of special architecture artifacts
984 (such as a condition code register) without extending the rtl
985 definitions. Since registers of class NO_REGS cannot be used
986 as registers in any case where register classes are examined,
987 it is most consistent to keep this function from accepting them. */
990 register_operand (op
, mode
)
992 enum machine_mode mode
;
994 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
997 if (GET_CODE (op
) == SUBREG
)
999 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1000 because it is guaranteed to be reloaded into one.
1001 Just make sure the MEM is valid in itself.
1002 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1003 but currently it does result from (SUBREG (REG)...) where the
1004 reg went on the stack.) */
1005 if (! reload_completed
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1006 return general_operand (op
, mode
);
1008 #ifdef CLASS_CANNOT_CHANGE_SIZE
1009 if (GET_CODE (SUBREG_REG (op
)) == REG
1010 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
1011 && TEST_HARD_REG_BIT (reg_class_contents
[(int) CLASS_CANNOT_CHANGE_SIZE
],
1012 REGNO (SUBREG_REG (op
)))
1013 && (GET_MODE_SIZE (mode
)
1014 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op
))))
1015 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op
))) != MODE_COMPLEX_INT
1016 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op
))) != MODE_COMPLEX_FLOAT
)
1020 op
= SUBREG_REG (op
);
1023 /* We don't consider registers whose class is NO_REGS
1024 to be a register operand. */
1025 return (GET_CODE (op
) == REG
1026 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1027 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1030 /* Return 1 for a register in Pmode; ignore the tested mode. */
1033 pmode_register_operand (op
, mode
)
1035 enum machine_mode mode ATTRIBUTE_UNUSED
;
1037 return register_operand (op
, Pmode
);
1040 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1041 or a hard register. */
1044 scratch_operand (op
, mode
)
1046 enum machine_mode mode
;
1048 return (GET_MODE (op
) == mode
1049 && (GET_CODE (op
) == SCRATCH
1050 || (GET_CODE (op
) == REG
1051 && REGNO (op
) < FIRST_PSEUDO_REGISTER
)));
1054 /* Return 1 if OP is a valid immediate operand for mode MODE.
1056 The main use of this function is as a predicate in match_operand
1057 expressions in the machine description. */
1060 immediate_operand (op
, mode
)
1062 enum machine_mode mode
;
1064 /* Don't accept CONST_INT or anything similar
1065 if the caller wants something floating. */
1066 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1067 && GET_MODE_CLASS (mode
) != MODE_INT
1068 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1071 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1072 result in 0/1. It seems a safe assumption that this is
1073 in range for everyone. */
1074 if (GET_CODE (op
) == CONSTANT_P_RTX
)
1077 return (CONSTANT_P (op
)
1078 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
1079 || GET_MODE (op
) == VOIDmode
)
1080 #ifdef LEGITIMATE_PIC_OPERAND_P
1081 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1083 && LEGITIMATE_CONSTANT_P (op
));
1086 /* Returns 1 if OP is an operand that is a CONST_INT. */
1089 const_int_operand (op
, mode
)
1091 enum machine_mode mode ATTRIBUTE_UNUSED
;
1093 return GET_CODE (op
) == CONST_INT
;
1096 /* Returns 1 if OP is an operand that is a constant integer or constant
1097 floating-point number. */
1100 const_double_operand (op
, mode
)
1102 enum machine_mode mode
;
1104 /* Don't accept CONST_INT or anything similar
1105 if the caller wants something floating. */
1106 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1107 && GET_MODE_CLASS (mode
) != MODE_INT
1108 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1111 return ((GET_CODE (op
) == CONST_DOUBLE
|| GET_CODE (op
) == CONST_INT
)
1112 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1113 || GET_MODE (op
) == VOIDmode
));
1116 /* Return 1 if OP is a general operand that is not an immediate operand. */
1119 nonimmediate_operand (op
, mode
)
1121 enum machine_mode mode
;
1123 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
1126 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1129 nonmemory_operand (op
, mode
)
1131 enum machine_mode mode
;
1133 if (CONSTANT_P (op
))
1135 /* Don't accept CONST_INT or anything similar
1136 if the caller wants something floating. */
1137 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1138 && GET_MODE_CLASS (mode
) != MODE_INT
1139 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1142 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
)
1143 #ifdef LEGITIMATE_PIC_OPERAND_P
1144 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1146 && LEGITIMATE_CONSTANT_P (op
));
1149 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1152 if (GET_CODE (op
) == SUBREG
)
1154 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1155 because it is guaranteed to be reloaded into one.
1156 Just make sure the MEM is valid in itself.
1157 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1158 but currently it does result from (SUBREG (REG)...) where the
1159 reg went on the stack.) */
1160 if (! reload_completed
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1161 return general_operand (op
, mode
);
1162 op
= SUBREG_REG (op
);
1165 /* We don't consider registers whose class is NO_REGS
1166 to be a register operand. */
1167 return (GET_CODE (op
) == REG
1168 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1169 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1172 /* Return 1 if OP is a valid operand that stands for pushing a
1173 value of mode MODE onto the stack.
1175 The main use of this function is as a predicate in match_operand
1176 expressions in the machine description. */
1179 push_operand (op
, mode
)
1181 enum machine_mode mode
;
1183 if (GET_CODE (op
) != MEM
)
1186 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1191 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1194 return XEXP (op
, 0) == stack_pointer_rtx
;
1197 /* Return 1 if OP is a valid operand that stands for popping a
1198 value of mode MODE off the stack.
1200 The main use of this function is as a predicate in match_operand
1201 expressions in the machine description. */
1204 pop_operand (op
, mode
)
1206 enum machine_mode mode
;
1208 if (GET_CODE (op
) != MEM
)
1211 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1216 if (GET_CODE (op
) != STACK_POP_CODE
)
1219 return XEXP (op
, 0) == stack_pointer_rtx
;
1222 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1225 memory_address_p (mode
, addr
)
1226 enum machine_mode mode
;
1229 if (GET_CODE (addr
) == ADDRESSOF
)
1232 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1239 /* Return 1 if OP is a valid memory reference with mode MODE,
1240 including a valid address.
1242 The main use of this function is as a predicate in match_operand
1243 expressions in the machine description. */
1246 memory_operand (op
, mode
)
1248 enum machine_mode mode
;
1252 if (! reload_completed
)
1253 /* Note that no SUBREG is a memory operand before end of reload pass,
1254 because (SUBREG (MEM...)) forces reloading into a register. */
1255 return GET_CODE (op
) == MEM
&& general_operand (op
, mode
);
1257 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1261 if (GET_CODE (inner
) == SUBREG
)
1262 inner
= SUBREG_REG (inner
);
1264 return (GET_CODE (inner
) == MEM
&& general_operand (op
, mode
));
1267 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1268 that is, a memory reference whose address is a general_operand. */
1271 indirect_operand (op
, mode
)
1273 enum machine_mode mode
;
1275 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1276 if (! reload_completed
1277 && GET_CODE (op
) == SUBREG
&& GET_CODE (SUBREG_REG (op
)) == MEM
)
1279 register int offset
= SUBREG_WORD (op
) * UNITS_PER_WORD
;
1280 rtx inner
= SUBREG_REG (op
);
1282 if (BYTES_BIG_ENDIAN
)
1283 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (op
)))
1284 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (inner
))));
1286 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1289 /* The only way that we can have a general_operand as the resulting
1290 address is if OFFSET is zero and the address already is an operand
1291 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1294 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1295 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1296 && GET_CODE (XEXP (XEXP (inner
, 0), 1)) == CONST_INT
1297 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1298 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1301 return (GET_CODE (op
) == MEM
1302 && memory_operand (op
, mode
)
1303 && general_operand (XEXP (op
, 0), Pmode
));
1306 /* Return 1 if this is a comparison operator. This allows the use of
1307 MATCH_OPERATOR to recognize all the branch insns. */
1310 comparison_operator (op
, mode
)
1312 enum machine_mode mode
;
1314 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1315 && GET_RTX_CLASS (GET_CODE (op
)) == '<');
1318 /* If BODY is an insn body that uses ASM_OPERANDS,
1319 return the number of operands (both input and output) in the insn.
1320 Otherwise return -1. */
1323 asm_noperands (body
)
1326 if (GET_CODE (body
) == ASM_OPERANDS
)
1327 /* No output operands: return number of input operands. */
1328 return ASM_OPERANDS_INPUT_LENGTH (body
);
1329 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1330 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1331 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body
)) + 1;
1332 else if (GET_CODE (body
) == PARALLEL
1333 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
1334 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1336 /* Multiple output operands, or 1 output plus some clobbers:
1337 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1341 /* Count backwards through CLOBBERs to determine number of SETs. */
1342 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1344 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1346 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1350 /* N_SETS is now number of output operands. */
1353 /* Verify that all the SETs we have
1354 came from a single original asm_operands insn
1355 (so that invalid combinations are blocked). */
1356 for (i
= 0; i
< n_sets
; i
++)
1358 rtx elt
= XVECEXP (body
, 0, i
);
1359 if (GET_CODE (elt
) != SET
)
1361 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1363 /* If these ASM_OPERANDS rtx's came from different original insns
1364 then they aren't allowed together. */
1365 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1366 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body
, 0, 0))))
1369 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body
, 0, 0)))
1372 else if (GET_CODE (body
) == PARALLEL
1373 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1375 /* 0 outputs, but some clobbers:
1376 body is [(asm_operands ...) (clobber (reg ...))...]. */
1379 /* Make sure all the other parallel things really are clobbers. */
1380 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1381 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1384 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body
, 0, 0));
1390 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1391 copy its operands (both input and output) into the vector OPERANDS,
1392 the locations of the operands within the insn into the vector OPERAND_LOCS,
1393 and the constraints for the operands into CONSTRAINTS.
1394 Write the modes of the operands into MODES.
1395 Return the assembler-template.
1397 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1398 we don't store that info. */
1401 decode_asm_operands (body
, operands
, operand_locs
, constraints
, modes
)
1405 const char **constraints
;
1406 enum machine_mode
*modes
;
1412 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1414 rtx asmop
= SET_SRC (body
);
1415 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1417 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
) + 1;
1419 for (i
= 1; i
< noperands
; i
++)
1422 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
- 1);
1424 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
- 1);
1426 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
- 1);
1428 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
- 1);
1431 /* The output is in the SET.
1432 Its constraint is in the ASM_OPERANDS itself. */
1434 operands
[0] = SET_DEST (body
);
1436 operand_locs
[0] = &SET_DEST (body
);
1438 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1440 modes
[0] = GET_MODE (SET_DEST (body
));
1441 template = ASM_OPERANDS_TEMPLATE (asmop
);
1443 else if (GET_CODE (body
) == ASM_OPERANDS
)
1446 /* No output operands: BODY is (asm_operands ....). */
1448 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1450 /* The input operands are found in the 1st element vector. */
1451 /* Constraints for inputs are in the 2nd element vector. */
1452 for (i
= 0; i
< noperands
; i
++)
1455 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1457 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1459 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1461 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1463 template = ASM_OPERANDS_TEMPLATE (asmop
);
1465 else if (GET_CODE (body
) == PARALLEL
1466 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
)
1468 rtx asmop
= SET_SRC (XVECEXP (body
, 0, 0));
1469 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1470 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1471 int nout
= 0; /* Does not include CLOBBERs. */
1473 /* At least one output, plus some CLOBBERs. */
1475 /* The outputs are in the SETs.
1476 Their constraints are in the ASM_OPERANDS itself. */
1477 for (i
= 0; i
< nparallel
; i
++)
1479 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1480 break; /* Past last SET */
1483 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1485 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1487 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1489 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1493 for (i
= 0; i
< nin
; i
++)
1496 operand_locs
[i
+ nout
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1498 operands
[i
+ nout
] = ASM_OPERANDS_INPUT (asmop
, i
);
1500 constraints
[i
+ nout
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1502 modes
[i
+ nout
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1505 template = ASM_OPERANDS_TEMPLATE (asmop
);
1507 else if (GET_CODE (body
) == PARALLEL
1508 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1510 /* No outputs, but some CLOBBERs. */
1512 rtx asmop
= XVECEXP (body
, 0, 0);
1513 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1515 for (i
= 0; i
< nin
; i
++)
1518 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1520 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1522 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1524 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1527 template = ASM_OPERANDS_TEMPLATE (asmop
);
1533 /* Check if an asm_operand matches it's constraints.
1534 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1537 asm_operand_ok (op
, constraint
)
1539 const char *constraint
;
1543 /* Use constrain_operands after reload. */
1544 if (reload_completed
)
1549 switch (*constraint
++)
1562 case '0': case '1': case '2': case '3': case '4':
1563 case '5': case '6': case '7': case '8': case '9':
1564 /* For best results, our caller should have given us the
1565 proper matching constraint, but we can't actually fail
1566 the check if they didn't. Indicate that results are
1572 if (address_operand (op
, VOIDmode
))
1577 case 'V': /* non-offsettable */
1578 if (memory_operand (op
, VOIDmode
))
1582 case 'o': /* offsettable */
1583 if (offsettable_nonstrict_memref_p (op
))
1588 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1589 excepting those that expand_call created. Further, on some
1590 machines which do not have generalized auto inc/dec, an inc/dec
1591 is not a memory_operand.
1593 Match any memory and hope things are resolved after reload. */
1595 if (GET_CODE (op
) == MEM
1597 || GET_CODE (XEXP (op
, 0)) == PRE_DEC
1598 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
1603 if (GET_CODE (op
) == MEM
1605 || GET_CODE (XEXP (op
, 0)) == PRE_INC
1606 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
1611 #ifndef REAL_ARITHMETIC
1612 /* Match any floating double constant, but only if
1613 we can examine the bits of it reliably. */
1614 if ((HOST_FLOAT_FORMAT
!= TARGET_FLOAT_FORMAT
1615 || HOST_BITS_PER_WIDE_INT
!= BITS_PER_WORD
)
1616 && GET_MODE (op
) != VOIDmode
&& ! flag_pretend_float
)
1622 if (GET_CODE (op
) == CONST_DOUBLE
)
1627 if (GET_CODE (op
) == CONST_DOUBLE
1628 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, 'G'))
1632 if (GET_CODE (op
) == CONST_DOUBLE
1633 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, 'H'))
1638 if (GET_CODE (op
) == CONST_INT
1639 || (GET_CODE (op
) == CONST_DOUBLE
1640 && GET_MODE (op
) == VOIDmode
))
1646 #ifdef LEGITIMATE_PIC_OPERAND_P
1647 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1654 if (GET_CODE (op
) == CONST_INT
1655 || (GET_CODE (op
) == CONST_DOUBLE
1656 && GET_MODE (op
) == VOIDmode
))
1661 if (GET_CODE (op
) == CONST_INT
1662 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'I'))
1666 if (GET_CODE (op
) == CONST_INT
1667 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'J'))
1671 if (GET_CODE (op
) == CONST_INT
1672 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'K'))
1676 if (GET_CODE (op
) == CONST_INT
1677 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'L'))
1681 if (GET_CODE (op
) == CONST_INT
1682 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'M'))
1686 if (GET_CODE (op
) == CONST_INT
1687 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'N'))
1691 if (GET_CODE (op
) == CONST_INT
1692 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'O'))
1696 if (GET_CODE (op
) == CONST_INT
1697 && CONST_OK_FOR_LETTER_P (INTVAL (op
), 'P'))
1705 if (general_operand (op
, VOIDmode
))
1709 #ifdef EXTRA_CONSTRAINT
1711 if (EXTRA_CONSTRAINT (op
, 'Q'))
1715 if (EXTRA_CONSTRAINT (op
, 'R'))
1719 if (EXTRA_CONSTRAINT (op
, 'S'))
1723 if (EXTRA_CONSTRAINT (op
, 'T'))
1727 if (EXTRA_CONSTRAINT (op
, 'U'))
1734 if (GET_MODE (op
) == BLKmode
)
1736 if (register_operand (op
, VOIDmode
))
1745 /* Given an rtx *P, if it is a sum containing an integer constant term,
1746 return the location (type rtx *) of the pointer to that constant term.
1747 Otherwise, return a null pointer. */
1750 find_constant_term_loc (p
)
1754 register enum rtx_code code
= GET_CODE (*p
);
1756 /* If *P IS such a constant term, P is its location. */
1758 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1762 /* Otherwise, if not a sum, it has no constant term. */
1764 if (GET_CODE (*p
) != PLUS
)
1767 /* If one of the summands is constant, return its location. */
1769 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1770 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1773 /* Otherwise, check each summand for containing a constant term. */
1775 if (XEXP (*p
, 0) != 0)
1777 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1782 if (XEXP (*p
, 1) != 0)
1784 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1792 /* Return 1 if OP is a memory reference
1793 whose address contains no side effects
1794 and remains valid after the addition
1795 of a positive integer less than the
1796 size of the object being referenced.
1798 We assume that the original address is valid and do not check it.
1800 This uses strict_memory_address_p as a subroutine, so
1801 don't use it before reload. */
1804 offsettable_memref_p (op
)
1807 return ((GET_CODE (op
) == MEM
)
1808 && offsettable_address_p (1, GET_MODE (op
), XEXP (op
, 0)));
1811 /* Similar, but don't require a strictly valid mem ref:
1812 consider pseudo-regs valid as index or base regs. */
1815 offsettable_nonstrict_memref_p (op
)
1818 return ((GET_CODE (op
) == MEM
)
1819 && offsettable_address_p (0, GET_MODE (op
), XEXP (op
, 0)));
1822 /* Return 1 if Y is a memory address which contains no side effects
1823 and would remain valid after the addition of a positive integer
1824 less than the size of that mode.
1826 We assume that the original address is valid and do not check it.
1827 We do check that it is valid for narrower modes.
1829 If STRICTP is nonzero, we require a strictly valid address,
1830 for the sake of use in reload.c. */
1833 offsettable_address_p (strictp
, mode
, y
)
1835 enum machine_mode mode
;
1838 register enum rtx_code ycode
= GET_CODE (y
);
1842 int (*addressp
) PROTO ((enum machine_mode
, rtx
)) =
1843 (strictp
? strict_memory_address_p
: memory_address_p
);
1845 if (CONSTANT_ADDRESS_P (y
))
1848 /* Adjusting an offsettable address involves changing to a narrower mode.
1849 Make sure that's OK. */
1851 if (mode_dependent_address_p (y
))
1854 /* If the expression contains a constant term,
1855 see if it remains valid when max possible offset is added. */
1857 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
1862 *y2
= plus_constant (*y2
, GET_MODE_SIZE (mode
) - 1);
1863 /* Use QImode because an odd displacement may be automatically invalid
1864 for any wider mode. But it should be valid for a single byte. */
1865 good
= (*addressp
) (QImode
, y
);
1867 /* In any case, restore old contents of memory. */
1872 if (ycode
== PRE_DEC
|| ycode
== PRE_INC
1873 || ycode
== POST_DEC
|| ycode
== POST_INC
)
1876 /* The offset added here is chosen as the maximum offset that
1877 any instruction could need to add when operating on something
1878 of the specified mode. We assume that if Y and Y+c are
1879 valid addresses then so is Y+d for all 0<d<c. */
1881 z
= plus_constant_for_output (y
, GET_MODE_SIZE (mode
) - 1);
1883 /* Use QImode because an odd displacement may be automatically invalid
1884 for any wider mode. But it should be valid for a single byte. */
1885 return (*addressp
) (QImode
, z
);
1888 /* Return 1 if ADDR is an address-expression whose effect depends
1889 on the mode of the memory reference it is used in.
1891 Autoincrement addressing is a typical example of mode-dependence
1892 because the amount of the increment depends on the mode. */
1895 mode_dependent_address_p (addr
)
1896 rtx addr ATTRIBUTE_UNUSED
; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1898 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, win
);
1900 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1901 win
: ATTRIBUTE_UNUSED_LABEL
1905 /* Return 1 if OP is a general operand
1906 other than a memory ref with a mode dependent address. */
1909 mode_independent_operand (op
, mode
)
1910 enum machine_mode mode
;
1915 if (! general_operand (op
, mode
))
1918 if (GET_CODE (op
) != MEM
)
1921 addr
= XEXP (op
, 0);
1922 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, lose
);
1924 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1925 lose
: ATTRIBUTE_UNUSED_LABEL
1929 /* Given an operand OP that is a valid memory reference
1930 which satisfies offsettable_memref_p,
1931 return a new memory reference whose address has been adjusted by OFFSET.
1932 OFFSET should be positive and less than the size of the object referenced.
1936 adj_offsettable_operand (op
, offset
)
1940 register enum rtx_code code
= GET_CODE (op
);
1944 register rtx y
= XEXP (op
, 0);
1947 if (CONSTANT_ADDRESS_P (y
))
1949 new = gen_rtx_MEM (GET_MODE (op
),
1950 plus_constant_for_output (y
, offset
));
1951 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op
);
1955 if (GET_CODE (y
) == PLUS
)
1958 register rtx
*const_loc
;
1962 const_loc
= find_constant_term_loc (&z
);
1965 *const_loc
= plus_constant_for_output (*const_loc
, offset
);
1970 new = gen_rtx_MEM (GET_MODE (op
), plus_constant_for_output (y
, offset
));
1971 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op
);
1977 /* Analyze INSN and fill in recog_data. */
1986 rtx body
= PATTERN (insn
);
1988 recog_data
.n_operands
= 0;
1989 recog_data
.n_alternatives
= 0;
1990 recog_data
.n_dups
= 0;
1992 switch (GET_CODE (body
))
2004 recog_data
.n_operands
= noperands
= asm_noperands (body
);
2007 /* This insn is an `asm' with operands. */
2009 /* expand_asm_operands makes sure there aren't too many operands. */
2010 if (noperands
> MAX_RECOG_OPERANDS
)
2013 /* Now get the operand values and constraints out of the insn. */
2014 decode_asm_operands (body
, recog_data
.operand
,
2015 recog_data
.operand_loc
,
2016 recog_data
.constraints
,
2017 recog_data
.operand_mode
);
2020 const char *p
= recog_data
.constraints
[0];
2021 recog_data
.n_alternatives
= 1;
2023 recog_data
.n_alternatives
+= (*p
++ == ',');
2031 /* Ordinary insn: recognize it, get the operands via insn_extract
2032 and get the constraints. */
2034 icode
= recog_memoized (insn
);
2036 fatal_insn_not_found (insn
);
2038 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
2039 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
2040 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
2042 insn_extract (insn
);
2044 for (i
= 0; i
< noperands
; i
++)
2046 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2047 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
2050 for (i
= 0; i
< noperands
; i
++)
2051 recog_data
.operand_type
[i
]
2052 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
2053 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
2056 if (recog_data
.n_alternatives
> MAX_RECOG_ALTERNATIVES
)
2060 /* After calling extract_insn, you can use this function to extract some
2061 information from the constraint strings into a more usable form.
2062 The collected data is stored in recog_op_alt. */
2064 preprocess_constraints ()
2068 memset (recog_op_alt
, 0, sizeof recog_op_alt
);
2069 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2072 struct operand_alternative
*op_alt
;
2073 const char *p
= recog_data
.constraints
[i
];
2075 op_alt
= recog_op_alt
[i
];
2077 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
2079 op_alt
[j
].class = NO_REGS
;
2080 op_alt
[j
].constraint
= p
;
2081 op_alt
[j
].matches
= -1;
2082 op_alt
[j
].matched
= -1;
2084 if (*p
== '\0' || *p
== ',')
2086 op_alt
[j
].anything_ok
= 1;
2096 while (c
!= ',' && c
!= '\0');
2097 if (c
== ',' || c
== '\0')
2102 case '=': case '+': case '*': case '%':
2103 case 'E': case 'F': case 'G': case 'H':
2104 case 's': case 'i': case 'n':
2105 case 'I': case 'J': case 'K': case 'L':
2106 case 'M': case 'N': case 'O': case 'P':
2107 #ifdef EXTRA_CONSTRAINT
2108 case 'Q': case 'R': case 'S': case 'T': case 'U':
2110 /* These don't say anything we care about. */
2114 op_alt
[j
].reject
+= 6;
2117 op_alt
[j
].reject
+= 600;
2120 op_alt
[j
].earlyclobber
= 1;
2123 case '0': case '1': case '2': case '3': case '4':
2124 case '5': case '6': case '7': case '8': case '9':
2125 op_alt
[j
].matches
= c
- '0';
2126 op_alt
[op_alt
[j
].matches
].matched
= i
;
2130 op_alt
[j
].memory_ok
= 1;
2133 op_alt
[j
].decmem_ok
= 1;
2136 op_alt
[j
].incmem_ok
= 1;
2139 op_alt
[j
].nonoffmem_ok
= 1;
2142 op_alt
[j
].offmem_ok
= 1;
2145 op_alt
[j
].anything_ok
= 1;
2149 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) BASE_REG_CLASS
];
2153 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) GENERAL_REGS
];
2157 op_alt
[j
].class = reg_class_subunion
[(int) op_alt
[j
].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c
)];
2165 /* Check the operands of an insn against the insn's operand constraints
2166 and return 1 if they are valid.
2167 The information about the insn's operands, constraints, operand modes
2168 etc. is obtained from the global variables set up by extract_insn.
2170 WHICH_ALTERNATIVE is set to a number which indicates which
2171 alternative of constraints was matched: 0 for the first alternative,
2172 1 for the next, etc.
2174 In addition, when two operands are match
2175 and it happens that the output operand is (reg) while the
2176 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2177 make the output operand look like the input.
2178 This is because the output operand is the one the template will print.
2180 This is used in final, just before printing the assembler code and by
2181 the routines that determine an insn's attribute.
2183 If STRICT is a positive non-zero value, it means that we have been
2184 called after reload has been completed. In that case, we must
2185 do all checks strictly. If it is zero, it means that we have been called
2186 before reload has completed. In that case, we first try to see if we can
2187 find an alternative that matches strictly. If not, we try again, this
2188 time assuming that reload will fix up the insn. This provides a "best
2189 guess" for the alternative and is used to compute attributes of insns prior
2190 to reload. A negative value of STRICT is used for this internal call. */
2198 constrain_operands (strict
)
2201 const char *constraints
[MAX_RECOG_OPERANDS
];
2202 int matching_operands
[MAX_RECOG_OPERANDS
];
2203 int earlyclobber
[MAX_RECOG_OPERANDS
];
2206 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2207 int funny_match_index
;
2209 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2212 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2214 constraints
[c
] = recog_data
.constraints
[c
];
2215 matching_operands
[c
] = -1;
2218 which_alternative
= 0;
2220 while (which_alternative
< recog_data
.n_alternatives
)
2224 funny_match_index
= 0;
2226 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2228 register rtx op
= recog_data
.operand
[opno
];
2229 enum machine_mode mode
= GET_MODE (op
);
2230 register const char *p
= constraints
[opno
];
2235 earlyclobber
[opno
] = 0;
2237 /* A unary operator may be accepted by the predicate, but it
2238 is irrelevant for matching constraints. */
2239 if (GET_RTX_CLASS (GET_CODE (op
)) == '1')
2242 if (GET_CODE (op
) == SUBREG
)
2244 if (GET_CODE (SUBREG_REG (op
)) == REG
2245 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2246 offset
= SUBREG_WORD (op
);
2247 op
= SUBREG_REG (op
);
2250 /* An empty constraint or empty alternative
2251 allows anything which matched the pattern. */
2252 if (*p
== 0 || *p
== ',')
2255 while (*p
&& (c
= *p
++) != ',')
2258 case '?': case '!': case '*': case '%':
2263 /* Ignore rest of this alternative as far as
2264 constraint checking is concerned. */
2265 while (*p
&& *p
!= ',')
2270 earlyclobber
[opno
] = 1;
2273 case '0': case '1': case '2': case '3': case '4':
2274 case '5': case '6': case '7': case '8': case '9':
2276 /* This operand must be the same as a previous one.
2277 This kind of constraint is used for instructions such
2278 as add when they take only two operands.
2280 Note that the lower-numbered operand is passed first.
2282 If we are not testing strictly, assume that this constraint
2283 will be satisfied. */
2288 rtx op1
= recog_data
.operand
[c
- '0'];
2289 rtx op2
= recog_data
.operand
[opno
];
2291 /* A unary operator may be accepted by the predicate,
2292 but it is irrelevant for matching constraints. */
2293 if (GET_RTX_CLASS (GET_CODE (op1
)) == '1')
2294 op1
= XEXP (op1
, 0);
2295 if (GET_RTX_CLASS (GET_CODE (op2
)) == '1')
2296 op2
= XEXP (op2
, 0);
2298 val
= operands_match_p (op1
, op2
);
2301 matching_operands
[opno
] = c
- '0';
2302 matching_operands
[c
- '0'] = opno
;
2306 /* If output is *x and input is *--x,
2307 arrange later to change the output to *--x as well,
2308 since the output op is the one that will be printed. */
2309 if (val
== 2 && strict
> 0)
2311 funny_match
[funny_match_index
].this = opno
;
2312 funny_match
[funny_match_index
++].other
= c
- '0';
2317 /* p is used for address_operands. When we are called by
2318 gen_reload, no one will have checked that the address is
2319 strictly valid, i.e., that all pseudos requiring hard regs
2320 have gotten them. */
2322 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2327 /* No need to check general_operand again;
2328 it was done in insn-recog.c. */
2330 /* Anything goes unless it is a REG and really has a hard reg
2331 but the hard reg is not in the class GENERAL_REGS. */
2333 || GENERAL_REGS
== ALL_REGS
2334 || GET_CODE (op
) != REG
2335 || (reload_in_progress
2336 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2337 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2344 && GET_CODE (op
) == REG
2345 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2346 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2347 || (GET_CODE (op
) == REG
2348 && ((GENERAL_REGS
== ALL_REGS
2349 && REGNO (op
) < FIRST_PSEUDO_REGISTER
)
2350 || reg_fits_class_p (op
, GENERAL_REGS
,
2356 /* This is used for a MATCH_SCRATCH in the cases when
2357 we don't actually need anything. So anything goes
2363 if (GET_CODE (op
) == MEM
2364 /* Before reload, accept what reload can turn into mem. */
2365 || (strict
< 0 && CONSTANT_P (op
))
2366 /* During reload, accept a pseudo */
2367 || (reload_in_progress
&& GET_CODE (op
) == REG
2368 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2373 if (GET_CODE (op
) == MEM
2374 && (GET_CODE (XEXP (op
, 0)) == PRE_DEC
2375 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
2380 if (GET_CODE (op
) == MEM
2381 && (GET_CODE (XEXP (op
, 0)) == PRE_INC
2382 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
2387 #ifndef REAL_ARITHMETIC
2388 /* Match any CONST_DOUBLE, but only if
2389 we can examine the bits of it reliably. */
2390 if ((HOST_FLOAT_FORMAT
!= TARGET_FLOAT_FORMAT
2391 || HOST_BITS_PER_WIDE_INT
!= BITS_PER_WORD
)
2392 && GET_MODE (op
) != VOIDmode
&& ! flag_pretend_float
)
2395 if (GET_CODE (op
) == CONST_DOUBLE
)
2400 if (GET_CODE (op
) == CONST_DOUBLE
)
2406 if (GET_CODE (op
) == CONST_DOUBLE
2407 && CONST_DOUBLE_OK_FOR_LETTER_P (op
, c
))
2412 if (GET_CODE (op
) == CONST_INT
2413 || (GET_CODE (op
) == CONST_DOUBLE
2414 && GET_MODE (op
) == VOIDmode
))
2417 if (CONSTANT_P (op
))
2422 if (GET_CODE (op
) == CONST_INT
2423 || (GET_CODE (op
) == CONST_DOUBLE
2424 && GET_MODE (op
) == VOIDmode
))
2436 if (GET_CODE (op
) == CONST_INT
2437 && CONST_OK_FOR_LETTER_P (INTVAL (op
), c
))
2441 #ifdef EXTRA_CONSTRAINT
2447 if (EXTRA_CONSTRAINT (op
, c
))
2453 if (GET_CODE (op
) == MEM
2454 && ((strict
> 0 && ! offsettable_memref_p (op
))
2456 && !(CONSTANT_P (op
) || GET_CODE (op
) == MEM
))
2457 || (reload_in_progress
2458 && !(GET_CODE (op
) == REG
2459 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))))
2464 if ((strict
> 0 && offsettable_memref_p (op
))
2465 || (strict
== 0 && offsettable_nonstrict_memref_p (op
))
2466 /* Before reload, accept what reload can handle. */
2468 && (CONSTANT_P (op
) || GET_CODE (op
) == MEM
))
2469 /* During reload, accept a pseudo */
2470 || (reload_in_progress
&& GET_CODE (op
) == REG
2471 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2478 && GET_CODE (op
) == REG
2479 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2480 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2481 || (GET_CODE (op
) == REG
2482 && reg_fits_class_p (op
, REG_CLASS_FROM_LETTER (c
),
2487 constraints
[opno
] = p
;
2488 /* If this operand did not win somehow,
2489 this alternative loses. */
2493 /* This alternative won; the operands are ok.
2494 Change whichever operands this alternative says to change. */
2499 /* See if any earlyclobber operand conflicts with some other
2503 for (eopno
= 0; eopno
< recog_data
.n_operands
; eopno
++)
2504 /* Ignore earlyclobber operands now in memory,
2505 because we would often report failure when we have
2506 two memory operands, one of which was formerly a REG. */
2507 if (earlyclobber
[eopno
]
2508 && GET_CODE (recog_data
.operand
[eopno
]) == REG
)
2509 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2510 if ((GET_CODE (recog_data
.operand
[opno
]) == MEM
2511 || recog_data
.operand_type
[opno
] != OP_OUT
)
2513 /* Ignore things like match_operator operands. */
2514 && *recog_data
.constraints
[opno
] != 0
2515 && ! (matching_operands
[opno
] == eopno
2516 && operands_match_p (recog_data
.operand
[opno
],
2517 recog_data
.operand
[eopno
]))
2518 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2519 recog_data
.operand
[eopno
]))
2524 while (--funny_match_index
>= 0)
2526 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2527 = recog_data
.operand
[funny_match
[funny_match_index
].this];
2534 which_alternative
++;
2537 /* If we are about to reject this, but we are not to test strictly,
2538 try a very loose test. Only return failure if it fails also. */
2540 return constrain_operands (-1);
2545 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2546 is a hard reg in class CLASS when its regno is offset by OFFSET
2547 and changed to mode MODE.
2548 If REG occupies multiple hard regs, all of them must be in CLASS. */
2551 reg_fits_class_p (operand
, class, offset
, mode
)
2553 register enum reg_class
class;
2555 enum machine_mode mode
;
2557 register int regno
= REGNO (operand
);
2558 if (regno
< FIRST_PSEUDO_REGISTER
2559 && TEST_HARD_REG_BIT (reg_class_contents
[(int) class],
2564 for (sr
= HARD_REGNO_NREGS (regno
, mode
) - 1;
2566 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) class],
2575 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2578 split_all_insns (upd_life
)
2585 blocks
= sbitmap_alloc (n_basic_blocks
);
2586 sbitmap_zero (blocks
);
2589 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
2591 basic_block bb
= BASIC_BLOCK (i
);
2594 for (insn
= bb
->head
; insn
; insn
= next
)
2598 /* Can't use `next_real_insn' because that might go across
2599 CODE_LABELS and short-out basic blocks. */
2600 next
= NEXT_INSN (insn
);
2601 if (GET_CODE (insn
) != INSN
)
2604 /* Don't split no-op move insns. These should silently
2605 disappear later in final. Splitting such insns would
2606 break the code that handles REG_NO_CONFLICT blocks. */
2608 else if ((set
= single_set (insn
)) != NULL
2609 && rtx_equal_p (SET_SRC (set
), SET_DEST (set
)))
2611 /* Nops get in the way while scheduling, so delete them
2612 now if register allocation has already been done. It
2613 is too risky to try to do this before register
2614 allocation, and there are unlikely to be very many
2615 nops then anyways. */
2616 if (reload_completed
)
2618 PUT_CODE (insn
, NOTE
);
2619 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2620 NOTE_SOURCE_FILE (insn
) = 0;
2625 /* Split insns here to get max fine-grain parallelism. */
2626 rtx first
= PREV_INSN (insn
);
2627 rtx last
= try_split (PATTERN (insn
), insn
, 1);
2631 SET_BIT (blocks
, i
);
2634 /* try_split returns the NOTE that INSN became. */
2635 first
= NEXT_INSN (first
);
2636 PUT_CODE (insn
, NOTE
);
2637 NOTE_SOURCE_FILE (insn
) = 0;
2638 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2640 if (insn
== bb
->end
)
2648 if (insn
== bb
->end
)
2652 /* ??? When we're called from just after reload, the CFG is in bad
2653 shape, and we may have fallen off the end. This could be fixed
2654 by having reload not try to delete unreachable code. Otherwise
2655 assert we found the end insn. */
2656 if (insn
== NULL
&& upd_life
)
2660 if (changed
&& upd_life
)
2662 compute_bb_for_insn (get_max_uid ());
2663 count_or_remove_death_notes (blocks
, 1);
2664 update_life_info (blocks
, UPDATE_LIFE_LOCAL
, PROP_DEATH_NOTES
);
2667 sbitmap_free (blocks
);
2670 #ifdef HAVE_peephole2
2671 /* This is the last insn we'll allow recog_next_insn to consider. */
2672 static rtx recog_last_allowed_insn
;
2674 /* Return the Nth non-note insn after INSN, or return NULL_RTX if it does
2675 not exist. Used by the recognizer to find the next insn to match in a
2676 multi-insn pattern. */
2678 recog_next_insn (insn
, n
)
2682 if (insn
!= NULL_RTX
)
2686 if (insn
== recog_last_allowed_insn
)
2689 insn
= NEXT_INSN (insn
);
2690 if (insn
== NULL_RTX
)
2693 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
2701 /* Perform the peephole2 optimization pass. */
2703 peephole2_optimize (dump_file
)
2704 FILE *dump_file ATTRIBUTE_UNUSED
;
2710 /* ??? TODO: Arrange with resource.c to start at bb->global_live_at_end
2711 and backtrack insn by insn as we proceed through the block. In this
2712 way we'll not need to keep searching forward from the beginning of
2713 basic blocks to find register life info. */
2715 init_resource_info (NULL
);
2717 blocks
= sbitmap_alloc (n_basic_blocks
);
2718 sbitmap_zero (blocks
);
2721 for (i
= n_basic_blocks
- 1; i
>= 0; --i
)
2723 basic_block bb
= BASIC_BLOCK (i
);
2725 /* Since we don't update life info until the very end, we can't
2726 allow matching instructions that we've replaced before. Walk
2727 backward through the basic block so that we don't have to
2728 care about subsequent life info; recog_last_allowed_insn to
2729 restrict how far forward we will allow the match to proceed. */
2731 recog_last_allowed_insn
= bb
->end
;
2732 for (insn
= bb
->end
; ; insn
= prev
)
2734 prev
= PREV_INSN (insn
);
2735 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
2739 try = peephole2_insns (PATTERN (insn
), insn
, &last_insn
);
2742 flow_delete_insn_chain (insn
, last_insn
);
2743 try = emit_insn_after (try, prev
);
2745 if (last_insn
== bb
->end
)
2747 if (insn
== bb
->head
)
2748 bb
->head
= NEXT_INSN (prev
);
2750 recog_last_allowed_insn
= prev
;
2751 SET_BIT (blocks
, i
);
2756 if (insn
== bb
->head
)
2761 free_resource_info ();
2763 compute_bb_for_insn (get_max_uid ());
2764 count_or_remove_death_notes (blocks
, 1);
2765 update_life_info (blocks
, UPDATE_LIFE_LOCAL
, PROP_DEATH_NOTES
);