/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "insn-config.h"
#include "insn-attr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1	PROTO((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1		PROTO((rtx, rtx *));
static rtx *find_constant_term_loc	PROTO((rtx *));
static int insn_invalid_p		PROTO((rtx));
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}
/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
  return INSN_CODE (insn);
}
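/* Illustrative sketch (not part of the original source): callers normally
   go through the memoized interface rather than calling `recog' directly,
   for example

     if (recog_memoized (insn) < 0)
       fatal_insn_not_found (insn);

   so repeated queries on the same insn cost only the INSN_CODE lookup.  */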
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];

      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
	c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
	return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = (change_t *) xrealloc (changes,
				       sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
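/* Illustrative sketch (not part of the original source): a pass that wants
   to try several related substitutions atomically queues them with IN_GROUP
   set and then commits or discards them all at once, e.g.

     validate_change (insn, &XEXP (src, 0), new_reg, 1);
     validate_change (insn, &XEXP (src, 1), const0_rtx, 1);
     if (! apply_change_group ())
       ;  (every queued change was backed out automatically)

   where INSN, SRC and NEW_REG are hypothetical names for whatever the
   caller happens to be editing.  */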
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

static int
insn_invalid_p (insn)
     rtx insn;
{
  int icode = recog_memoized (insn);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  if (is_asm && ! check_asm_operands (PATTERN (insn)))
    return 1;
  if (! is_asm && icode < 0)
    return 1;

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  return 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (object == 0)
	continue;

      if (GET_CODE (object) == MEM)
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					gen_rtvec (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
    }

  if (i == num_changes)
    {
      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  register int i, j;
  register const char *fmt;
  register rtx x = *loc;
  enum rtx_code code = GET_CODE (x);

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* For commutative or comparison operations, try replacing each argument
     separately and seeing if we made any changes.  If so, put a constant
     argument last.  */
  if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      int prev_changes = num_changes;

      validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
      validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
      if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
	validate_change (object, loc,
			 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
					 : swap_condition (code),
					 GET_MODE (x), XEXP (x, 1),
					 XEXP (x, 0)),
			 1);
    }

  /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
     done the substitution, otherwise we won't.  */

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 plus_constant to try to simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
	validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
			 1);
      break;

    case MINUS:
      if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
	validate_change (object, loc,
			 plus_constant (XEXP (x, 0), - INTVAL (to)),
			 1);
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* In these cases, the operation to be performed depends on the mode
	 of the operand.  If we are replacing the operand with a VOIDmode
	 constant, we lose the information.  So try to simplify the operation
	 in that case.  If it fails, substitute in something that we know
	 won't be recognized.  */
      if (GET_MODE (to) == VOIDmode
	  && (XEXP (x, 0) == from
	      || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
		  && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
		  && REGNO (XEXP (x, 0)) == REGNO (from))))
	{
	  rtx new = simplify_unary_operation (code, GET_MODE (x), to,
					      GET_MODE (from));
	  if (new == 0)
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

	  validate_change (object, loc, new, 1);
	}
      break;

    case SUBREG:
      /* If we have a SUBREG of a register that we are replacing and we are
	 replacing it with a MEM, make a new MEM and try replacing the
	 SUBREG with it.  Don't do this if the MEM has a mode-dependent address
	 or if we would be widening it.  */

      if (SUBREG_REG (x) == from
	  && GET_CODE (from) == REG
	  && GET_CODE (to) == MEM
	  && ! mode_dependent_address_p (XEXP (to, 0))
	  && ! MEM_VOLATILE_P (to)
	  && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
	{
	  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
	  enum machine_mode mode = GET_MODE (x);
	  rtx new;

	  if (BYTES_BIG_ENDIAN)
	    offset += (MIN (UNITS_PER_WORD,
			    GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
		       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));

	  new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
	  MEM_COPY_ATTRIBUTES (new, to);
	  validate_change (object, loc, new, 1);
	}
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && ! mode_dependent_address_p (XEXP (to, 0))
	  && ! MEM_VOLATILE_P (to))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (to);
	  int pos = INTVAL (XEXP (x, 2));

	  if (code == ZERO_EXTRACT)
	    {
	      wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }

	  if (code == SIGN_EXTRACT)
	    {
	      wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
			  - offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = gen_rtx_MEM (wanted_mode,
				    plus_constant (XEXP (to, 0), offset));
	      RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
	      MEM_COPY_ATTRIBUTES (newmem, to);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}
      break;

    default:
      break;
    }

  /* For commutative or comparison operations we've already performed
     replacements.  Don't try to perform them again.  */
  if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
    {
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
	  else if (fmt[i] == 'E')
	    for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	      validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
	}
    }
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
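/* Illustrative sketch (not part of the original source): a typical caller
   swaps one register for another throughout an insn and keeps the result
   only if the insn still matches its pattern, e.g.

     if (validate_replace_rtx (old_reg, new_reg, insn))
       ;  (INSN now uses NEW_REG and has been re-recognized)

   OLD_REG and NEW_REG are hypothetical placeholders in this example.  */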
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  After all changes have been made, validate by seeing if
   INSN is still valid.  */

int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
      || GET_CODE (PATTERN (insn)) != SET)
    abort ();

  validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
  if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
    validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
			    from, to, insn);

  return apply_change_group ();
}
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		/* Duplicate usage.  */
		return 0;
	    }
	}
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will return usually zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
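/* Illustrative sketch (not part of the original source): combine-style
   callers use the returned location to rewrite the lone use in place, e.g.

     rtx *use = find_single_use (dest, insn, &use_insn);
     if (use != 0 && validate_change (use_insn, use, new_src, 0))
       ;  (the single use of DEST was replaced and USE_INSN re-recognized)

   USE_INSN and NEW_SRC are hypothetical names used only for this example.  */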
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   OP has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  register enum rtx_code code = GET_CODE (op);
  int mode_altering_drug = 0;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	return 0;
#endif

      op = SUBREG_REG (op);
      code = GET_CODE (op);

      /* No longer needed, since (SUBREG (MEM...))
	 will load the MEM into a reload reg in the MEM's own mode.  */
      mode_altering_drug = 1;
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      if (GET_CODE (y) == ADDRESSOF)
	return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  if (mode_altering_drug)
    return ! mode_dependent_address_p (XEXP (op, 0));
  return 1;
}
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_SIZE
      if (GET_CODE (SUBREG_REG (op)) == REG
	  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
	  && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
				REGNO (SUBREG_REG (op)))
	  && (GET_MODE_SIZE (mode)
	      != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      op = SUBREG_REG (op);
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
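/* Illustrative note (not part of the original source): predicates such as
   register_operand are normally referenced from match_operand expressions
   in the machine description, e.g.

     (match_operand:SI 0 "register_operand" "r")

   which makes the generated recognizer call
   register_operand (operands[0], SImode).  */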
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (op, mode)
     register rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (GET_CODE (op) == REG
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return GET_CODE (op) == CONST_INT;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_PUSH_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
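/* Illustrative note (not part of the original source): on a machine where
   STACK_PUSH_CODE is PRE_DEC, a push operand looks like

     (mem:SI (pre_dec:SI (reg:SI stack_pointer)))

   i.e. a MEM in the right mode whose address applies STACK_PUSH_CODE to the
   stack pointer, which is exactly what this predicate checks for.  */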
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode;
     register rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
      rtx inner = SUBREG_REG (op);

      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  if (GET_CODE (body) == ASM_OPERANDS)
    /* No output operands: return number of input operands.  */
    return ASM_OPERANDS_INPUT_LENGTH (body);
  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
    return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      /* Multiple output operands, or 1 output plus some clobbers:
	 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
      int i;
      int n_sets;

      /* Count backwards through CLOBBERs to determine number of SETs.  */
      for (i = XVECLEN (body, 0); i > 0; i--)
	{
	  if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
	    break;
	  if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
	    return -1;
	}

      /* N_SETS is now number of output operands.  */
      n_sets = i;

      /* Verify that all the SETs we have
	 came from a single original asm_operands insn
	 (so that invalid combinations are blocked).  */
      for (i = 0; i < n_sets; i++)
	{
	  rtx elt = XVECEXP (body, 0, i);
	  if (GET_CODE (elt) != SET)
	    return -1;
	  if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
	    return -1;
	  /* If these ASM_OPERANDS rtx's came from different original insns
	     then they aren't allowed together.  */
	  if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
	      != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
	    return -1;
	}
      return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
	      + n_sets);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* 0 outputs, but some clobbers:
	 body is [(asm_operands ...) (clobber (reg ...))...].  */
      int i;

      /* Make sure all the other parallel things really are clobbers.  */
      for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	  return -1;

      return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
    }
  else
    return -1;
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  register int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    switch (*constraint++)
      {
      case '0': case '1': case '2': case '3': case '4':
      case '5': case '6': case '7': case '8': case '9':
	/* For best results, our caller should have given us the
	   proper matching constraint, but we can't actually fail
	   the check if they didn't.  Indicate that results are
	   inconclusive.  */
	return -1;

      case 'p':
	if (address_operand (op, VOIDmode))
	  return 1;
	break;

      case 'm':
      case 'V': /* non-offsettable */
	if (memory_operand (op, VOIDmode))
	  return 1;
	break;

      case 'o': /* offsettable */
	if (offsettable_nonstrict_memref_p (op))
	  return 1;
	break;

      case '<':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  Further, on some
	   machines which do not have generalized auto inc/dec, an inc/dec
	   is not a memory_operand.

	   Match any memory and hope things are resolved after reload.  */
	if (GET_CODE (op) == MEM
	    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
		|| GET_CODE (XEXP (op, 0)) == POST_DEC))
	  return 1;
	break;

      case '>':
	if (GET_CODE (op) == MEM
	    && (GET_CODE (XEXP (op, 0)) == PRE_INC
		|| GET_CODE (XEXP (op, 0)) == POST_INC))
	  return 1;
	break;

      case 'E':
#ifndef REAL_ARITHMETIC
	/* Match any floating double constant, but only if
	   we can examine the bits of it reliably.  */
	if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
	     || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
	    && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
	  break;
#endif
	if (GET_CODE (op) == CONST_DOUBLE)
	  return 1;
	break;

      case 'G':
	if (GET_CODE (op) == CONST_DOUBLE
	    && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	  return 1;
	break;
      case 'H':
	if (GET_CODE (op) == CONST_DOUBLE
	    && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	  return 1;
	break;

      case 's':
	if (GET_CODE (op) == CONST_INT
	    || (GET_CODE (op) == CONST_DOUBLE
		&& GET_MODE (op) == VOIDmode))
	  break;

      case 'i':
	if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    )
	  return 1;
	break;

      case 'n':
	if (GET_CODE (op) == CONST_INT
	    || (GET_CODE (op) == CONST_DOUBLE
		&& GET_MODE (op) == VOIDmode))
	  return 1;
	break;

      case 'I':
	if (GET_CODE (op) == CONST_INT
	    && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	  return 1;
	break;
      case 'J':
	if (GET_CODE (op) == CONST_INT
	    && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	  return 1;
	break;
      case 'K':
	if (GET_CODE (op) == CONST_INT
	    && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	  return 1;
	break;
      case 'L':
	if (GET_CODE (op) == CONST_INT
	    && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	  return 1;
	break;
      case 'M':
	if (GET_CODE (op) == CONST_INT
	    && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	  return 1;
	break;
      case 'N':
	if (GET_CODE (op) == CONST_INT
	    && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	  return 1;
	break;
      case 'O':
	if (GET_CODE (op) == CONST_INT
	    && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	  return 1;
	break;
      case 'P':
	if (GET_CODE (op) == CONST_INT
	    && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	  return 1;
	break;

      case 'g':
	if (general_operand (op, VOIDmode))
	  return 1;
	break;

#ifdef EXTRA_CONSTRAINT
      case 'Q':
	if (EXTRA_CONSTRAINT (op, 'Q'))
	  return 1;
	break;
      case 'R':
	if (EXTRA_CONSTRAINT (op, 'R'))
	  return 1;
	break;
      case 'S':
	if (EXTRA_CONSTRAINT (op, 'S'))
	  return 1;
	break;
      case 'T':
	if (EXTRA_CONSTRAINT (op, 'T'))
	  return 1;
	break;
      case 'U':
	if (EXTRA_CONSTRAINT (op, 'U'))
	  return 1;
	break;
#endif

      default:
	if (GET_MODE (op) == BLKmode)
	  break;
	if (register_operand (op, VOIDmode))
	  return 1;
	break;
      }

  return 0;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (p)
     rtx *p;
{
  register rtx *tem;
  register enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     register rtx y;
{
  register enum rtx_code ycode = GET_CODE (y);
  register rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) PROTO ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (ycode == PRE_DEC || ycode == PRE_INC
      || ycode == POST_DEC || ycode == POST_INC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  */

  z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (addr)
     rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
  return 0;
}
/* Given an operand OP that is a valid memory reference
   which satisfies offsettable_memref_p,
   return a new memory reference whose address has been adjusted by OFFSET.
   OFFSET should be positive and less than the size of the object referenced.  */

rtx
adj_offsettable_operand (op, offset)
     rtx op;
     int offset;
{
  register enum rtx_code code = GET_CODE (op);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      register rtx new;

      if (CONSTANT_ADDRESS_P (y))
	{
	  new = gen_rtx_MEM (GET_MODE (op),
			     plus_constant_for_output (y, offset));
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
	  return new;
	}

      if (GET_CODE (y) == PLUS)
	{
	  rtx z = y;
	  register rtx *const_loc;

	  op = copy_rtx (op);
	  z = XEXP (op, 0);
	  const_loc = find_constant_term_loc (&z);
	  if (const_loc)
	    {
	      *const_loc = plus_constant_for_output (*const_loc, offset);
	      return op;
	    }
	}

      new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
      return new;
    }
  abort ();
}
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (insn)
     rtx insn;
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
    case PARALLEL:
    case ASM_OPERANDS:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  if (noperands > MAX_RECOG_OPERANDS)
	    abort ();

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode);
	  if (noperands > 0)
	    {
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}

      /* FALLTHROUGH */

    default:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	}
    }

  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
    abort ();
}
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

void
preprocess_constraints ()
{
  int i;

  memset (recog_op_alt, 0, sizeof recog_op_alt);

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
	{
	  op_alt[j].class = NO_REGS;
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[j].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p++;
	      if (c == '#')
		do
		  c = *p++;
		while (c != ',' && c != '\0');

	      if (c == ',' || c == '\0')
		break;

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
#ifdef EXTRA_CONSTRAINT
		case 'Q': case 'R': case 'S': case 'T': case 'U':
#endif
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[j].reject += 6;
		  break;
		case '!':
		  op_alt[j].reject += 600;
		  break;
		case '&':
		  op_alt[j].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  op_alt[j].matches = c - '0';
		  op_alt[op_alt[j].matches].matched = i;
		  break;

		case 'm':
		  op_alt[j].memory_ok = 1;
		  break;
		case '<':
		  op_alt[j].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[j].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[j].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[j].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[j].anything_ok = 1;
		  break;

		case 'p':
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
		  break;

		case 'g': case 'r':
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
		  break;

		default:
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
		  break;
		}
	    }
	}
    }
}
2172 /* Check the operands of an insn against the insn's operand constraints
2173 and return 1 if they are valid.
2174 The information about the insn's operands, constraints, operand modes
2175 etc. is obtained from the global variables set up by extract_insn.
2177 WHICH_ALTERNATIVE is set to a number which indicates which
2178 alternative of constraints was matched: 0 for the first alternative,
2179 1 for the next, etc.
2181 In addition, when two operands are match
2182 and it happens that the output operand is (reg) while the
2183 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2184 make the output operand look like the input.
2185 This is because the output operand is the one the template will print.
2187 This is used in final, just before printing the assembler code and by
2188 the routines that determine an insn's attribute.
2190 If STRICT is a positive non-zero value, it means that we have been
2191 called after reload has been completed. In that case, we must
2192 do all checks strictly. If it is zero, it means that we have been called
2193 before reload has completed. In that case, we first try to see if we can
2194 find an alternative that matches strictly. If not, we try again, this
2195 time assuming that reload will fix up the insn. This provides a "best
2196 guess" for the alternative and is used to compute attributes of insns prior
2197 to reload. A negative value of STRICT is used for this internal call. */
2205 constrain_operands (strict
)
2208 const char *constraints
[MAX_RECOG_OPERANDS
];
2209 int matching_operands
[MAX_RECOG_OPERANDS
];
2210 int earlyclobber
[MAX_RECOG_OPERANDS
];
2213 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2214 int funny_match_index
;
2216 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2219 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2221 constraints
[c
] = recog_data
.constraints
[c
];
2222 matching_operands
[c
] = -1;
2225 which_alternative
= 0;
2227 while (which_alternative
< recog_data
.n_alternatives
)
2231 funny_match_index
= 0;
2233 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2235 register rtx op
= recog_data
.operand
[opno
];
2236 enum machine_mode mode
= GET_MODE (op
);
2237 register const char *p
= constraints
[opno
];
2242 earlyclobber
[opno
] = 0;
2244 /* A unary operator may be accepted by the predicate, but it
2245 is irrelevant for matching constraints. */
2246 if (GET_RTX_CLASS (GET_CODE (op
)) == '1')
2249 if (GET_CODE (op
) == SUBREG
)
2251 if (GET_CODE (SUBREG_REG (op
)) == REG
2252 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2253 offset
= SUBREG_WORD (op
);
2254 op
= SUBREG_REG (op
);
2257 /* An empty constraint or empty alternative
2258 allows anything which matched the pattern. */
2259 if (*p
== 0 || *p
== ',')
2262 while (*p
&& (c
= *p
++) != ',')
2265 case '?': case '!': case '*': case '%':
2270 /* Ignore rest of this alternative as far as
2271 constraint checking is concerned. */
2272 while (*p
&& *p
!= ',')
2277 earlyclobber
[opno
] = 1;
2280 case '0': case '1': case '2': case '3': case '4':
2281 case '5': case '6': case '7': case '8': case '9':
2283 /* This operand must be the same as a previous one.
2284 This kind of constraint is used for instructions such
2285 as add when they take only two operands.
2287 Note that the lower-numbered operand is passed first.
2289 If we are not testing strictly, assume that this constraint
2290 will be satisfied. */
2295 rtx op1
= recog_data
.operand
[c
- '0'];
2296 rtx op2
= recog_data
.operand
[opno
];
2298 /* A unary operator may be accepted by the predicate,
2299 but it is irrelevant for matching constraints. */
2300 if (GET_RTX_CLASS (GET_CODE (op1
)) == '1')
2301 op1
= XEXP (op1
, 0);
2302 if (GET_RTX_CLASS (GET_CODE (op2
)) == '1')
2303 op2
= XEXP (op2
, 0);
2305 val
= operands_match_p (op1
, op2
);
2308 matching_operands
[opno
] = c
- '0';
2309 matching_operands
[c
- '0'] = opno
;
2313 /* If output is *x and input is *--x,
2314 arrange later to change the output to *--x as well,
2315 since the output op is the one that will be printed. */
2316 if (val
== 2 && strict
> 0)
2318 funny_match
[funny_match_index
].this = opno
;
2319 funny_match
[funny_match_index
++].other
= c
- '0';
2324 /* p is used for address_operands. When we are called by
2325 gen_reload, no one will have checked that the address is
2326 strictly valid, i.e., that all pseudos requiring hard regs
2327 have gotten them. */
2329 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2334 /* No need to check general_operand again;
2335 it was done in insn-recog.c. */
2337 /* Anything goes unless it is a REG and really has a hard reg
2338 but the hard reg is not in the class GENERAL_REGS. */
2340 || GENERAL_REGS
== ALL_REGS
2341 || GET_CODE (op
) != REG
2342 || (reload_in_progress
2343 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2344 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2351 && GET_CODE (op
) == REG
2352 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2353 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2354 || (GET_CODE (op
) == REG
2355 && ((GENERAL_REGS
== ALL_REGS
2356 && REGNO (op
) < FIRST_PSEUDO_REGISTER
)
2357 || reg_fits_class_p (op
, GENERAL_REGS
,
2363 /* This is used for a MATCH_SCRATCH in the cases when
2364 we don't actually need anything. So anything goes
              if (GET_CODE (op) == MEM
                  /* Before reload, accept what reload can turn into mem.  */
                  || (strict < 0 && CONSTANT_P (op))
                  /* During reload, accept a pseudo.  */
                  || (reload_in_progress && GET_CODE (op) == REG
                      && REGNO (op) >= FIRST_PSEUDO_REGISTER))

              if (GET_CODE (op) == MEM
                  && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                      || GET_CODE (XEXP (op, 0)) == POST_DEC))

              if (GET_CODE (op) == MEM
                  && (GET_CODE (XEXP (op, 0)) == PRE_INC
                      || GET_CODE (XEXP (op, 0)) == POST_INC))
#ifndef REAL_ARITHMETIC
              /* Match any CONST_DOUBLE, but only if
                 we can examine the bits of it reliably.  */
              if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
                   || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
                  && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
                break;
#endif
              if (GET_CODE (op) == CONST_DOUBLE)

              if (GET_CODE (op) == CONST_DOUBLE)

              if (GET_CODE (op) == CONST_DOUBLE
                  && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
              if (GET_CODE (op) == CONST_INT
                  || (GET_CODE (op) == CONST_DOUBLE
                      && GET_MODE (op) == VOIDmode))

              if (CONSTANT_P (op))

              if (GET_CODE (op) == CONST_INT
                  || (GET_CODE (op) == CONST_DOUBLE
                      && GET_MODE (op) == VOIDmode))

              if (GET_CODE (op) == CONST_INT
                  && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
#ifdef EXTRA_CONSTRAINT
              if (EXTRA_CONSTRAINT (op, c))
#endif

              if (GET_CODE (op) == MEM
                  && ((strict > 0 && ! offsettable_memref_p (op))
                      || (strict < 0
                          && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
                      || (reload_in_progress
                          && !(GET_CODE (op) == REG
                               && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
              if ((strict > 0 && offsettable_memref_p (op))
                  || (strict == 0 && offsettable_nonstrict_memref_p (op))
                  /* Before reload, accept what reload can handle.  */
                  || (strict < 0
                      && (CONSTANT_P (op) || GET_CODE (op) == MEM))
                  /* During reload, accept a pseudo.  */
                  || (reload_in_progress && GET_CODE (op) == REG
                      && REGNO (op) >= FIRST_PSEUDO_REGISTER))

                  && GET_CODE (op) == REG
                  && REGNO (op) >= FIRST_PSEUDO_REGISTER)
              || (strict == 0 && GET_CODE (op) == SCRATCH)
              || (GET_CODE (op) == REG
                  && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
                                       offset, mode)))
          constraints[opno] = p;

          /* If this operand did not win somehow,
             this alternative loses.  */
        }

      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      /* See if any earlyclobber operand conflicts with some other
         operand.  */

      for (eopno = 0; eopno < recog_data.n_operands; eopno++)
        /* Ignore earlyclobber operands now in memory,
           because we would often report failure when we have
           two memory operands, one of which was formerly a REG.  */
        if (earlyclobber[eopno]
            && GET_CODE (recog_data.operand[eopno]) == REG)
          for (opno = 0; opno < recog_data.n_operands; opno++)
            if ((GET_CODE (recog_data.operand[opno]) == MEM
                 || recog_data.operand_type[opno] != OP_OUT)
                /* Ignore things like match_operator operands.  */
                && *recog_data.constraints[opno] != 0
                && ! (matching_operands[opno] == eopno
                      && operands_match_p (recog_data.operand[opno],
                                           recog_data.operand[eopno]))
                && ! safe_from_earlyclobber (recog_data.operand[opno],
                                             recog_data.operand[eopno]))
      while (--funny_match_index >= 0)
        {
          recog_data.operand[funny_match[funny_match_index].other]
            = recog_data.operand[funny_match[funny_match_index].this];
        }
      which_alternative++;
    }

  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);

  return 0;
}
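/* Illustrative sketch, not part of the original file: the earlyclobber
   loop in constrain_operands above asks, for an earlyclobber output
   EOPNO and another operand OPNO taken from recog_data, roughly the
   question below.  The helper name is hypothetical and the test is
   deliberately simplified.  */

static int
earlyclobber_conflict_p (eopno, opno)
     int eopno, opno;
{
  rtx clobbered = recog_data.operand[eopno];
  rtx other = recog_data.operand[opno];

  /* Only REG outputs are considered; an operand that matches the
     earlyclobber operand, or is otherwise known safe, does not
     conflict.  */
  return (GET_CODE (clobbered) == REG
          && ! operands_match_p (other, clobbered)
          && ! safe_from_earlyclobber (other, clobbered));
}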
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (operand, class, offset, mode)
     rtx operand;
     register enum reg_class class;
     int offset;
     enum machine_mode mode;
{
  register int regno = REGNO (operand);

  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
                            regno + offset))
    {
      register int sr;

      regno += offset;
      for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
           sr > 0; sr--)
        if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
                                 regno + sr))
          break;

      return sr == 0;
    }

  return 0;
}
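/* Illustrative sketch, not part of the original file: a typical caller
   checks a hard-register operand against the class named by a constraint
   letter, much as the default case of constrain_operands does.  The
   helper name is hypothetical, and the offset of 0 assumes the operand
   was not a SUBREG of a hard register.  */

static int
hard_reg_matches_letter_p (op, c)
     rtx op;
     int c;
{
  if (GET_CODE (op) != REG)
    return 0;

  return reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
                           0, GET_MODE (op));
}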
/* Split all insns in the function.  If UPD_LIFE, update life info after.  */

void
split_all_insns (upd_life)
     int upd_life;
{
  sbitmap blocks;
  int changed = 0;
  int i;

  blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);

  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, next, set;
      for (insn = bb->head; insn; insn = next)
        {
          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          if (GET_CODE (insn) != INSN)
            ;

          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles REG_NO_CONFLICT blocks.  */
          else if ((set = single_set (insn)) != NULL
                   && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyway.  */
              if (reload_completed)
                {
                  PUT_CODE (insn, NOTE);
                  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                  NOTE_SOURCE_FILE (insn) = 0;
                }
            }
          else
            {
              /* Split insns here to get max fine-grain parallelism.  */
              rtx first = PREV_INSN (insn);
              rtx last = try_split (PATTERN (insn), insn, 1);

              if (last != insn)
                {
                  SET_BIT (blocks, i);
                  changed = 1;

                  /* try_split returns the NOTE that INSN became.  */
                  first = NEXT_INSN (first);
                  PUT_CODE (insn, NOTE);
                  NOTE_SOURCE_FILE (insn) = 0;
                  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;

                  if (insn == bb->end)
                    {
                      bb->end = last;
                      break;
                    }
                }
            }

          if (insn == bb->end)
            break;
        }
      /* ??? When we're called from just after reload, the CFG is in bad
         shape, and we may have fallen off the end.  This could be fixed
         by having reload not try to delete unreachable code.  Otherwise
         assert we found the end insn.  */
      if (insn == NULL && upd_life)
        abort ();
    }

  if (changed && upd_life)
    {
      compute_bb_for_insn (get_max_uid ());
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }

  sbitmap_free (blocks);
}
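/* Illustrative sketch, not part of the original file: both places in
   split_all_insns above turn an insn into a deleted note with the same
   three steps; the helper below only names the idiom and is
   hypothetical.  */

static void
turn_insn_into_deleted_note (insn)
     rtx insn;
{
  PUT_CODE (insn, NOTE);                /* the insn becomes a NOTE rtx */
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (insn) = 0;          /* deleted notes carry no file */
}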
#ifdef HAVE_peephole2
/* This is the last insn we'll allow recog_next_insn to consider.  */
static rtx recog_last_allowed_insn;

/* Return the Nth non-note insn after INSN, or return NULL_RTX if it does
   not exist.  Used by the recognizer to find the next insn to match in a
   multi-insn pattern.  */
rtx
recog_next_insn (insn, n)
     rtx insn;
     int n;
{
  if (insn != NULL_RTX)
    {
      while (n > 0)
        {
          if (insn == recog_last_allowed_insn)
            return NULL_RTX;

          insn = NEXT_INSN (insn);
          if (insn == NULL_RTX)
            break;

          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            n -= 1;
        }
    }

  return insn;
}
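/* Illustrative sketch, not part of the original file: a generated
   multi-insn matcher advances through its window with recog_next_insn;
   a NULL_RTX result means the window is exhausted, either because no
   further real insn exists or because recog_last_allowed_insn was
   reached.  The helper name is hypothetical.  */

static int
window_has_n_more_insns_p (insn, n)
     rtx insn;
     int n;
{
  return recog_next_insn (insn, n) != NULL_RTX;
}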
/* Perform the peephole2 optimization pass.  */

void
peephole2_optimize (dump_file)
     FILE *dump_file ATTRIBUTE_UNUSED;
{
  rtx insn, prev, try, last_insn;
  int i;
  sbitmap blocks;

  /* ??? TODO: Arrange with resource.c to start at bb->global_live_at_end
     and backtrack insn by insn as we proceed through the block.  In this
     way we'll not need to keep searching forward from the beginning of
     basic blocks to find register life info.  */

  init_resource_info (NULL);

  blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);
  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb = BASIC_BLOCK (i);

      /* Since we don't update life info until the very end, we can't
         allow matching instructions that we've replaced before.  Walk
         backward through the basic block so that we don't have to
         care about subsequent life info; recog_last_allowed_insn is
         used to restrict how far forward the match may proceed.  */

      recog_last_allowed_insn = NEXT_INSN (bb->end);
      for (insn = bb->end; ; insn = prev)
        {
          prev = PREV_INSN (insn);

          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            try = peephole2_insns (PATTERN (insn), insn, &last_insn);
          if (try != NULL)
            {
              flow_delete_insn_chain (insn, last_insn);
              try = emit_insn_after (try, prev);

              if (last_insn == bb->end)
                bb->end = try;
              if (insn == bb->head)
                bb->head = NEXT_INSN (prev);

              recog_last_allowed_insn = NEXT_INSN (prev);
              SET_BIT (blocks, i);
            }
          if (insn == bb->head)
            break;
        }
    }

  free_resource_info ();

  compute_bb_for_insn (get_max_uid ());
  count_or_remove_death_notes (blocks, 1);
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);