1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
39 #include "basic-block.h"
43 #ifndef STACK_PUSH_CODE
44 #ifdef STACK_GROWS_DOWNWARD
45 #define STACK_PUSH_CODE PRE_DEC
47 #define STACK_PUSH_CODE PRE_INC
51 #ifndef STACK_POP_CODE
52 #ifdef STACK_GROWS_DOWNWARD
53 #define STACK_POP_CODE POST_INC
55 #define STACK_POP_CODE POST_DEC
59 static void validate_replace_rtx_1 (rtx
*, rtx
, rtx
, rtx
);
60 static rtx
*find_single_use_1 (rtx
, rtx
*);
61 static void validate_replace_src_1 (rtx
*, void *);
62 static rtx
split_insn (rtx
);
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in regclass.c and final.c and reload.c.
70 init_recog and init_recog_no_volatile are responsible for setting this. */
74 struct recog_data recog_data
;
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt
[MAX_RECOG_OPERANDS
][MAX_RECOG_ALTERNATIVES
];
80 /* On return from `constrain_operands', indicate which alternative
83 int which_alternative
;
85 /* Nonzero after end of reload pass.
86 Set to 1 or 0 by toplev.c.
87 Controls the significance of (SUBREG (MEM)). */
91 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
92 int epilogue_completed
;
94 /* Initialize data used by the function `recog'.
95 This must be called once in the compilation of a function
96 before any insn recognition may be done in the function. */
99 init_recog_no_volatile (void)
111 /* Check that X is an insn-body for an `asm' with operands
112 and that the operands mentioned in it are legitimate. */
115 check_asm_operands (rtx x
)
119 const char **constraints
;
122 /* Post-reload, be more strict with things. */
123 if (reload_completed
)
125 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
126 extract_insn (make_insn_raw (x
));
127 constrain_operands (1);
128 return which_alternative
>= 0;
131 noperands
= asm_noperands (x
);
137 operands
= alloca (noperands
* sizeof (rtx
));
138 constraints
= alloca (noperands
* sizeof (char *));
140 decode_asm_operands (x
, operands
, NULL
, constraints
, NULL
);
142 for (i
= 0; i
< noperands
; i
++)
144 const char *c
= constraints
[i
];
147 if (ISDIGIT ((unsigned char) c
[0]) && c
[1] == '\0')
148 c
= constraints
[c
[0] - '0'];
150 if (! asm_operand_ok (operands
[i
], c
))
157 /* Static data for the next two routines. */
159 typedef struct change_t
167 static change_t
*changes
;
168 static int changes_allocated
;
170 static int num_changes
= 0;
172 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
173 at which NEW will be placed. If OBJECT is zero, no validation is done,
174 the change is simply made.
176 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
177 will be called with the address and mode as parameters. If OBJECT is
178 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
181 IN_GROUP is nonzero if this is part of a group of changes that must be
182 performed as a group. In that case, the changes will be stored. The
183 function `apply_change_group' will validate and apply the changes.
185 If IN_GROUP is zero, this is a single change. Try to recognize the insn
186 or validate the memory reference with the change applied. If the result
187 is not valid for the machine, suppress the change and return zero.
188 Otherwise, perform the change and return 1. */
191 validate_change (rtx object
, rtx
*loc
, rtx
new, int in_group
)
195 if (old
== new || rtx_equal_p (old
, new))
198 gcc_assert (in_group
!= 0 || num_changes
== 0);
202 /* Save the information describing this change. */
203 if (num_changes
>= changes_allocated
)
205 if (changes_allocated
== 0)
206 /* This value allows for repeated substitutions inside complex
207 indexed addresses, or changes in up to 5 insns. */
208 changes_allocated
= MAX_RECOG_OPERANDS
* 5;
210 changes_allocated
*= 2;
212 changes
= xrealloc (changes
, sizeof (change_t
) * changes_allocated
);
215 changes
[num_changes
].object
= object
;
216 changes
[num_changes
].loc
= loc
;
217 changes
[num_changes
].old
= old
;
219 if (object
&& !MEM_P (object
))
221 /* Set INSN_CODE to force rerecognition of insn. Save old code in
223 changes
[num_changes
].old_code
= INSN_CODE (object
);
224 INSN_CODE (object
) = -1;
229 /* If we are making a group of changes, return 1. Otherwise, validate the
230 change group we made. */
235 return apply_change_group ();
238 /* This subroutine of apply_change_group verifies whether the changes to INSN
239 were valid; i.e. whether INSN can still be recognized. */
242 insn_invalid_p (rtx insn
)
244 rtx pat
= PATTERN (insn
);
245 int num_clobbers
= 0;
246 /* If we are before reload and the pattern is a SET, see if we can add
248 int icode
= recog (pat
, insn
,
249 (GET_CODE (pat
) == SET
250 && ! reload_completed
&& ! reload_in_progress
)
251 ? &num_clobbers
: 0);
252 int is_asm
= icode
< 0 && asm_noperands (PATTERN (insn
)) >= 0;
255   /* If this is an asm and the operands aren't legal, then fail.  Likewise if
256      this is not an asm and the insn wasn't recognized.  */
257 if ((is_asm
&& ! check_asm_operands (PATTERN (insn
)))
258 || (!is_asm
&& icode
< 0))
261 /* If we have to add CLOBBERs, fail if we have to add ones that reference
262 hard registers since our callers can't know if they are live or not.
263 Otherwise, add them. */
264 if (num_clobbers
> 0)
268 if (added_clobbers_hard_reg_p (icode
))
271 newpat
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num_clobbers
+ 1));
272 XVECEXP (newpat
, 0, 0) = pat
;
273 add_clobbers (newpat
, icode
);
274 PATTERN (insn
) = pat
= newpat
;
277 /* After reload, verify that all constraints are satisfied. */
278 if (reload_completed
)
282 if (! constrain_operands (1))
286 INSN_CODE (insn
) = icode
;
290 /* Return number of changes made and not validated yet. */
292 num_changes_pending (void)
297 /* Apply a group of changes previously issued with `validate_change'.
298 Return 1 if all changes are valid, zero otherwise. */
301 apply_change_group (void)
304 rtx last_validated
= NULL_RTX
;
306 /* The changes have been applied and all INSN_CODEs have been reset to force
309 The changes are valid if we aren't given an object, or if we are
310 given a MEM and it still is a valid address, or if this is in insn
311 and it is recognized. In the latter case, if reload has completed,
312 we also require that the operands meet the constraints for
315 for (i
= 0; i
< num_changes
; i
++)
317 rtx object
= changes
[i
].object
;
319 /* If there is no object to test or if it is the same as the one we
320 already tested, ignore it. */
321 if (object
== 0 || object
== last_validated
)
326 if (! memory_address_p (GET_MODE (object
), XEXP (object
, 0)))
329 else if (insn_invalid_p (object
))
331 rtx pat
= PATTERN (object
);
333 /* Perhaps we couldn't recognize the insn because there were
334 extra CLOBBERs at the end. If so, try to re-recognize
335 without the last CLOBBER (later iterations will cause each of
336 them to be eliminated, in turn). But don't do this if we
337 have an ASM_OPERAND. */
338 if (GET_CODE (pat
) == PARALLEL
339 && GET_CODE (XVECEXP (pat
, 0, XVECLEN (pat
, 0) - 1)) == CLOBBER
340 && asm_noperands (PATTERN (object
)) < 0)
344 if (XVECLEN (pat
, 0) == 2)
345 newpat
= XVECEXP (pat
, 0, 0);
351 = gen_rtx_PARALLEL (VOIDmode
,
352 rtvec_alloc (XVECLEN (pat
, 0) - 1));
353 for (j
= 0; j
< XVECLEN (newpat
, 0); j
++)
354 XVECEXP (newpat
, 0, j
) = XVECEXP (pat
, 0, j
);
357 /* Add a new change to this group to replace the pattern
358 with this new pattern. Then consider this change
359 as having succeeded. The change we added will
360 cause the entire call to fail if things remain invalid.
362 Note that this can lose if a later change than the one
363 we are processing specified &XVECEXP (PATTERN (object), 0, X)
364 but this shouldn't occur. */
366 validate_change (object
, &PATTERN (object
), newpat
, 1);
369 else if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
370 /* If this insn is a CLOBBER or USE, it is always valid, but is
376 last_validated
= object
;
379 if (i
== num_changes
)
383 for (i
= 0; i
< num_changes
; i
++)
384 if (changes
[i
].object
385 && INSN_P (changes
[i
].object
)
386 && (bb
= BLOCK_FOR_INSN (changes
[i
].object
)))
387 bb
->flags
|= BB_DIRTY
;
399 /* Return the number of changes so far in the current group. */
402 num_validated_changes (void)
407 /* Retract the changes numbered NUM and up. */
410 cancel_changes (int num
)
414 /* Back out all the changes. Do this in the opposite order in which
416 for (i
= num_changes
- 1; i
>= num
; i
--)
418 *changes
[i
].loc
= changes
[i
].old
;
419 if (changes
[i
].object
&& !MEM_P (changes
[i
].object
))
420 INSN_CODE (changes
[i
].object
) = changes
[i
].old_code
;
425 /* Replace every occurrence of FROM in X with TO. Mark each change with
426 validate_change passing OBJECT. */
429 validate_replace_rtx_1 (rtx
*loc
, rtx from
, rtx to
, rtx object
)
435 enum machine_mode op0_mode
= VOIDmode
;
436 int prev_changes
= num_changes
;
443 fmt
= GET_RTX_FORMAT (code
);
445 op0_mode
= GET_MODE (XEXP (x
, 0));
447 /* X matches FROM if it is the same rtx or they are both referring to the
448 same register in the same mode. Avoid calling rtx_equal_p unless the
449 operands look similar. */
452 || (REG_P (x
) && REG_P (from
)
453 && GET_MODE (x
) == GET_MODE (from
)
454 && REGNO (x
) == REGNO (from
))
455 || (GET_CODE (x
) == GET_CODE (from
) && GET_MODE (x
) == GET_MODE (from
)
456 && rtx_equal_p (x
, from
)))
458 validate_change (object
, loc
, to
, 1);
462 /* Call ourself recursively to perform the replacements.
463 We must not replace inside already replaced expression, otherwise we
464 get infinite recursion for replacements like (reg X)->(subreg (reg X))
465 done by regmove, so we must special case shared ASM_OPERANDS. */
467 if (GET_CODE (x
) == PARALLEL
)
469 for (j
= XVECLEN (x
, 0) - 1; j
>= 0; j
--)
471 if (j
&& GET_CODE (XVECEXP (x
, 0, j
)) == SET
472 && GET_CODE (SET_SRC (XVECEXP (x
, 0, j
))) == ASM_OPERANDS
)
474 /* Verify that operands are really shared. */
475 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x
, 0, 0)))
476 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
478 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x
, 0, j
)),
482 validate_replace_rtx_1 (&XVECEXP (x
, 0, j
), from
, to
, object
);
486 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
489 validate_replace_rtx_1 (&XEXP (x
, i
), from
, to
, object
);
490 else if (fmt
[i
] == 'E')
491 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
492 validate_replace_rtx_1 (&XVECEXP (x
, i
, j
), from
, to
, object
);
495 /* If we didn't substitute, there is nothing more to do. */
496 if (num_changes
== prev_changes
)
499 /* Allow substituted expression to have different mode. This is used by
500 regmove to change mode of pseudo register. */
501 if (fmt
[0] == 'e' && GET_MODE (XEXP (x
, 0)) != VOIDmode
)
502 op0_mode
= GET_MODE (XEXP (x
, 0));
504 /* Do changes needed to keep rtx consistent. Don't do any other
505 simplifications, as it is not our job. */
507 if (SWAPPABLE_OPERANDS_P (x
)
508 && swap_commutative_operands_p (XEXP (x
, 0), XEXP (x
, 1)))
510 validate_change (object
, loc
,
511 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x
) ? code
512 : swap_condition (code
),
513 GET_MODE (x
), XEXP (x
, 1),
522 /* If we have a PLUS whose second operand is now a CONST_INT, use
523 simplify_gen_binary to try to simplify it.
524 ??? We may want later to remove this, once simplification is
525 separated from this function. */
526 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& XEXP (x
, 1) == to
)
527 validate_change (object
, loc
,
529 (PLUS
, GET_MODE (x
), XEXP (x
, 0), XEXP (x
, 1)), 1);
532 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
533 || GET_CODE (XEXP (x
, 1)) == CONST_DOUBLE
)
534 validate_change (object
, loc
,
536 (PLUS
, GET_MODE (x
), XEXP (x
, 0),
537 simplify_gen_unary (NEG
,
538 GET_MODE (x
), XEXP (x
, 1),
543 if (GET_MODE (XEXP (x
, 0)) == VOIDmode
)
545 new = simplify_gen_unary (code
, GET_MODE (x
), XEXP (x
, 0),
547 /* If any of the above failed, substitute in something that
548 we know won't be recognized. */
550 new = gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
551 validate_change (object
, loc
, new, 1);
555 /* All subregs possible to simplify should be simplified. */
556 new = simplify_subreg (GET_MODE (x
), SUBREG_REG (x
), op0_mode
,
559 /* Subregs of VOIDmode operands are incorrect. */
560 if (!new && GET_MODE (SUBREG_REG (x
)) == VOIDmode
)
561 new = gen_rtx_CLOBBER (GET_MODE (x
), const0_rtx
);
563 validate_change (object
, loc
, new, 1);
567 /* If we are replacing a register with memory, try to change the memory
568 to be the mode required for memory in extract operations (this isn't
569 likely to be an insertion operation; if it was, nothing bad will
570 happen, we might just fail in some cases). */
572 if (MEM_P (XEXP (x
, 0))
573 && GET_CODE (XEXP (x
, 1)) == CONST_INT
574 && GET_CODE (XEXP (x
, 2)) == CONST_INT
575 && !mode_dependent_address_p (XEXP (XEXP (x
, 0), 0))
576 && !MEM_VOLATILE_P (XEXP (x
, 0)))
578 enum machine_mode wanted_mode
= VOIDmode
;
579 enum machine_mode is_mode
= GET_MODE (XEXP (x
, 0));
580 int pos
= INTVAL (XEXP (x
, 2));
582 if (GET_CODE (x
) == ZERO_EXTRACT
)
584 enum machine_mode new_mode
585 = mode_for_extraction (EP_extzv
, 1);
586 if (new_mode
!= MAX_MACHINE_MODE
)
587 wanted_mode
= new_mode
;
589 else if (GET_CODE (x
) == SIGN_EXTRACT
)
591 enum machine_mode new_mode
592 = mode_for_extraction (EP_extv
, 1);
593 if (new_mode
!= MAX_MACHINE_MODE
)
594 wanted_mode
= new_mode
;
597 /* If we have a narrower mode, we can do something. */
598 if (wanted_mode
!= VOIDmode
599 && GET_MODE_SIZE (wanted_mode
) < GET_MODE_SIZE (is_mode
))
601 int offset
= pos
/ BITS_PER_UNIT
;
604 /* If the bytes and bits are counted differently, we
605 must adjust the offset. */
606 if (BYTES_BIG_ENDIAN
!= BITS_BIG_ENDIAN
)
608 (GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (wanted_mode
) -
611 pos
%= GET_MODE_BITSIZE (wanted_mode
);
613 newmem
= adjust_address_nv (XEXP (x
, 0), wanted_mode
, offset
);
615 validate_change (object
, &XEXP (x
, 2), GEN_INT (pos
), 1);
616 validate_change (object
, &XEXP (x
, 0), newmem
, 1);
627 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
628 with TO. After all changes have been made, validate by seeing
629 if INSN is still valid. */
632 validate_replace_rtx_subexp (rtx from
, rtx to
, rtx insn
, rtx
*loc
)
634 validate_replace_rtx_1 (loc
, from
, to
, insn
);
635 return apply_change_group ();
638 /* Try replacing every occurrence of FROM in INSN with TO. After all
639 changes have been made, validate by seeing if INSN is still valid. */
642 validate_replace_rtx (rtx from
, rtx to
, rtx insn
)
644 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
645 return apply_change_group ();
648 /* Try replacing every occurrence of FROM in INSN with TO. */
651 validate_replace_rtx_group (rtx from
, rtx to
, rtx insn
)
653 validate_replace_rtx_1 (&PATTERN (insn
), from
, to
, insn
);
656 /* Function called by note_uses to replace used subexpressions. */
657 struct validate_replace_src_data
659 rtx from
; /* Old RTX */
660 rtx to
; /* New RTX */
661 rtx insn
; /* Insn in which substitution is occurring. */
665 validate_replace_src_1 (rtx
*x
, void *data
)
667 struct validate_replace_src_data
*d
668 = (struct validate_replace_src_data
*) data
;
670 validate_replace_rtx_1 (x
, d
->from
, d
->to
, d
->insn
);
673 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
677 validate_replace_src_group (rtx from
, rtx to
, rtx insn
)
679 struct validate_replace_src_data d
;
684 note_uses (&PATTERN (insn
), validate_replace_src_1
, &d
);
688 /* Return 1 if the insn using CC0 set by INSN does not contain
689 any ordered tests applied to the condition codes.
690 EQ and NE tests do not count. */
693 next_insn_tests_no_inequality (rtx insn
)
695 rtx next
= next_cc0_user (insn
);
697 /* If there is no next insn, we have to take the conservative choice. */
701 return (INSN_P (next
)
702 && ! inequality_comparisons_p (PATTERN (next
)));
706 /* This is used by find_single_use to locate an rtx that contains exactly one
707 use of DEST, which is typically either a REG or CC0. It returns a
708 pointer to the innermost rtx expression containing DEST. Appearances of
709 DEST that are being used to totally replace it are not counted. */
712 find_single_use_1 (rtx dest
, rtx
*loc
)
715 enum rtx_code code
= GET_CODE (x
);
733 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
734 of a REG that occupies all of the REG, the insn uses DEST if
735 it is mentioned in the destination or the source. Otherwise, we
736 need just check the source. */
737 if (GET_CODE (SET_DEST (x
)) != CC0
738 && GET_CODE (SET_DEST (x
)) != PC
739 && !REG_P (SET_DEST (x
))
740 && ! (GET_CODE (SET_DEST (x
)) == SUBREG
741 && REG_P (SUBREG_REG (SET_DEST (x
)))
742 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x
))))
743 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
744 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
745 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))))
748 return find_single_use_1 (dest
, &SET_SRC (x
));
752 return find_single_use_1 (dest
, &XEXP (x
, 0));
758 /* If it wasn't one of the common cases above, check each expression and
759 vector of this code. Look for a unique usage of DEST. */
761 fmt
= GET_RTX_FORMAT (code
);
762 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
766 if (dest
== XEXP (x
, i
)
767 || (REG_P (dest
) && REG_P (XEXP (x
, i
))
768 && REGNO (dest
) == REGNO (XEXP (x
, i
))))
771 this_result
= find_single_use_1 (dest
, &XEXP (x
, i
));
774 result
= this_result
;
775 else if (this_result
)
776 /* Duplicate usage. */
779 else if (fmt
[i
] == 'E')
783 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
785 if (XVECEXP (x
, i
, j
) == dest
787 && REG_P (XVECEXP (x
, i
, j
))
788 && REGNO (XVECEXP (x
, i
, j
)) == REGNO (dest
)))
791 this_result
= find_single_use_1 (dest
, &XVECEXP (x
, i
, j
));
794 result
= this_result
;
795 else if (this_result
)
804 /* See if DEST, produced in INSN, is used only a single time in the
805 sequel. If so, return a pointer to the innermost rtx expression in which
808 If PLOC is nonzero, *PLOC is set to the insn containing the single use.
810 This routine will return usually zero either before flow is called (because
811 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
812 note can't be trusted).
814 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
815 care about REG_DEAD notes or LOG_LINKS.
817 Otherwise, we find the single use by finding an insn that has a
818 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
819 only referenced once in that insn, we know that it must be the first
820 and last insn referencing DEST. */
823 find_single_use (rtx dest
, rtx insn
, rtx
*ploc
)
832 next
= NEXT_INSN (insn
);
834 || (!NONJUMP_INSN_P (next
) && !JUMP_P (next
)))
837 result
= find_single_use_1 (dest
, &PATTERN (next
));
844 if (reload_completed
|| reload_in_progress
|| !REG_P (dest
))
847 for (next
= next_nonnote_insn (insn
);
848 next
!= 0 && !LABEL_P (next
);
849 next
= next_nonnote_insn (next
))
850 if (INSN_P (next
) && dead_or_set_p (next
, dest
))
852 for (link
= LOG_LINKS (next
); link
; link
= XEXP (link
, 1))
853 if (XEXP (link
, 0) == insn
)
858 result
= find_single_use_1 (dest
, &PATTERN (next
));
868 /* Return 1 if OP is a valid general operand for machine mode MODE.
869 This is either a register reference, a memory reference,
870 or a constant. In the case of a memory reference, the address
871 is checked for general validity for the target machine.
873 Register and memory references must have mode MODE in order to be valid,
874 but some constants have no machine mode and are valid for any mode.
876 If MODE is VOIDmode, OP is checked for validity for whatever mode
879 The main use of this function is as a predicate in match_operand
880 expressions in the machine description.
882 For an explanation of this function's behavior for registers of
883 class NO_REGS, see the comment for `register_operand'. */
886 general_operand (rtx op
, enum machine_mode mode
)
888 enum rtx_code code
= GET_CODE (op
);
890 if (mode
== VOIDmode
)
891 mode
= GET_MODE (op
);
893 /* Don't accept CONST_INT or anything similar
894 if the caller wants something floating. */
895 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
896 && GET_MODE_CLASS (mode
) != MODE_INT
897 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
900 if (GET_CODE (op
) == CONST_INT
902 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
906 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
908 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
909 && LEGITIMATE_CONSTANT_P (op
));
911 /* Except for certain constants with VOIDmode, already checked for,
912 OP's mode must match MODE if MODE specifies a mode. */
914 if (GET_MODE (op
) != mode
)
919 rtx sub
= SUBREG_REG (op
);
921 #ifdef INSN_SCHEDULING
922 /* On machines that have insn scheduling, we want all memory
923 reference to be explicit, so outlaw paradoxical SUBREGs.
924 However, we must allow them after reload so that they can
925 get cleaned up by cleanup_subreg_operands. */
926 if (!reload_completed
&& MEM_P (sub
)
927 && GET_MODE_SIZE (mode
) > GET_MODE_SIZE (GET_MODE (sub
)))
930 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
931 may result in incorrect reference. We should simplify all valid
932 subregs of MEM anyway. But allow this after reload because we
933 might be called from cleanup_subreg_operands.
935 ??? This is a kludge. */
936 if (!reload_completed
&& SUBREG_BYTE (op
) != 0
940 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
941 create such rtl, and we must reject it. */
942 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
943 && GET_MODE_SIZE (GET_MODE (op
)) > GET_MODE_SIZE (GET_MODE (sub
)))
947 code
= GET_CODE (op
);
951 /* A register whose class is NO_REGS is not a general operand. */
952 return (REGNO (op
) >= FIRST_PSEUDO_REGISTER
953 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
);
957 rtx y
= XEXP (op
, 0);
959 if (! volatile_ok
&& MEM_VOLATILE_P (op
))
962 /* Use the mem's mode, since it will be reloaded thus. */
963 if (memory_address_p (GET_MODE (op
), y
))
970 /* Return 1 if OP is a valid memory address for a memory reference
973 The main use of this function is as a predicate in match_operand
974 expressions in the machine description. */
977 address_operand (rtx op
, enum machine_mode mode
)
979 return memory_address_p (mode
, op
);
982 /* Return 1 if OP is a register reference of mode MODE.
983 If MODE is VOIDmode, accept a register in any mode.
985 The main use of this function is as a predicate in match_operand
986 expressions in the machine description.
988 As a special exception, registers whose class is NO_REGS are
989 not accepted by `register_operand'. The reason for this change
990 is to allow the representation of special architecture artifacts
991 (such as a condition code register) without extending the rtl
992 definitions. Since registers of class NO_REGS cannot be used
993 as registers in any case where register classes are examined,
994 it is most consistent to keep this function from accepting them. */
997 register_operand (rtx op
, enum machine_mode mode
)
999 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1002 if (GET_CODE (op
) == SUBREG
)
1004 rtx sub
= SUBREG_REG (op
);
1006 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1007 because it is guaranteed to be reloaded into one.
1008 Just make sure the MEM is valid in itself.
1009 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1010 but currently it does result from (SUBREG (REG)...) where the
1011 reg went on the stack.) */
1012 if (! reload_completed
&& MEM_P (sub
))
1013 return general_operand (op
, mode
);
1015 #ifdef CANNOT_CHANGE_MODE_CLASS
1017 && REGNO (sub
) < FIRST_PSEUDO_REGISTER
1018 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub
), GET_MODE (sub
), mode
)
1019 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_INT
1020 && GET_MODE_CLASS (GET_MODE (sub
)) != MODE_COMPLEX_FLOAT
)
1024 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1025 create such rtl, and we must reject it. */
1026 if (GET_MODE_CLASS (GET_MODE (op
)) == MODE_FLOAT
1027 && GET_MODE_SIZE (GET_MODE (op
)) > GET_MODE_SIZE (GET_MODE (sub
)))
1033 /* We don't consider registers whose class is NO_REGS
1034 to be a register operand. */
1036 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1037 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1040 /* Return 1 for a register in Pmode; ignore the tested mode. */
1043 pmode_register_operand (rtx op
, enum machine_mode mode ATTRIBUTE_UNUSED
)
1045 return register_operand (op
, Pmode
);
1048 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1049 or a hard register. */
1052 scratch_operand (rtx op
, enum machine_mode mode
)
1054 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1057 return (GET_CODE (op
) == SCRATCH
1059 && REGNO (op
) < FIRST_PSEUDO_REGISTER
));
1062 /* Return 1 if OP is a valid immediate operand for mode MODE.
1064 The main use of this function is as a predicate in match_operand
1065 expressions in the machine description. */
1068 immediate_operand (rtx op
, enum machine_mode mode
)
1070 /* Don't accept CONST_INT or anything similar
1071 if the caller wants something floating. */
1072 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1073 && GET_MODE_CLASS (mode
) != MODE_INT
1074 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1077 if (GET_CODE (op
) == CONST_INT
1079 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1082 return (CONSTANT_P (op
)
1083 && (GET_MODE (op
) == mode
|| mode
== VOIDmode
1084 || GET_MODE (op
) == VOIDmode
)
1085 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1086 && LEGITIMATE_CONSTANT_P (op
));
1089 /* Returns 1 if OP is an operand that is a CONST_INT. */
1092 const_int_operand (rtx op
, enum machine_mode mode
)
1094 if (GET_CODE (op
) != CONST_INT
)
1097 if (mode
!= VOIDmode
1098 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1104 /* Returns 1 if OP is an operand that is a constant integer or constant
1105 floating-point number. */
1108 const_double_operand (rtx op
, enum machine_mode mode
)
1110 /* Don't accept CONST_INT or anything similar
1111 if the caller wants something floating. */
1112 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1113 && GET_MODE_CLASS (mode
) != MODE_INT
1114 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1117 return ((GET_CODE (op
) == CONST_DOUBLE
|| GET_CODE (op
) == CONST_INT
)
1118 && (mode
== VOIDmode
|| GET_MODE (op
) == mode
1119 || GET_MODE (op
) == VOIDmode
));
1122 /* Return 1 if OP is a general operand that is not an immediate operand. */
1125 nonimmediate_operand (rtx op
, enum machine_mode mode
)
1127 return (general_operand (op
, mode
) && ! CONSTANT_P (op
));
1130 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1133 nonmemory_operand (rtx op
, enum machine_mode mode
)
1135 if (CONSTANT_P (op
))
1137 /* Don't accept CONST_INT or anything similar
1138 if the caller wants something floating. */
1139 if (GET_MODE (op
) == VOIDmode
&& mode
!= VOIDmode
1140 && GET_MODE_CLASS (mode
) != MODE_INT
1141 && GET_MODE_CLASS (mode
) != MODE_PARTIAL_INT
)
1144 if (GET_CODE (op
) == CONST_INT
1146 && trunc_int_for_mode (INTVAL (op
), mode
) != INTVAL (op
))
1149 return ((GET_MODE (op
) == VOIDmode
|| GET_MODE (op
) == mode
1150 || mode
== VOIDmode
)
1151 && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
))
1152 && LEGITIMATE_CONSTANT_P (op
));
1155 if (GET_MODE (op
) != mode
&& mode
!= VOIDmode
)
1158 if (GET_CODE (op
) == SUBREG
)
1160 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1161 because it is guaranteed to be reloaded into one.
1162 Just make sure the MEM is valid in itself.
1163 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1164 but currently it does result from (SUBREG (REG)...) where the
1165 reg went on the stack.) */
1166 if (! reload_completed
&& MEM_P (SUBREG_REG (op
)))
1167 return general_operand (op
, mode
);
1168 op
= SUBREG_REG (op
);
1171 /* We don't consider registers whose class is NO_REGS
1172 to be a register operand. */
1174 && (REGNO (op
) >= FIRST_PSEUDO_REGISTER
1175 || REGNO_REG_CLASS (REGNO (op
)) != NO_REGS
));
1178 /* Return 1 if OP is a valid operand that stands for pushing a
1179 value of mode MODE onto the stack.
1181 The main use of this function is as a predicate in match_operand
1182 expressions in the machine description. */
1185 push_operand (rtx op
, enum machine_mode mode
)
1187 unsigned int rounded_size
= GET_MODE_SIZE (mode
);
1189 #ifdef PUSH_ROUNDING
1190 rounded_size
= PUSH_ROUNDING (rounded_size
);
1196 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1201 if (rounded_size
== GET_MODE_SIZE (mode
))
1203 if (GET_CODE (op
) != STACK_PUSH_CODE
)
1208 if (GET_CODE (op
) != PRE_MODIFY
1209 || GET_CODE (XEXP (op
, 1)) != PLUS
1210 || XEXP (XEXP (op
, 1), 0) != XEXP (op
, 0)
1211 || GET_CODE (XEXP (XEXP (op
, 1), 1)) != CONST_INT
1212 #ifdef STACK_GROWS_DOWNWARD
1213 || INTVAL (XEXP (XEXP (op
, 1), 1)) != - (int) rounded_size
1215 || INTVAL (XEXP (XEXP (op
, 1), 1)) != (int) rounded_size
1221 return XEXP (op
, 0) == stack_pointer_rtx
;
1224 /* Return 1 if OP is a valid operand that stands for popping a
1225 value of mode MODE off the stack.
1227 The main use of this function is as a predicate in match_operand
1228 expressions in the machine description. */
1231 pop_operand (rtx op
, enum machine_mode mode
)
1236 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1241 if (GET_CODE (op
) != STACK_POP_CODE
)
1244 return XEXP (op
, 0) == stack_pointer_rtx
;
1247 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1250 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED
, rtx addr
)
1252 GO_IF_LEGITIMATE_ADDRESS (mode
, addr
, win
);
1259 /* Return 1 if OP is a valid memory reference with mode MODE,
1260 including a valid address.
1262 The main use of this function is as a predicate in match_operand
1263 expressions in the machine description. */
1266 memory_operand (rtx op
, enum machine_mode mode
)
1270 if (! reload_completed
)
1271 /* Note that no SUBREG is a memory operand before end of reload pass,
1272 because (SUBREG (MEM...)) forces reloading into a register. */
1273 return MEM_P (op
) && general_operand (op
, mode
);
1275 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1279 if (GET_CODE (inner
) == SUBREG
)
1280 inner
= SUBREG_REG (inner
);
1282 return (MEM_P (inner
) && general_operand (op
, mode
));
1285 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1286 that is, a memory reference whose address is a general_operand. */
1289 indirect_operand (rtx op
, enum machine_mode mode
)
1291 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1292 if (! reload_completed
1293 && GET_CODE (op
) == SUBREG
&& MEM_P (SUBREG_REG (op
)))
1295 int offset
= SUBREG_BYTE (op
);
1296 rtx inner
= SUBREG_REG (op
);
1298 if (mode
!= VOIDmode
&& GET_MODE (op
) != mode
)
1301 /* The only way that we can have a general_operand as the resulting
1302 address is if OFFSET is zero and the address already is an operand
1303 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1306 return ((offset
== 0 && general_operand (XEXP (inner
, 0), Pmode
))
1307 || (GET_CODE (XEXP (inner
, 0)) == PLUS
1308 && GET_CODE (XEXP (XEXP (inner
, 0), 1)) == CONST_INT
1309 && INTVAL (XEXP (XEXP (inner
, 0), 1)) == -offset
1310 && general_operand (XEXP (XEXP (inner
, 0), 0), Pmode
)));
1314 && memory_operand (op
, mode
)
1315 && general_operand (XEXP (op
, 0), Pmode
));
1318 /* Return 1 if this is a comparison operator. This allows the use of
1319 MATCH_OPERATOR to recognize all the branch insns. */
1322 comparison_operator (rtx op
, enum machine_mode mode
)
1324 return ((mode
== VOIDmode
|| GET_MODE (op
) == mode
)
1325 && COMPARISON_P (op
));
1328 /* If BODY is an insn body that uses ASM_OPERANDS,
1329 return the number of operands (both input and output) in the insn.
1330 Otherwise return -1. */
1333 asm_noperands (rtx body
)
1335 switch (GET_CODE (body
))
1338 /* No output operands: return number of input operands. */
1339 return ASM_OPERANDS_INPUT_LENGTH (body
);
1341 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1342 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1343 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body
)) + 1;
1347 if (GET_CODE (XVECEXP (body
, 0, 0)) == SET
1348 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1350 /* Multiple output operands, or 1 output plus some clobbers:
1351 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1355 /* Count backwards through CLOBBERs to determine number of SETs. */
1356 for (i
= XVECLEN (body
, 0); i
> 0; i
--)
1358 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) == SET
)
1360 if (GET_CODE (XVECEXP (body
, 0, i
- 1)) != CLOBBER
)
1364 /* N_SETS is now number of output operands. */
1367 /* Verify that all the SETs we have
1368 came from a single original asm_operands insn
1369 (so that invalid combinations are blocked). */
1370 for (i
= 0; i
< n_sets
; i
++)
1372 rtx elt
= XVECEXP (body
, 0, i
);
1373 if (GET_CODE (elt
) != SET
)
1375 if (GET_CODE (SET_SRC (elt
)) != ASM_OPERANDS
)
1377 /* If these ASM_OPERANDS rtx's came from different original insns
1378 then they aren't allowed together. */
1379 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt
))
1380 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body
, 0, 0))))
1383 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body
, 0, 0)))
1386 else if (GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1388 /* 0 outputs, but some clobbers:
1389 body is [(asm_operands ...) (clobber (reg ...))...]. */
1392 /* Make sure all the other parallel things really are clobbers. */
1393 for (i
= XVECLEN (body
, 0) - 1; i
> 0; i
--)
1394 if (GET_CODE (XVECEXP (body
, 0, i
)) != CLOBBER
)
1397 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body
, 0, 0));
1406 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1407 copy its operands (both input and output) into the vector OPERANDS,
1408 the locations of the operands within the insn into the vector OPERAND_LOCS,
1409 and the constraints for the operands into CONSTRAINTS.
1410 Write the modes of the operands into MODES.
1411 Return the assembler-template.
1413 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1414 we don't store that info. */
1417 decode_asm_operands (rtx body
, rtx
*operands
, rtx
**operand_locs
,
1418 const char **constraints
, enum machine_mode
*modes
)
1422 const char *template = 0;
1424 if (GET_CODE (body
) == SET
&& GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1426 rtx asmop
= SET_SRC (body
);
1427 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1429 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
) + 1;
1431 for (i
= 1; i
< noperands
; i
++)
1434 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
- 1);
1436 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
- 1);
1438 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
- 1);
1440 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
- 1);
1443 /* The output is in the SET.
1444 Its constraint is in the ASM_OPERANDS itself. */
1446 operands
[0] = SET_DEST (body
);
1448 operand_locs
[0] = &SET_DEST (body
);
1450 constraints
[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop
);
1452 modes
[0] = GET_MODE (SET_DEST (body
));
1453 template = ASM_OPERANDS_TEMPLATE (asmop
);
1455 else if (GET_CODE (body
) == ASM_OPERANDS
)
1458 /* No output operands: BODY is (asm_operands ....). */
1460 noperands
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1462 /* The input operands are found in the 1st element vector. */
1463 /* Constraints for inputs are in the 2nd element vector. */
1464 for (i
= 0; i
< noperands
; i
++)
1467 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1469 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1471 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1473 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1475 template = ASM_OPERANDS_TEMPLATE (asmop
);
1477 else if (GET_CODE (body
) == PARALLEL
1478 && GET_CODE (XVECEXP (body
, 0, 0)) == SET
1479 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1481 rtx asmop
= SET_SRC (XVECEXP (body
, 0, 0));
1482 int nparallel
= XVECLEN (body
, 0); /* Includes CLOBBERs. */
1483 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1484 int nout
= 0; /* Does not include CLOBBERs. */
1486 /* At least one output, plus some CLOBBERs. */
1488 /* The outputs are in the SETs.
1489 Their constraints are in the ASM_OPERANDS itself. */
1490 for (i
= 0; i
< nparallel
; i
++)
1492 if (GET_CODE (XVECEXP (body
, 0, i
)) == CLOBBER
)
1493 break; /* Past last SET */
1496 operands
[i
] = SET_DEST (XVECEXP (body
, 0, i
));
1498 operand_locs
[i
] = &SET_DEST (XVECEXP (body
, 0, i
));
1500 constraints
[i
] = XSTR (SET_SRC (XVECEXP (body
, 0, i
)), 1);
1502 modes
[i
] = GET_MODE (SET_DEST (XVECEXP (body
, 0, i
)));
1506 for (i
= 0; i
< nin
; i
++)
1509 operand_locs
[i
+ nout
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1511 operands
[i
+ nout
] = ASM_OPERANDS_INPUT (asmop
, i
);
1513 constraints
[i
+ nout
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1515 modes
[i
+ nout
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1518 template = ASM_OPERANDS_TEMPLATE (asmop
);
1520 else if (GET_CODE (body
) == PARALLEL
1521 && GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1523 /* No outputs, but some CLOBBERs. */
1525 rtx asmop
= XVECEXP (body
, 0, 0);
1526 int nin
= ASM_OPERANDS_INPUT_LENGTH (asmop
);
1528 for (i
= 0; i
< nin
; i
++)
1531 operand_locs
[i
] = &ASM_OPERANDS_INPUT (asmop
, i
);
1533 operands
[i
] = ASM_OPERANDS_INPUT (asmop
, i
);
1535 constraints
[i
] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop
, i
);
1537 modes
[i
] = ASM_OPERANDS_INPUT_MODE (asmop
, i
);
1540 template = ASM_OPERANDS_TEMPLATE (asmop
);
1546 /* Check if an asm_operand matches its constraints.
1547 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1550 asm_operand_ok (rtx op
, const char *constraint
)
1554 /* Use constrain_operands after reload. */
1555 gcc_assert (!reload_completed
);
1559 char c
= *constraint
;
1576 case '0': case '1': case '2': case '3': case '4':
1577 case '5': case '6': case '7': case '8': case '9':
1578 /* For best results, our caller should have given us the
1579 proper matching constraint, but we can't actually fail
1580 the check if they didn't. Indicate that results are
1584 while (ISDIGIT (*constraint
));
1590 if (address_operand (op
, VOIDmode
))
1595 case 'V': /* non-offsettable */
1596 if (memory_operand (op
, VOIDmode
))
1600 case 'o': /* offsettable */
1601 if (offsettable_nonstrict_memref_p (op
))
1606 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1607 excepting those that expand_call created. Further, on some
1608 machines which do not have generalized auto inc/dec, an inc/dec
1609 is not a memory_operand.
1611 Match any memory and hope things are resolved after reload. */
1615 || GET_CODE (XEXP (op
, 0)) == PRE_DEC
1616 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
1623 || GET_CODE (XEXP (op
, 0)) == PRE_INC
1624 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
1630 if (GET_CODE (op
) == CONST_DOUBLE
1631 || (GET_CODE (op
) == CONST_VECTOR
1632 && GET_MODE_CLASS (GET_MODE (op
)) == MODE_VECTOR_FLOAT
))
1637 if (GET_CODE (op
) == CONST_DOUBLE
1638 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, 'G', constraint
))
1642 if (GET_CODE (op
) == CONST_DOUBLE
1643 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, 'H', constraint
))
1648 if (GET_CODE (op
) == CONST_INT
1649 || (GET_CODE (op
) == CONST_DOUBLE
1650 && GET_MODE (op
) == VOIDmode
))
1655 if (CONSTANT_P (op
) && (! flag_pic
|| LEGITIMATE_PIC_OPERAND_P (op
)))
1660 if (GET_CODE (op
) == CONST_INT
1661 || (GET_CODE (op
) == CONST_DOUBLE
1662 && GET_MODE (op
) == VOIDmode
))
1667 if (GET_CODE (op
) == CONST_INT
1668 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'I', constraint
))
1672 if (GET_CODE (op
) == CONST_INT
1673 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'J', constraint
))
1677 if (GET_CODE (op
) == CONST_INT
1678 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'K', constraint
))
1682 if (GET_CODE (op
) == CONST_INT
1683 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'L', constraint
))
1687 if (GET_CODE (op
) == CONST_INT
1688 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'M', constraint
))
1692 if (GET_CODE (op
) == CONST_INT
1693 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'N', constraint
))
1697 if (GET_CODE (op
) == CONST_INT
1698 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'O', constraint
))
1702 if (GET_CODE (op
) == CONST_INT
1703 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), 'P', constraint
))
1712 if (general_operand (op
, VOIDmode
))
1717 /* For all other letters, we first check for a register class,
1718 otherwise it is an EXTRA_CONSTRAINT. */
1719 if (REG_CLASS_FROM_CONSTRAINT (c
, constraint
) != NO_REGS
)
1722 if (GET_MODE (op
) == BLKmode
)
1724 if (register_operand (op
, VOIDmode
))
1727 #ifdef EXTRA_CONSTRAINT_STR
1728 else if (EXTRA_CONSTRAINT_STR (op
, c
, constraint
))
1730 else if (EXTRA_MEMORY_CONSTRAINT (c
, constraint
)
1731 /* Every memory operand can be reloaded to fit. */
1732 && memory_operand (op
, VOIDmode
))
1734 else if (EXTRA_ADDRESS_CONSTRAINT (c
, constraint
)
1735 /* Every address operand can be reloaded to fit. */
1736 && address_operand (op
, VOIDmode
))
1741 len
= CONSTRAINT_LEN (c
, constraint
);
1744 while (--len
&& *constraint
);
1752 /* Given an rtx *P, if it is a sum containing an integer constant term,
1753 return the location (type rtx *) of the pointer to that constant term.
1754 Otherwise, return a null pointer. */
1757 find_constant_term_loc (rtx
*p
)
1760 enum rtx_code code
= GET_CODE (*p
);
1762 /* If *P IS such a constant term, P is its location. */
1764 if (code
== CONST_INT
|| code
== SYMBOL_REF
|| code
== LABEL_REF
1768 /* Otherwise, if not a sum, it has no constant term. */
1770 if (GET_CODE (*p
) != PLUS
)
1773 /* If one of the summands is constant, return its location. */
1775 if (XEXP (*p
, 0) && CONSTANT_P (XEXP (*p
, 0))
1776 && XEXP (*p
, 1) && CONSTANT_P (XEXP (*p
, 1)))
1779 /* Otherwise, check each summand for containing a constant term. */
1781 if (XEXP (*p
, 0) != 0)
1783 tem
= find_constant_term_loc (&XEXP (*p
, 0));
1788 if (XEXP (*p
, 1) != 0)
1790 tem
= find_constant_term_loc (&XEXP (*p
, 1));
1798 /* Return 1 if OP is a memory reference
1799 whose address contains no side effects
1800 and remains valid after the addition
1801 of a positive integer less than the
1802 size of the object being referenced.
1804 We assume that the original address is valid and do not check it.
1806 This uses strict_memory_address_p as a subroutine, so
1807 don't use it before reload. */
1810 offsettable_memref_p (rtx op
)
1812 return ((MEM_P (op
))
1813 && offsettable_address_p (1, GET_MODE (op
), XEXP (op
, 0)));
1816 /* Similar, but don't require a strictly valid mem ref:
1817 consider pseudo-regs valid as index or base regs. */
1820 offsettable_nonstrict_memref_p (rtx op
)
1822 return ((MEM_P (op
))
1823 && offsettable_address_p (0, GET_MODE (op
), XEXP (op
, 0)));
1826 /* Return 1 if Y is a memory address which contains no side effects
1827 and would remain valid after the addition of a positive integer
1828 less than the size of that mode.
1830 We assume that the original address is valid and do not check it.
1831 We do check that it is valid for narrower modes.
1833 If STRICTP is nonzero, we require a strictly valid address,
1834 for the sake of use in reload.c. */
1837 offsettable_address_p (int strictp
, enum machine_mode mode
, rtx y
)
1839 enum rtx_code ycode
= GET_CODE (y
);
1843 int (*addressp
) (enum machine_mode
, rtx
) =
1844 (strictp
? strict_memory_address_p
: memory_address_p
);
1845 unsigned int mode_sz
= GET_MODE_SIZE (mode
);
1847 if (CONSTANT_ADDRESS_P (y
))
1850 /* Adjusting an offsettable address involves changing to a narrower mode.
1851 Make sure that's OK. */
1853 if (mode_dependent_address_p (y
))
1856 /* ??? How much offset does an offsettable BLKmode reference need?
1857 Clearly that depends on the situation in which it's being used.
1858 However, the current situation in which we test 0xffffffff is
1859 less than ideal. Caveat user. */
1861 mode_sz
= BIGGEST_ALIGNMENT
/ BITS_PER_UNIT
;
1863 /* If the expression contains a constant term,
1864 see if it remains valid when max possible offset is added. */
1866 if ((ycode
== PLUS
) && (y2
= find_constant_term_loc (&y1
)))
1871 *y2
= plus_constant (*y2
, mode_sz
- 1);
1872 /* Use QImode because an odd displacement may be automatically invalid
1873 for any wider mode. But it should be valid for a single byte. */
1874 good
= (*addressp
) (QImode
, y
);
1876 /* In any case, restore old contents of memory. */
1881 if (GET_RTX_CLASS (ycode
) == RTX_AUTOINC
)
1884 /* The offset added here is chosen as the maximum offset that
1885 any instruction could need to add when operating on something
1886 of the specified mode. We assume that if Y and Y+c are
1887 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1888 go inside a LO_SUM here, so we do so as well. */
1889 if (GET_CODE (y
) == LO_SUM
1891 && mode_sz
<= GET_MODE_ALIGNMENT (mode
) / BITS_PER_UNIT
)
1892 z
= gen_rtx_LO_SUM (GET_MODE (y
), XEXP (y
, 0),
1893 plus_constant (XEXP (y
, 1), mode_sz
- 1));
1895 z
= plus_constant (y
, mode_sz
- 1);
1897 /* Use QImode because an odd displacement may be automatically invalid
1898 for any wider mode. But it should be valid for a single byte. */
1899 return (*addressp
) (QImode
, z
);
1902 /* Return 1 if ADDR is an address-expression whose effect depends
1903 on the mode of the memory reference it is used in.
1905 Autoincrement addressing is a typical example of mode-dependence
1906 because the amount of the increment depends on the mode. */
1909 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED
/* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1911 GO_IF_MODE_DEPENDENT_ADDRESS (addr
, win
);
1913 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1914 win
: ATTRIBUTE_UNUSED_LABEL
1918 /* Like extract_insn, but save insn extracted and don't extract again, when
1919 called again for the same insn expecting that recog_data still contain the
1920 valid information. This is used primary by gen_attr infrastructure that
1921 often does extract insn again and again. */
1923 extract_insn_cached (rtx insn
)
1925 if (recog_data
.insn
== insn
&& INSN_CODE (insn
) >= 0)
1927 extract_insn (insn
);
1928 recog_data
.insn
= insn
;
1930 /* Do cached extract_insn, constrain_operands and complain about failures.
1931 Used by insn_attrtab. */
1933 extract_constrain_insn_cached (rtx insn
)
1935 extract_insn_cached (insn
);
1936 if (which_alternative
== -1
1937 && !constrain_operands (reload_completed
))
1938 fatal_insn_not_found (insn
);
1940 /* Do cached constrain_operands and complain about failures. */
1942 constrain_operands_cached (int strict
)
1944 if (which_alternative
== -1)
1945 return constrain_operands (strict
);
1950 /* Analyze INSN and fill in recog_data. */
1953 extract_insn (rtx insn
)
1958 rtx body
= PATTERN (insn
);
1960 recog_data
.insn
= NULL
;
1961 recog_data
.n_operands
= 0;
1962 recog_data
.n_alternatives
= 0;
1963 recog_data
.n_dups
= 0;
1964 which_alternative
= -1;
1966 switch (GET_CODE (body
))
1976 if (GET_CODE (SET_SRC (body
)) == ASM_OPERANDS
)
1981 if ((GET_CODE (XVECEXP (body
, 0, 0)) == SET
1982 && GET_CODE (SET_SRC (XVECEXP (body
, 0, 0))) == ASM_OPERANDS
)
1983 || GET_CODE (XVECEXP (body
, 0, 0)) == ASM_OPERANDS
)
1989 recog_data
.n_operands
= noperands
= asm_noperands (body
);
1992 /* This insn is an `asm' with operands. */
1994 /* expand_asm_operands makes sure there aren't too many operands. */
1995 gcc_assert (noperands
<= MAX_RECOG_OPERANDS
);
1997 /* Now get the operand values and constraints out of the insn. */
1998 decode_asm_operands (body
, recog_data
.operand
,
1999 recog_data
.operand_loc
,
2000 recog_data
.constraints
,
2001 recog_data
.operand_mode
);
2004 const char *p
= recog_data
.constraints
[0];
2005 recog_data
.n_alternatives
= 1;
2007 recog_data
.n_alternatives
+= (*p
++ == ',');
2011 fatal_insn_not_found (insn
);
2015 /* Ordinary insn: recognize it, get the operands via insn_extract
2016 and get the constraints. */
2018 icode
= recog_memoized (insn
);
2020 fatal_insn_not_found (insn
);
2022 recog_data
.n_operands
= noperands
= insn_data
[icode
].n_operands
;
2023 recog_data
.n_alternatives
= insn_data
[icode
].n_alternatives
;
2024 recog_data
.n_dups
= insn_data
[icode
].n_dups
;
2026 insn_extract (insn
);
2028 for (i
= 0; i
< noperands
; i
++)
2030 recog_data
.constraints
[i
] = insn_data
[icode
].operand
[i
].constraint
;
2031 recog_data
.operand_mode
[i
] = insn_data
[icode
].operand
[i
].mode
;
2032 /* VOIDmode match_operands gets mode from their real operand. */
2033 if (recog_data
.operand_mode
[i
] == VOIDmode
)
2034 recog_data
.operand_mode
[i
] = GET_MODE (recog_data
.operand
[i
]);
2037 for (i
= 0; i
< noperands
; i
++)
2038 recog_data
.operand_type
[i
]
2039 = (recog_data
.constraints
[i
][0] == '=' ? OP_OUT
2040 : recog_data
.constraints
[i
][0] == '+' ? OP_INOUT
2043 gcc_assert (recog_data
.n_alternatives
<= MAX_RECOG_ALTERNATIVES
);
2046 /* After calling extract_insn, you can use this function to extract some
2047 information from the constraint strings into a more usable form.
2048 The collected data is stored in recog_op_alt. */
2050 preprocess_constraints (void)
2054 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2055 memset (recog_op_alt
[i
], 0, (recog_data
.n_alternatives
2056 * sizeof (struct operand_alternative
)));
2058 for (i
= 0; i
< recog_data
.n_operands
; i
++)
2061 struct operand_alternative
*op_alt
;
2062 const char *p
= recog_data
.constraints
[i
];
2064 op_alt
= recog_op_alt
[i
];
2066 for (j
= 0; j
< recog_data
.n_alternatives
; j
++)
2068 op_alt
[j
].cl
= NO_REGS
;
2069 op_alt
[j
].constraint
= p
;
2070 op_alt
[j
].matches
= -1;
2071 op_alt
[j
].matched
= -1;
2073 if (*p
== '\0' || *p
== ',')
2075 op_alt
[j
].anything_ok
= 1;
2085 while (c
!= ',' && c
!= '\0');
2086 if (c
== ',' || c
== '\0')
2094 case '=': case '+': case '*': case '%':
2095 case 'E': case 'F': case 'G': case 'H':
2096 case 's': case 'i': case 'n':
2097 case 'I': case 'J': case 'K': case 'L':
2098 case 'M': case 'N': case 'O': case 'P':
2099 /* These don't say anything we care about. */
2103 op_alt
[j
].reject
+= 6;
2106 op_alt
[j
].reject
+= 600;
2109 op_alt
[j
].earlyclobber
= 1;
2112 case '0': case '1': case '2': case '3': case '4':
2113 case '5': case '6': case '7': case '8': case '9':
2116 op_alt
[j
].matches
= strtoul (p
, &end
, 10);
2117 recog_op_alt
[op_alt
[j
].matches
][j
].matched
= i
;
2123 op_alt
[j
].memory_ok
= 1;
2126 op_alt
[j
].decmem_ok
= 1;
2129 op_alt
[j
].incmem_ok
= 1;
2132 op_alt
[j
].nonoffmem_ok
= 1;
2135 op_alt
[j
].offmem_ok
= 1;
2138 op_alt
[j
].anything_ok
= 1;
2142 op_alt
[j
].is_address
= 1;
2143 op_alt
[j
].cl
= reg_class_subunion
[(int) op_alt
[j
].cl
]
2144 [(int) MODE_BASE_REG_CLASS (VOIDmode
)];
2150 reg_class_subunion
[(int) op_alt
[j
].cl
][(int) GENERAL_REGS
];
2154 if (EXTRA_MEMORY_CONSTRAINT (c
, p
))
2156 op_alt
[j
].memory_ok
= 1;
2159 if (EXTRA_ADDRESS_CONSTRAINT (c
, p
))
2161 op_alt
[j
].is_address
= 1;
2163 = (reg_class_subunion
2164 [(int) op_alt
[j
].cl
]
2165 [(int) MODE_BASE_REG_CLASS (VOIDmode
)]);
2170 = (reg_class_subunion
2171 [(int) op_alt
[j
].cl
]
2172 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c
, p
)]);
2175 p
+= CONSTRAINT_LEN (c
, p
);
2181 /* Check the operands of an insn against the insn's operand constraints
2182 and return 1 if they are valid.
2183 The information about the insn's operands, constraints, operand modes
2184 etc. is obtained from the global variables set up by extract_insn.
2186 WHICH_ALTERNATIVE is set to a number which indicates which
2187 alternative of constraints was matched: 0 for the first alternative,
2188 1 for the next, etc.
2190 In addition, when two operands are required to match
2191 and it happens that the output operand is (reg) while the
2192 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2193 make the output operand look like the input.
2194 This is because the output operand is the one the template will print.
2196 This is used in final, just before printing the assembler code and by
2197 the routines that determine an insn's attribute.
2199 If STRICT is a positive nonzero value, it means that we have been
2200 called after reload has been completed. In that case, we must
2201 do all checks strictly. If it is zero, it means that we have been called
2202 before reload has completed. In that case, we first try to see if we can
2203 find an alternative that matches strictly. If not, we try again, this
2204 time assuming that reload will fix up the insn. This provides a "best
2205 guess" for the alternative and is used to compute attributes of insns prior
2206 to reload. A negative value of STRICT is used for this internal call. */
2214 constrain_operands (int strict
)
2216 const char *constraints
[MAX_RECOG_OPERANDS
];
2217 int matching_operands
[MAX_RECOG_OPERANDS
];
2218 int earlyclobber
[MAX_RECOG_OPERANDS
];
2221 struct funny_match funny_match
[MAX_RECOG_OPERANDS
];
2222 int funny_match_index
;
2224 which_alternative
= 0;
2225 if (recog_data
.n_operands
== 0 || recog_data
.n_alternatives
== 0)
2228 for (c
= 0; c
< recog_data
.n_operands
; c
++)
2230 constraints
[c
] = recog_data
.constraints
[c
];
2231 matching_operands
[c
] = -1;
2238 funny_match_index
= 0;
2240 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2242 rtx op
= recog_data
.operand
[opno
];
2243 enum machine_mode mode
= GET_MODE (op
);
2244 const char *p
= constraints
[opno
];
2250 earlyclobber
[opno
] = 0;
2252 /* A unary operator may be accepted by the predicate, but it
2253 is irrelevant for matching constraints. */
2257 if (GET_CODE (op
) == SUBREG
)
2259 if (REG_P (SUBREG_REG (op
))
2260 && REGNO (SUBREG_REG (op
)) < FIRST_PSEUDO_REGISTER
)
2261 offset
= subreg_regno_offset (REGNO (SUBREG_REG (op
)),
2262 GET_MODE (SUBREG_REG (op
)),
2265 op
= SUBREG_REG (op
);
2268 /* An empty constraint or empty alternative
2269 allows anything which matched the pattern. */
2270 if (*p
== 0 || *p
== ',')
2274 switch (c
= *p
, len
= CONSTRAINT_LEN (c
, p
), c
)
2283 case '?': case '!': case '*': case '%':
2288 /* Ignore rest of this alternative as far as
2289 constraint checking is concerned. */
2292 while (*p
&& *p
!= ',');
2297 earlyclobber
[opno
] = 1;
2300 case '0': case '1': case '2': case '3': case '4':
2301 case '5': case '6': case '7': case '8': case '9':
2303 /* This operand must be the same as a previous one.
2304 This kind of constraint is used for instructions such
2305 as add when they take only two operands.
2307 Note that the lower-numbered operand is passed first.
2309 If we are not testing strictly, assume that this
2310 constraint will be satisfied. */
2315 match
= strtoul (p
, &end
, 10);
2322 rtx op1
= recog_data
.operand
[match
];
2323 rtx op2
= recog_data
.operand
[opno
];
2325 /* A unary operator may be accepted by the predicate,
2326 but it is irrelevant for matching constraints. */
2328 op1
= XEXP (op1
, 0);
2330 op2
= XEXP (op2
, 0);
2332 val
= operands_match_p (op1
, op2
);
2335 matching_operands
[opno
] = match
;
2336 matching_operands
[match
] = opno
;
2341 /* If output is *x and input is *--x, arrange later
2342 to change the output to *--x as well, since the
2343 output op is the one that will be printed. */
2344 if (val
== 2 && strict
> 0)
2346 funny_match
[funny_match_index
].this = opno
;
2347 funny_match
[funny_match_index
++].other
= match
;
2354 /* p is used for address_operands. When we are called by
2355 gen_reload, no one will have checked that the address is
2356 strictly valid, i.e., that all pseudos requiring hard regs
2357 have gotten them. */
2359 || (strict_memory_address_p (recog_data
.operand_mode
[opno
],
2364 /* No need to check general_operand again;
2365 it was done in insn-recog.c. */
2367 /* Anything goes unless it is a REG and really has a hard reg
2368 but the hard reg is not in the class GENERAL_REGS. */
2370 || GENERAL_REGS
== ALL_REGS
2372 || (reload_in_progress
2373 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2374 || reg_fits_class_p (op
, GENERAL_REGS
, offset
, mode
))
2379 /* This is used for a MATCH_SCRATCH in the cases when
2380 we don't actually need anything. So anything goes
2386 /* Memory operands must be valid, to the extent
2387 required by STRICT. */
2391 && !strict_memory_address_p (GET_MODE (op
),
2395 && !memory_address_p (GET_MODE (op
), XEXP (op
, 0)))
2399 /* Before reload, accept what reload can turn into mem. */
2400 else if (strict
< 0 && CONSTANT_P (op
))
2402 /* During reload, accept a pseudo */
2403 else if (reload_in_progress
&& REG_P (op
)
2404 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2410 && (GET_CODE (XEXP (op
, 0)) == PRE_DEC
2411 || GET_CODE (XEXP (op
, 0)) == POST_DEC
))
2417 && (GET_CODE (XEXP (op
, 0)) == PRE_INC
2418 || GET_CODE (XEXP (op
, 0)) == POST_INC
))
2424 if (GET_CODE (op
) == CONST_DOUBLE
2425 || (GET_CODE (op
) == CONST_VECTOR
2426 && GET_MODE_CLASS (GET_MODE (op
)) == MODE_VECTOR_FLOAT
))
2432 if (GET_CODE (op
) == CONST_DOUBLE
2433 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op
, c
, p
))
2438 if (GET_CODE (op
) == CONST_INT
2439 || (GET_CODE (op
) == CONST_DOUBLE
2440 && GET_MODE (op
) == VOIDmode
))
2443 if (CONSTANT_P (op
))
2448 if (GET_CODE (op
) == CONST_INT
2449 || (GET_CODE (op
) == CONST_DOUBLE
2450 && GET_MODE (op
) == VOIDmode
))
2462 if (GET_CODE (op
) == CONST_INT
2463 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op
), c
, p
))
2469 && ((strict
> 0 && ! offsettable_memref_p (op
))
2471 && !(CONSTANT_P (op
) || MEM_P (op
)))
2472 || (reload_in_progress
2474 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))))
2479 if ((strict
> 0 && offsettable_memref_p (op
))
2480 || (strict
== 0 && offsettable_nonstrict_memref_p (op
))
2481 /* Before reload, accept what reload can handle. */
2483 && (CONSTANT_P (op
) || MEM_P (op
)))
2484 /* During reload, accept a pseudo */
2485 || (reload_in_progress
&& REG_P (op
)
2486 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
))
2495 ? GENERAL_REGS
: REG_CLASS_FROM_CONSTRAINT (c
, p
));
2501 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)
2502 || (strict
== 0 && GET_CODE (op
) == SCRATCH
)
2504 && reg_fits_class_p (op
, cl
, offset
, mode
)))
2507 #ifdef EXTRA_CONSTRAINT_STR
2508 else if (EXTRA_CONSTRAINT_STR (op
, c
, p
))
2511 else if (EXTRA_MEMORY_CONSTRAINT (c
, p
)
2512 /* Every memory operand can be reloaded to fit. */
2513 && ((strict
< 0 && MEM_P (op
))
2514 /* Before reload, accept what reload can turn
2516 || (strict
< 0 && CONSTANT_P (op
))
2517 /* During reload, accept a pseudo */
2518 || (reload_in_progress
&& REG_P (op
)
2519 && REGNO (op
) >= FIRST_PSEUDO_REGISTER
)))
2521 else if (EXTRA_ADDRESS_CONSTRAINT (c
, p
)
2522 /* Every address operand can be reloaded to fit. */
2529 while (p
+= len
, c
);
2531 constraints
[opno
] = p
;
2532 /* If this operand did not win somehow,
2533 this alternative loses. */
2537 /* This alternative won; the operands are ok.
2538 Change whichever operands this alternative says to change. */
2543 /* See if any earlyclobber operand conflicts with some other
2547 for (eopno
= 0; eopno
< recog_data
.n_operands
; eopno
++)
2548 /* Ignore earlyclobber operands now in memory,
2549 because we would often report failure when we have
2550 two memory operands, one of which was formerly a REG. */
2551 if (earlyclobber
[eopno
]
2552 && REG_P (recog_data
.operand
[eopno
]))
2553 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2554 if ((MEM_P (recog_data
.operand
[opno
])
2555 || recog_data
.operand_type
[opno
] != OP_OUT
)
2557 /* Ignore things like match_operator operands. */
2558 && *recog_data
.constraints
[opno
] != 0
2559 && ! (matching_operands
[opno
] == eopno
2560 && operands_match_p (recog_data
.operand
[opno
],
2561 recog_data
.operand
[eopno
]))
2562 && ! safe_from_earlyclobber (recog_data
.operand
[opno
],
2563 recog_data
.operand
[eopno
]))
2568 while (--funny_match_index
>= 0)
2570 recog_data
.operand
[funny_match
[funny_match_index
].other
]
2571 = recog_data
.operand
[funny_match
[funny_match_index
].this];
2578 which_alternative
++;
2580 while (which_alternative
< recog_data
.n_alternatives
);
2582 which_alternative
= -1;
2583 /* If we are about to reject this, but we are not to test strictly,
2584 try a very loose test. Only return failure if it fails also. */
2586 return constrain_operands (-1);
2591 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2592 is a hard reg in class CLASS when its regno is offset by OFFSET
2593 and changed to mode MODE.
2594 If REG occupies multiple hard regs, all of them must be in CLASS. */
2597 reg_fits_class_p (rtx operand
, enum reg_class cl
, int offset
,
2598 enum machine_mode mode
)
2600 int regno
= REGNO (operand
);
2601 if (regno
< FIRST_PSEUDO_REGISTER
2602 && TEST_HARD_REG_BIT (reg_class_contents
[(int) cl
],
2607 for (sr
= hard_regno_nregs
[regno
][mode
] - 1;
2609 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) cl
],
2618 /* Split single instruction. Helper function for split_all_insns and
2619 split_all_insns_noflow. Return last insn in the sequence if successful,
2620 or NULL if unsuccessful. */
2623 split_insn (rtx insn
)
2625 /* Split insns here to get max fine-grain parallelism. */
2626 rtx first
= PREV_INSN (insn
);
2627 rtx last
= try_split (PATTERN (insn
), insn
, 1);
2632 /* try_split returns the NOTE that INSN became. */
2633 SET_INSN_DELETED (insn
);
2635 /* ??? Coddle to md files that generate subregs in post-reload
2636 splitters instead of computing the proper hard register. */
2637 if (reload_completed
&& first
!= last
)
2639 first
= NEXT_INSN (first
);
2643 cleanup_subreg_operands (first
);
2646 first
= NEXT_INSN (first
);
2652 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2655 split_all_insns (int upd_life
)
2661 blocks
= sbitmap_alloc (last_basic_block
);
2662 sbitmap_zero (blocks
);
2665 FOR_EACH_BB_REVERSE (bb
)
2668 bool finish
= false;
2670 for (insn
= BB_HEAD (bb
); !finish
; insn
= next
)
2672 /* Can't use `next_real_insn' because that might go across
2673 CODE_LABELS and short-out basic blocks. */
2674 next
= NEXT_INSN (insn
);
2675 finish
= (insn
== BB_END (bb
));
2678 rtx set
= single_set (insn
);
2680 /* Don't split no-op move insns. These should silently
2681 disappear later in final. Splitting such insns would
2682 break the code that handles REG_NO_CONFLICT blocks. */
2683 if (set
&& set_noop_p (set
))
2685 /* Nops get in the way while scheduling, so delete them
2686 now if register allocation has already been done. It
2687 is too risky to try to do this before register
2688 allocation, and there are unlikely to be very many
2689 nops then anyways. */
2690 if (reload_completed
)
2692 /* If the no-op set has a REG_UNUSED note, we need
2693 to update liveness information. */
2694 if (find_reg_note (insn
, REG_UNUSED
, NULL_RTX
))
2696 SET_BIT (blocks
, bb
->index
);
2699 /* ??? Is life info affected by deleting edges? */
2700 delete_insn_and_edges (insn
);
2705 rtx last
= split_insn (insn
);
2708 /* The split sequence may include barrier, but the
2709 BB boundary we are interested in will be set to
2712 while (BARRIER_P (last
))
2713 last
= PREV_INSN (last
);
2714 SET_BIT (blocks
, bb
->index
);
2724 int old_last_basic_block
= last_basic_block
;
2726 find_many_sub_basic_blocks (blocks
);
2728 if (old_last_basic_block
!= last_basic_block
&& upd_life
)
2729 blocks
= sbitmap_resize (blocks
, last_basic_block
, 1);
2732 if (changed
&& upd_life
)
2733 update_life_info (blocks
, UPDATE_LIFE_GLOBAL_RM_NOTES
,
2736 #ifdef ENABLE_CHECKING
2737 verify_flow_info ();
2740 sbitmap_free (blocks
);
2743 /* Same as split_all_insns, but do not expect CFG to be available.
2744 Used by machine dependent reorg passes. */
2747 split_all_insns_noflow (void)
2751 for (insn
= get_insns (); insn
; insn
= next
)
2753 next
= NEXT_INSN (insn
);
2756 /* Don't split no-op move insns. These should silently
2757 disappear later in final. Splitting such insns would
2758 break the code that handles REG_NO_CONFLICT blocks. */
2759 rtx set
= single_set (insn
);
2760 if (set
&& set_noop_p (set
))
2762 /* Nops get in the way while scheduling, so delete them
2763 now if register allocation has already been done. It
2764 is too risky to try to do this before register
2765 allocation, and there are unlikely to be very many
2768 ??? Should we use delete_insn when the CFG isn't valid? */
2769 if (reload_completed
)
2770 delete_insn_and_edges (insn
);
2778 #ifdef HAVE_peephole2
2779 struct peep2_insn_data
2785 static struct peep2_insn_data peep2_insn_data
[MAX_INSNS_PER_PEEP2
+ 1];
2786 static int peep2_current
;
2788 /* A non-insn marker indicating the last insn of the block.
2789 The live_before regset for this element is correct, indicating
2790 global_live_at_end for the block. */
2791 #define PEEP2_EOB pc_rtx
2793 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2794 does not exist. Used by the recognizer to find the next insn to match
2795 in a multi-insn pattern. */
2798 peep2_next_insn (int n
)
2800 gcc_assert (n
< MAX_INSNS_PER_PEEP2
+ 1);
2803 if (n
>= MAX_INSNS_PER_PEEP2
+ 1)
2804 n
-= MAX_INSNS_PER_PEEP2
+ 1;
2806 if (peep2_insn_data
[n
].insn
== PEEP2_EOB
)
2808 return peep2_insn_data
[n
].insn
;
2811 /* Return true if REGNO is dead before the Nth non-note insn
2815 peep2_regno_dead_p (int ofs
, int regno
)
2817 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
2819 ofs
+= peep2_current
;
2820 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2821 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2823 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
2825 return ! REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
);
2828 /* Similarly for a REG. */
2831 peep2_reg_dead_p (int ofs
, rtx reg
)
2835 gcc_assert (ofs
< MAX_INSNS_PER_PEEP2
+ 1);
2837 ofs
+= peep2_current
;
2838 if (ofs
>= MAX_INSNS_PER_PEEP2
+ 1)
2839 ofs
-= MAX_INSNS_PER_PEEP2
+ 1;
2841 gcc_assert (peep2_insn_data
[ofs
].insn
!= NULL_RTX
);
2843 regno
= REGNO (reg
);
2844 n
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
2846 if (REGNO_REG_SET_P (peep2_insn_data
[ofs
].live_before
, regno
+ n
))
2851 /* Try to find a hard register of mode MODE, matching the register class in
2852 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2853 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2854 in which case the only condition is that the register must be available
2855 before CURRENT_INSN.
2856 Registers that already have bits set in REG_SET will not be considered.
2858 If an appropriate register is available, it will be returned and the
2859 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2863 peep2_find_free_register (int from
, int to
, const char *class_str
,
2864 enum machine_mode mode
, HARD_REG_SET
*reg_set
)
2866 static int search_ofs
;
2871 gcc_assert (from
< MAX_INSNS_PER_PEEP2
+ 1);
2872 gcc_assert (to
< MAX_INSNS_PER_PEEP2
+ 1);
2874 from
+= peep2_current
;
2875 if (from
>= MAX_INSNS_PER_PEEP2
+ 1)
2876 from
-= MAX_INSNS_PER_PEEP2
+ 1;
2877 to
+= peep2_current
;
2878 if (to
>= MAX_INSNS_PER_PEEP2
+ 1)
2879 to
-= MAX_INSNS_PER_PEEP2
+ 1;
2881 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
2882 REG_SET_TO_HARD_REG_SET (live
, peep2_insn_data
[from
].live_before
);
2886 HARD_REG_SET this_live
;
2888 if (++from
>= MAX_INSNS_PER_PEEP2
+ 1)
2890 gcc_assert (peep2_insn_data
[from
].insn
!= NULL_RTX
);
2891 REG_SET_TO_HARD_REG_SET (this_live
, peep2_insn_data
[from
].live_before
);
2892 IOR_HARD_REG_SET (live
, this_live
);
2895 cl
= (class_str
[0] == 'r' ? GENERAL_REGS
2896 : REG_CLASS_FROM_CONSTRAINT (class_str
[0], class_str
));
2898 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2900 int raw_regno
, regno
, success
, j
;
2902 /* Distribute the free registers as much as possible. */
2903 raw_regno
= search_ofs
+ i
;
2904 if (raw_regno
>= FIRST_PSEUDO_REGISTER
)
2905 raw_regno
-= FIRST_PSEUDO_REGISTER
;
2906 #ifdef REG_ALLOC_ORDER
2907 regno
= reg_alloc_order
[raw_regno
];
2912 /* Don't allocate fixed registers. */
2913 if (fixed_regs
[regno
])
2915 /* Make sure the register is of the right class. */
2916 if (! TEST_HARD_REG_BIT (reg_class_contents
[cl
], regno
))
2918 /* And can support the mode we need. */
2919 if (! HARD_REGNO_MODE_OK (regno
, mode
))
2921 /* And that we don't create an extra save/restore. */
2922 if (! call_used_regs
[regno
] && ! regs_ever_live
[regno
])
2924 /* And we don't clobber traceback for noreturn functions. */
2925 if ((regno
== FRAME_POINTER_REGNUM
|| regno
== HARD_FRAME_POINTER_REGNUM
)
2926 && (! reload_completed
|| frame_pointer_needed
))
2930 for (j
= hard_regno_nregs
[regno
][mode
] - 1; j
>= 0; j
--)
2932 if (TEST_HARD_REG_BIT (*reg_set
, regno
+ j
)
2933 || TEST_HARD_REG_BIT (live
, regno
+ j
))
2941 for (j
= hard_regno_nregs
[regno
][mode
] - 1; j
>= 0; j
--)
2942 SET_HARD_REG_BIT (*reg_set
, regno
+ j
);
2944 /* Start the next search with the next register. */
2945 if (++raw_regno
>= FIRST_PSEUDO_REGISTER
)
2947 search_ofs
= raw_regno
;
2949 return gen_rtx_REG (mode
, regno
);
2957 /* Perform the peephole2 optimization pass. */
2960 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED
)
2966 #ifdef HAVE_conditional_execution
2970 bool do_cleanup_cfg
= false;
2971 bool do_global_life_update
= false;
2972 bool do_rebuild_jump_labels
= false;
2974 /* Initialize the regsets we're going to use. */
2975 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
2976 peep2_insn_data
[i
].live_before
= ALLOC_REG_SET (®_obstack
);
2977 live
= ALLOC_REG_SET (®_obstack
);
2979 #ifdef HAVE_conditional_execution
2980 blocks
= sbitmap_alloc (last_basic_block
);
2981 sbitmap_zero (blocks
);
2984 count_or_remove_death_notes (NULL
, 1);
2987 FOR_EACH_BB_REVERSE (bb
)
2989 struct propagate_block_info
*pbi
;
2990 reg_set_iterator rsi
;
2993 /* Indicate that all slots except the last holds invalid data. */
2994 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
; ++i
)
2995 peep2_insn_data
[i
].insn
= NULL_RTX
;
2997 /* Indicate that the last slot contains live_after data. */
2998 peep2_insn_data
[MAX_INSNS_PER_PEEP2
].insn
= PEEP2_EOB
;
2999 peep2_current
= MAX_INSNS_PER_PEEP2
;
3001 /* Start up propagation. */
3002 COPY_REG_SET (live
, bb
->global_live_at_end
);
3003 COPY_REG_SET (peep2_insn_data
[MAX_INSNS_PER_PEEP2
].live_before
, live
);
3005 #ifdef HAVE_conditional_execution
3006 pbi
= init_propagate_block_info (bb
, live
, NULL
, NULL
, 0);
3008 pbi
= init_propagate_block_info (bb
, live
, NULL
, NULL
, PROP_DEATH_NOTES
);
3011 for (insn
= BB_END (bb
); ; insn
= prev
)
3013 prev
= PREV_INSN (insn
);
3016 rtx
try, before_try
, x
;
3019 bool was_call
= false;
3021 /* Record this insn. */
3022 if (--peep2_current
< 0)
3023 peep2_current
= MAX_INSNS_PER_PEEP2
;
3024 peep2_insn_data
[peep2_current
].insn
= insn
;
3025 propagate_one_insn (pbi
, insn
);
3026 COPY_REG_SET (peep2_insn_data
[peep2_current
].live_before
, live
);
3028 /* Match the peephole. */
3029 try = peephole2_insns (PATTERN (insn
), insn
, &match_len
);
3032 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3033 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3034 cfg-related call notes. */
3035 for (i
= 0; i
<= match_len
; ++i
)
3038 rtx old_insn
, new_insn
, note
;
3040 j
= i
+ peep2_current
;
3041 if (j
>= MAX_INSNS_PER_PEEP2
+ 1)
3042 j
-= MAX_INSNS_PER_PEEP2
+ 1;
3043 old_insn
= peep2_insn_data
[j
].insn
;
3044 if (!CALL_P (old_insn
))
3049 while (new_insn
!= NULL_RTX
)
3051 if (CALL_P (new_insn
))
3053 new_insn
= NEXT_INSN (new_insn
);
3056 gcc_assert (new_insn
!= NULL_RTX
);
3058 CALL_INSN_FUNCTION_USAGE (new_insn
)
3059 = CALL_INSN_FUNCTION_USAGE (old_insn
);
3061 for (note
= REG_NOTES (old_insn
);
3063 note
= XEXP (note
, 1))
3064 switch (REG_NOTE_KIND (note
))
3068 case REG_ALWAYS_RETURN
:
3069 REG_NOTES (new_insn
)
3070 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note
),
3072 REG_NOTES (new_insn
));
3074 /* Discard all other reg notes. */
3078 /* Croak if there is another call in the sequence. */
3079 while (++i
<= match_len
)
3081 j
= i
+ peep2_current
;
3082 if (j
>= MAX_INSNS_PER_PEEP2
+ 1)
3083 j
-= MAX_INSNS_PER_PEEP2
+ 1;
3084 old_insn
= peep2_insn_data
[j
].insn
;
3085 gcc_assert (!CALL_P (old_insn
));
3090 i
= match_len
+ peep2_current
;
3091 if (i
>= MAX_INSNS_PER_PEEP2
+ 1)
3092 i
-= MAX_INSNS_PER_PEEP2
+ 1;
3094 note
= find_reg_note (peep2_insn_data
[i
].insn
,
3095 REG_EH_REGION
, NULL_RTX
);
3097 /* Replace the old sequence with the new. */
3098 try = emit_insn_after_setloc (try, peep2_insn_data
[i
].insn
,
3099 INSN_LOCATOR (peep2_insn_data
[i
].insn
));
3100 before_try
= PREV_INSN (insn
);
3101 delete_insn_chain (insn
, peep2_insn_data
[i
].insn
);
3103 /* Re-insert the EH_REGION notes. */
3104 if (note
|| (was_call
&& nonlocal_goto_handler_labels
))
3109 FOR_EACH_EDGE (eh_edge
, ei
, bb
->succs
)
3110 if (eh_edge
->flags
& (EDGE_EH
| EDGE_ABNORMAL_CALL
))
3113 for (x
= try ; x
!= before_try
; x
= PREV_INSN (x
))
3115 || (flag_non_call_exceptions
3116 && may_trap_p (PATTERN (x
))
3117 && !find_reg_note (x
, REG_EH_REGION
, NULL
)))
3121 = gen_rtx_EXPR_LIST (REG_EH_REGION
,
3125 if (x
!= BB_END (bb
) && eh_edge
)
3130 nfte
= split_block (bb
, x
);
3131 flags
= (eh_edge
->flags
3132 & (EDGE_EH
| EDGE_ABNORMAL
));
3134 flags
|= EDGE_ABNORMAL_CALL
;
3135 nehe
= make_edge (nfte
->src
, eh_edge
->dest
,
3138 nehe
->probability
= eh_edge
->probability
;
3140 = REG_BR_PROB_BASE
- nehe
->probability
;
3142 do_cleanup_cfg
|= purge_dead_edges (nfte
->dest
);
3143 #ifdef HAVE_conditional_execution
3144 SET_BIT (blocks
, nfte
->dest
->index
);
3152 /* Converting possibly trapping insn to non-trapping is
3153 possible. Zap dummy outgoing edges. */
3154 do_cleanup_cfg
|= purge_dead_edges (bb
);
3157 #ifdef HAVE_conditional_execution
3158 /* With conditional execution, we cannot back up the
3159 live information so easily, since the conditional
3160 death data structures are not so self-contained.
3161 So record that we've made a modification to this
3162 block and update life information at the end. */
3163 SET_BIT (blocks
, bb
->index
);
3166 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3167 peep2_insn_data
[i
].insn
= NULL_RTX
;
3168 peep2_insn_data
[peep2_current
].insn
= PEEP2_EOB
;
3170 /* Back up lifetime information past the end of the
3171 newly created sequence. */
3172 if (++i
>= MAX_INSNS_PER_PEEP2
+ 1)
3174 COPY_REG_SET (live
, peep2_insn_data
[i
].live_before
);
3176 /* Update life information for the new sequence. */
3183 i
= MAX_INSNS_PER_PEEP2
;
3184 peep2_insn_data
[i
].insn
= x
;
3185 propagate_one_insn (pbi
, x
);
3186 COPY_REG_SET (peep2_insn_data
[i
].live_before
, live
);
3192 /* ??? Should verify that LIVE now matches what we
3193 had before the new sequence. */
3198 /* If we generated a jump instruction, it won't have
3199 JUMP_LABEL set. Recompute after we're done. */
3200 for (x
= try; x
!= before_try
; x
= PREV_INSN (x
))
3203 do_rebuild_jump_labels
= true;
3209 if (insn
== BB_HEAD (bb
))
3213 /* Some peepholes can decide the don't need one or more of their
3214 inputs. If this happens, local life update is not enough. */
3215 EXECUTE_IF_AND_COMPL_IN_BITMAP (bb
->global_live_at_start
, live
,
3218 do_global_life_update
= true;
3222 free_propagate_block_info (pbi
);
3225 for (i
= 0; i
< MAX_INSNS_PER_PEEP2
+ 1; ++i
)
3226 FREE_REG_SET (peep2_insn_data
[i
].live_before
);
3227 FREE_REG_SET (live
);
3229 if (do_rebuild_jump_labels
)
3230 rebuild_jump_labels (get_insns ());
3232 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3233 we've changed global life since exception handlers are no longer
3238 do_global_life_update
= true;
3240 if (do_global_life_update
)
3241 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES
, PROP_DEATH_NOTES
);
3242 #ifdef HAVE_conditional_execution
3245 count_or_remove_death_notes (blocks
, 1);
3246 update_life_info (blocks
, UPDATE_LIFE_LOCAL
, PROP_DEATH_NOTES
);
3248 sbitmap_free (blocks
);
3251 #endif /* HAVE_peephole2 */
3253 /* Common predicates for use with define_bypass. */
3255 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3256 data not the address operand(s) of the store. IN_INSN must be
3257 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3261 store_data_bypass_p (rtx out_insn
, rtx in_insn
)
3263 rtx out_set
, in_set
;
3265 in_set
= single_set (in_insn
);
3266 gcc_assert (in_set
);
3268 if (!MEM_P (SET_DEST (in_set
)))
3271 out_set
= single_set (out_insn
);
3274 if (reg_mentioned_p (SET_DEST (out_set
), SET_DEST (in_set
)))
3282 out_pat
= PATTERN (out_insn
);
3283 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3285 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3287 rtx exp
= XVECEXP (out_pat
, 0, i
);
3289 if (GET_CODE (exp
) == CLOBBER
)
3292 gcc_assert (GET_CODE (exp
) == SET
);
3294 if (reg_mentioned_p (SET_DEST (exp
), SET_DEST (in_set
)))
3302 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3303 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3304 or multiple set; IN_INSN should be single_set for truth, but for convenience
3305 of insn categorization may be any JUMP or CALL insn. */
3308 if_test_bypass_p (rtx out_insn
, rtx in_insn
)
3310 rtx out_set
, in_set
;
3312 in_set
= single_set (in_insn
);
3315 gcc_assert (JUMP_P (in_insn
) || CALL_P (in_insn
));
3319 if (GET_CODE (SET_SRC (in_set
)) != IF_THEN_ELSE
)
3321 in_set
= SET_SRC (in_set
);
3323 out_set
= single_set (out_insn
);
3326 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3327 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))
3335 out_pat
= PATTERN (out_insn
);
3336 gcc_assert (GET_CODE (out_pat
) == PARALLEL
);
3338 for (i
= 0; i
< XVECLEN (out_pat
, 0); i
++)
3340 rtx exp
= XVECEXP (out_pat
, 0, i
);
3342 if (GET_CODE (exp
) == CLOBBER
)
3345 gcc_assert (GET_CODE (exp
) == SET
);
3347 if (reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 1))
3348 || reg_mentioned_p (SET_DEST (out_set
), XEXP (in_set
, 2)))