* config/i386/i386.md (mmx_pinsrw): Output operands in correct
[official-gcc.git] / gcc / recog.c
blobb8b532e24037b814f47b3c6cb411e1e5e2080335
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
31 #include "hard-reg-set.h"
32 #include "recog.h"
33 #include "regs.h"
34 #include "function.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "toplev.h"
38 #include "basic-block.h"
39 #include "output.h"
40 #include "reload.h"
42 #ifndef STACK_PUSH_CODE
43 #ifdef STACK_GROWS_DOWNWARD
44 #define STACK_PUSH_CODE PRE_DEC
45 #else
46 #define STACK_PUSH_CODE PRE_INC
47 #endif
48 #endif
50 #ifndef STACK_POP_CODE
51 #ifdef STACK_GROWS_DOWNWARD
52 #define STACK_POP_CODE POST_INC
53 #else
54 #define STACK_POP_CODE POST_DEC
55 #endif
56 #endif
58 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
59 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
60 static rtx *find_constant_term_loc PARAMS ((rtx *));
61 static int insn_invalid_p PARAMS ((rtx));
63 /* Nonzero means allow operands to be volatile.
64 This should be 0 if you are generating rtl, such as if you are calling
65 the functions in optabs.c and expmed.c (most of the time).
66 This should be 1 if all valid insns need to be recognized,
67 such as in regclass.c and final.c and reload.c.
69 init_recog and init_recog_no_volatile are responsible for setting this. */
71 int volatile_ok;
73 struct recog_data recog_data;
75 /* Contains a vector of operand_alternative structures for every operand.
76 Set up by preprocess_constraints. */
77 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79 /* On return from `constrain_operands', indicate which alternative
80 was satisfied. */
82 int which_alternative;
84 /* Nonzero after end of reload pass.
85 Set to 1 or 0 by toplev.c.
86 Controls the significance of (SUBREG (MEM)). */
88 int reload_completed;
90 /* Initialize data used by the function `recog'.
91 This must be called once in the compilation of a function
92 before any insn recognition may be done in the function. */
94 void
95 init_recog_no_volatile ()
97 volatile_ok = 0;
100 void
101 init_recog ()
103 volatile_ok = 1;
106 /* Try recognizing the instruction INSN,
107 and return the code number that results.
108 Remember the code so that repeated calls do not
109 need to spend the time for actual rerecognition.
111 This function is the normal interface to instruction recognition.
112 The automatically-generated function `recog' is normally called
113 through this one. (The only exception is in combine.c.) */
116 recog_memoized_1 (insn)
117 rtx insn;
119 if (INSN_CODE (insn) < 0)
120 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
121 return INSN_CODE (insn);
124 /* Check that X is an insn-body for an `asm' with operands
125 and that the operands mentioned in it are legitimate. */
128 check_asm_operands (x)
129 rtx x;
131 int noperands;
132 rtx *operands;
133 const char **constraints;
134 int i;
136 /* Post-reload, be more strict with things. */
137 if (reload_completed)
139 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
140 extract_insn (make_insn_raw (x));
141 constrain_operands (1);
142 return which_alternative >= 0;
145 noperands = asm_noperands (x);
146 if (noperands < 0)
147 return 0;
148 if (noperands == 0)
149 return 1;
151 operands = (rtx *) alloca (noperands * sizeof (rtx));
152 constraints = (const char **) alloca (noperands * sizeof (char *));
154 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
156 for (i = 0; i < noperands; i++)
158 const char *c = constraints[i];
159 if (c[0] == '%')
160 c++;
161 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
162 c = constraints[c[0] - '0'];
164 if (! asm_operand_ok (operands[i], c))
165 return 0;
168 return 1;
171 /* Static data for the next two routines. */
173 typedef struct change_t
175 rtx object;
176 int old_code;
177 rtx *loc;
178 rtx old;
179 } change_t;
181 static change_t *changes;
182 static int changes_allocated;
184 static int num_changes = 0;
186 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
187 at which NEW will be placed. If OBJECT is zero, no validation is done,
188 the change is simply made.
190 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
191 will be called with the address and mode as parameters. If OBJECT is
192 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
193 the change in place.
195 IN_GROUP is non-zero if this is part of a group of changes that must be
196 performed as a group. In that case, the changes will be stored. The
197 function `apply_change_group' will validate and apply the changes.
199 If IN_GROUP is zero, this is a single change. Try to recognize the insn
200 or validate the memory reference with the change applied. If the result
201 is not valid for the machine, suppress the change and return zero.
202 Otherwise, perform the change and return 1. */
205 validate_change (object, loc, new, in_group)
206 rtx object;
207 rtx *loc;
208 rtx new;
209 int in_group;
211 rtx old = *loc;
213 if (old == new || rtx_equal_p (old, new))
214 return 1;
216 if (in_group == 0 && num_changes != 0)
217 abort ();
219 *loc = new;
221 /* Save the information describing this change. */
222 if (num_changes >= changes_allocated)
224 if (changes_allocated == 0)
225 /* This value allows for repeated substitutions inside complex
226 indexed addresses, or changes in up to 5 insns. */
227 changes_allocated = MAX_RECOG_OPERANDS * 5;
228 else
229 changes_allocated *= 2;
231 changes =
232 (change_t*) xrealloc (changes,
233 sizeof (change_t) * changes_allocated);
236 changes[num_changes].object = object;
237 changes[num_changes].loc = loc;
238 changes[num_changes].old = old;
240 if (object && GET_CODE (object) != MEM)
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
243 case invalid. */
244 changes[num_changes].old_code = INSN_CODE (object);
245 INSN_CODE (object) = -1;
248 num_changes++;
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
253 if (in_group)
254 return 1;
255 else
256 return apply_change_group ();
259 /* This subroutine of apply_change_group verifies whether the changes to INSN
260 were valid; i.e. whether INSN can still be recognized. */
262 static int
263 insn_invalid_p (insn)
264 rtx insn;
266 int icode = recog_memoized (insn);
267 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
269 if (is_asm && ! check_asm_operands (PATTERN (insn)))
270 return 1;
271 if (! is_asm && icode < 0)
272 return 1;
274 /* After reload, verify that all constraints are satisfied. */
275 if (reload_completed)
277 extract_insn (insn);
279 if (! constrain_operands (1))
280 return 1;
283 return 0;
286 /* Apply a group of changes previously issued with `validate_change'.
287 Return 1 if all changes are valid, zero otherwise. */
/* NOTE(review): this text is a scrape of upstream recog.c; blank lines and
   brace-only lines were dropped by the extraction and the leading decimal
   on each line is the upstream line number.  Code kept byte-identical.  */
/* Walks every recorded change; on the first invalid one we `break' with
   i < num_changes, which selects the cancel path at the bottom.  */
290 apply_change_group ()
292 int i;
294 /* The changes have been applied and all INSN_CODEs have been reset to force
295 rerecognition.
297 The changes are valid if we aren't given an object, or if we are
298 given a MEM and it still is a valid address, or if this is in insn
299 and it is recognized. In the latter case, if reload has completed,
300 we also require that the operands meet the constraints for
301 the insn. */
303 for (i = 0; i < num_changes; i++)
305 rtx object = changes[i].object;
307 if (object == 0)
308 continue;
310 if (GET_CODE (object) == MEM)
312 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
313 break;
315 else if (insn_invalid_p (object))
317 rtx pat = PATTERN (object);
319 /* Perhaps we couldn't recognize the insn because there were
320 extra CLOBBERs at the end. If so, try to re-recognize
321 without the last CLOBBER (later iterations will cause each of
322 them to be eliminated, in turn). But don't do this if we
323 have an ASM_OPERAND. */
324 if (GET_CODE (pat) == PARALLEL
325 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
326 && asm_noperands (PATTERN (object)) < 0)
328 rtx newpat;
330 if (XVECLEN (pat, 0) == 2)
331 newpat = XVECEXP (pat, 0, 0);
332 else
334 int j;
336 newpat
337 = gen_rtx_PARALLEL (VOIDmode,
338 rtvec_alloc (XVECLEN (pat, 0) - 1));
339 for (j = 0; j < XVECLEN (newpat, 0); j++)
340 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
343 /* Add a new change to this group to replace the pattern
344 with this new pattern. Then consider this change
345 as having succeeded. The change we added will
346 cause the entire call to fail if things remain invalid.
348 Note that this can lose if a later change than the one
349 we are processing specified &XVECEXP (PATTERN (object), 0, X)
350 but this shouldn't occur. */
352 validate_change (object, &PATTERN (object), newpat, 1);
354 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
355 /* If this insn is a CLOBBER or USE, it is always valid, but is
356 never recognized. */
357 continue;
358 else
359 break;
/* Success iff the loop ran to completion; otherwise roll everything back.  */
363 if (i == num_changes)
365 num_changes = 0;
366 return 1;
368 else
370 cancel_changes (0);
371 return 0;
375 /* Return the number of changes so far in the current group. */
378 num_validated_changes ()
380 return num_changes;
383 /* Retract the changes numbered NUM and up. */
385 void
386 cancel_changes (num)
387 int num;
389 int i;
391 /* Back out all the changes. Do this in the opposite order in which
392 they were made. */
393 for (i = num_changes - 1; i >= num; i--)
395 *changes[i].loc = changes[i].old;
396 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
397 INSN_CODE (changes[i].object) = changes[i].old_code;
399 num_changes = num;
402 /* Replace every occurrence of FROM in X with TO.  Mark each change with
403 validate_change passing OBJECT. */
/* NOTE(review): scraped text -- blank and brace-only lines were dropped by
   the extraction and each line carries its upstream line number; several
   validate_change calls below therefore appear to be missing their final
   ", 1);" argument line.  Code kept byte-identical.  */
405 static void
406 validate_replace_rtx_1 (loc, from, to, object)
407 rtx *loc;
408 rtx from, to, object;
410 register int i, j;
411 register const char *fmt;
412 register rtx x = *loc;
413 enum rtx_code code;
415 if (!x)
416 return;
417 code = GET_CODE (x);
418 /* X matches FROM if it is the same rtx or they are both referring to the
419 same register in the same mode.  Avoid calling rtx_equal_p unless the
420 operands look similar. */
422 if (x == from
423 || (GET_CODE (x) == REG && GET_CODE (from) == REG
424 && GET_MODE (x) == GET_MODE (from)
425 && REGNO (x) == REGNO (from))
426 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
427 && rtx_equal_p (x, from)))
429 validate_change (object, loc, to, 1);
430 return;
433 /* For commutative or comparison operations, try replacing each argument
434 separately and seeing if we made any changes.  If so, put a constant
435 argument last.*/
436 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
438 int prev_changes = num_changes;
440 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
441 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
442 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
444 validate_change (object, loc,
445 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
446 : swap_condition (code),
447 GET_MODE (x), XEXP (x, 1),
448 XEXP (x, 0)),
/* Reload X/CODE: the swap above may have replaced *LOC.  */
450 x = *loc;
451 code = GET_CODE (x);
455 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
456 done the substitution, otherwise we won't. */
458 switch (code)
460 case PLUS:
461 /* If we have a PLUS whose second operand is now a CONST_INT, use
462 plus_constant to try to simplify it. */
463 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
464 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
466 return;
468 case MINUS:
469 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
471 validate_change (object, loc,
472 plus_constant (XEXP (x, 0), - INTVAL (to)),
474 return;
476 break;
478 case ZERO_EXTEND:
479 case SIGN_EXTEND:
480 /* In these cases, the operation to be performed depends on the mode
481 of the operand.  If we are replacing the operand with a VOIDmode
482 constant, we lose the information.  So try to simplify the operation
483 in that case.  If it fails, substitute in something that we know
484 won't be recognized. */
485 if (GET_MODE (to) == VOIDmode
486 && rtx_equal_p (XEXP (x, 0), from))
488 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
489 GET_MODE (from));
490 if (new == 0)
491 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
493 validate_change (object, loc, new, 1);
494 return;
496 break;
498 case SUBREG:
499 /* In case we are replacing by constant, attempt to simplify it to non-SUBREG
500 expression.  We can't do this later, since the information about inner mode
501 may be lost. */
502 if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
504 if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
505 && GET_MODE_SIZE (GET_MODE (from)) > UNITS_PER_WORD
506 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
508 rtx temp = operand_subword (to, SUBREG_WORD (x),
509 0, GET_MODE (from));
510 if (temp)
512 validate_change (object, loc, temp, 1);
513 return;
516 if (subreg_lowpart_p (x))
518 rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
519 if (new)
521 validate_change (object, loc, new, 1);
522 return;
526 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
527 since we are saying that the high bits don't matter. */
528 if (GET_MODE (to) == VOIDmode
529 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (from)))
531 validate_change (object, loc, to, 1);
532 return;
536 /* Changing mode twice with SUBREG => just change it once,
537 or not at all if changing back to starting mode. */
538 if (GET_CODE (to) == SUBREG
539 && rtx_equal_p (SUBREG_REG (x), from))
541 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
542 && SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
544 validate_change (object, loc, SUBREG_REG (to), 1);
545 return;
548 validate_change (object, loc,
549 gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
550 SUBREG_WORD (x) + SUBREG_WORD (to)), 1);
551 return;
554 /* If we have a SUBREG of a register that we are replacing and we are
555 replacing it with a MEM, make a new MEM and try replacing the
556 SUBREG with it.  Don't do this if the MEM has a mode-dependent address
557 or if we would be widening it. */
559 if (GET_CODE (from) == REG
560 && GET_CODE (to) == MEM
561 && rtx_equal_p (SUBREG_REG (x), from)
562 && ! mode_dependent_address_p (XEXP (to, 0))
563 && ! MEM_VOLATILE_P (to)
564 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
566 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
567 enum machine_mode mode = GET_MODE (x);
568 rtx new;
570 if (BYTES_BIG_ENDIAN)
571 offset += (MIN (UNITS_PER_WORD,
572 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
573 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
575 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
576 MEM_COPY_ATTRIBUTES (new, to);
577 validate_change (object, loc, new, 1);
578 return;
580 break;
582 case ZERO_EXTRACT:
583 case SIGN_EXTRACT:
584 /* If we are replacing a register with memory, try to change the memory
585 to be the mode required for memory in extract operations (this isn't
586 likely to be an insertion operation; if it was, nothing bad will
587 happen, we might just fail in some cases). */
589 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
590 && rtx_equal_p (XEXP (x, 0), from)
591 && GET_CODE (XEXP (x, 1)) == CONST_INT
592 && GET_CODE (XEXP (x, 2)) == CONST_INT
593 && ! mode_dependent_address_p (XEXP (to, 0))
594 && ! MEM_VOLATILE_P (to))
596 enum machine_mode wanted_mode = VOIDmode;
597 enum machine_mode is_mode = GET_MODE (to);
598 int pos = INTVAL (XEXP (x, 2));
600 #ifdef HAVE_extzv
601 if (code == ZERO_EXTRACT)
603 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
604 if (wanted_mode == VOIDmode)
605 wanted_mode = word_mode;
607 #endif
608 #ifdef HAVE_extv
609 if (code == SIGN_EXTRACT)
611 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
612 if (wanted_mode == VOIDmode)
613 wanted_mode = word_mode;
615 #endif
617 /* If we have a narrower mode, we can do something. */
618 if (wanted_mode != VOIDmode
619 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
621 int offset = pos / BITS_PER_UNIT;
622 rtx newmem;
624 /* If the bytes and bits are counted differently, we
625 must adjust the offset. */
626 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
627 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
628 - offset);
630 pos %= GET_MODE_BITSIZE (wanted_mode);
632 newmem = gen_rtx_MEM (wanted_mode,
633 plus_constant (XEXP (to, 0), offset));
634 MEM_COPY_ATTRIBUTES (newmem, to);
636 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
637 validate_change (object, &XEXP (x, 0), newmem, 1);
641 break;
643 default:
644 break;
647 /* For commutative or comparison operations we've already performed
648 replacements.  Don't try to perform them again. */
649 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
651 fmt = GET_RTX_FORMAT (code);
652 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
654 if (fmt[i] == 'e')
655 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
656 else if (fmt[i] == 'E')
657 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
658 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
663 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
664 with TO. After all changes have been made, validate by seeing
665 if INSN is still valid. */
668 validate_replace_rtx_subexp (from, to, insn, loc)
669 rtx from, to, insn, *loc;
671 validate_replace_rtx_1 (loc, from, to, insn);
672 return apply_change_group ();
675 /* Try replacing every occurrence of FROM in INSN with TO. After all
676 changes have been made, validate by seeing if INSN is still valid. */
679 validate_replace_rtx (from, to, insn)
680 rtx from, to, insn;
682 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
683 return apply_change_group ();
686 /* Try replacing every occurrence of FROM in INSN with TO. After all
687 changes have been made, validate by seeing if INSN is still valid. */
689 void
690 validate_replace_rtx_group (from, to, insn)
691 rtx from, to, insn;
693 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
696 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
697 SET_DESTs. After all changes have been made, validate by seeing if
698 INSN is still valid. */
701 validate_replace_src (from, to, insn)
702 rtx from, to, insn;
704 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
705 || GET_CODE (PATTERN (insn)) != SET)
706 abort ();
708 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
709 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
710 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
711 from, to, insn);
712 return apply_change_group ();
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
#endif
765 /* This is used by find_single_use to locate an rtx that contains exactly one
766 use of DEST, which is typically either a REG or CC0.  It returns a
767 pointer to the innermost rtx expression containing DEST.  Appearances of
768 DEST that are being used to totally replace it are not counted. */
/* NOTE(review): scraped text -- blank and brace-only lines were dropped by
   the extraction; each line carries its upstream line number.  Code kept
   byte-identical.  A second use found anywhere makes the whole walk
   return 0 ("not single use").  */
770 static rtx *
771 find_single_use_1 (dest, loc)
772 rtx dest;
773 rtx *loc;
775 rtx x = *loc;
776 enum rtx_code code = GET_CODE (x);
777 rtx *result = 0;
778 rtx *this_result;
779 int i;
780 const char *fmt;
782 switch (code)
784 case CONST_INT:
785 case CONST:
786 case LABEL_REF:
787 case SYMBOL_REF:
788 case CONST_DOUBLE:
789 case CLOBBER:
790 return 0;
792 case SET:
793 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
794 of a REG that occupies all of the REG, the insn uses DEST if
795 it is mentioned in the destination or the source.  Otherwise, we
796 need just check the source. */
797 if (GET_CODE (SET_DEST (x)) != CC0
798 && GET_CODE (SET_DEST (x)) != PC
799 && GET_CODE (SET_DEST (x)) != REG
800 && ! (GET_CODE (SET_DEST (x)) == SUBREG
801 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
802 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
803 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
804 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
805 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
806 break;
808 return find_single_use_1 (dest, &SET_SRC (x));
810 case MEM:
811 case SUBREG:
812 return find_single_use_1 (dest, &XEXP (x, 0));
814 default:
815 break;
818 /* If it wasn't one of the common cases above, check each expression and
819 vector of this code.  Look for a unique usage of DEST. */
821 fmt = GET_RTX_FORMAT (code);
822 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
824 if (fmt[i] == 'e')
826 if (dest == XEXP (x, i)
827 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
828 && REGNO (dest) == REGNO (XEXP (x, i))))
829 this_result = loc;
830 else
831 this_result = find_single_use_1 (dest, &XEXP (x, i));
833 if (result == 0)
834 result = this_result;
835 else if (this_result)
836 /* Duplicate usage. */
837 return 0;
839 else if (fmt[i] == 'E')
841 int j;
843 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
845 if (XVECEXP (x, i, j) == dest
846 || (GET_CODE (dest) == REG
847 && GET_CODE (XVECEXP (x, i, j)) == REG
848 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
849 this_result = loc;
850 else
851 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
853 if (result == 0)
854 result = this_result;
855 else if (this_result)
856 return 0;
861 return result;
864 /* See if DEST, produced in INSN, is used only a single time in the
865 sequel. If so, return a pointer to the innermost rtx expression in which
866 it is used.
868 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
870 This routine will return usually zero either before flow is called (because
871 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
872 note can't be trusted).
874 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
875 care about REG_DEAD notes or LOG_LINKS.
877 Otherwise, we find the single use by finding an insn that has a
878 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
879 only referenced once in that insn, we know that it must be the first
880 and last insn referencing DEST. */
882 rtx *
883 find_single_use (dest, insn, ploc)
884 rtx dest;
885 rtx insn;
886 rtx *ploc;
888 rtx next;
889 rtx *result;
890 rtx link;
892 #ifdef HAVE_cc0
893 if (dest == cc0_rtx)
895 next = NEXT_INSN (insn);
896 if (next == 0
897 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
898 return 0;
900 result = find_single_use_1 (dest, &PATTERN (next));
901 if (result && ploc)
902 *ploc = next;
903 return result;
905 #endif
907 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
908 return 0;
910 for (next = next_nonnote_insn (insn);
911 next != 0 && GET_CODE (next) != CODE_LABEL;
912 next = next_nonnote_insn (next))
913 if (INSN_P (next) && dead_or_set_p (next, dest))
915 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
916 if (XEXP (link, 0) == insn)
917 break;
919 if (link)
921 result = find_single_use_1 (dest, &PATTERN (next));
922 if (ploc)
923 *ploc = next;
924 return result;
928 return 0;
931 /* Return 1 if OP is a valid general operand for machine mode MODE.
932 This is either a register reference, a memory reference,
933 or a constant.  In the case of a memory reference, the address
934 is checked for general validity for the target machine.
936 Register and memory references must have mode MODE in order to be valid,
937 but some constants have no machine mode and are valid for any mode.
939 If MODE is VOIDmode, OP is checked for validity for whatever mode
940 it has.
942 The main use of this function is as a predicate in match_operand
943 expressions in the machine description.
945 For an explanation of this function's behavior for registers of
946 class NO_REGS, see the comment for `register_operand'. */
/* NOTE(review): scraped text -- blank and brace-only lines were dropped by
   the extraction; each line carries its upstream line number.  Code kept
   byte-identical.  GO_IF_LEGITIMATE_ADDRESS jumps to the `win' label below
   when the MEM address is valid for the target.  */
949 general_operand (op, mode)
950 register rtx op;
951 enum machine_mode mode;
953 register enum rtx_code code = GET_CODE (op);
954 int mode_altering_drug = 0;
956 if (mode == VOIDmode)
957 mode = GET_MODE (op);
959 /* Don't accept CONST_INT or anything similar
960 if the caller wants something floating. */
961 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
962 && GET_MODE_CLASS (mode) != MODE_INT
963 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
964 return 0;
966 if (CONSTANT_P (op))
967 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
968 || mode == VOIDmode)
969 #ifdef LEGITIMATE_PIC_OPERAND_P
970 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
971 #endif
972 && LEGITIMATE_CONSTANT_P (op));
974 /* Except for certain constants with VOIDmode, already checked for,
975 OP's mode must match MODE if MODE specifies a mode. */
977 if (GET_MODE (op) != mode)
978 return 0;
980 if (code == SUBREG)
982 #ifdef INSN_SCHEDULING
983 /* On machines that have insn scheduling, we want all memory
984 reference to be explicit, so outlaw paradoxical SUBREGs. */
985 if (GET_CODE (SUBREG_REG (op)) == MEM
986 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
987 return 0;
988 #endif
990 op = SUBREG_REG (op);
991 code = GET_CODE (op);
992 #if 0
993 /* No longer needed, since (SUBREG (MEM...))
994 will load the MEM into a reload reg in the MEM's own mode. */
995 mode_altering_drug = 1;
996 #endif
999 if (code == REG)
1000 /* A register whose class is NO_REGS is not a general operand. */
1001 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1002 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1004 if (code == MEM)
1006 register rtx y = XEXP (op, 0);
1008 if (! volatile_ok && MEM_VOLATILE_P (op))
1009 return 0;
1011 if (GET_CODE (y) == ADDRESSOF)
1012 return 1;
1014 /* Use the mem's mode, since it will be reloaded thus. */
1015 mode = GET_MODE (op);
1016 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1019 /* Pretend this is an operand for now; we'll run force_operand
1020 on its replacement in fixup_var_refs_1. */
1021 if (code == ADDRESSOF)
1022 return 1;
1024 return 0;
1026 win:
1027 if (mode_altering_drug)
1028 return ! mode_dependent_address_p (XEXP (op, 0));
1029 return 1;
1032 /* Return 1 if OP is a valid memory address for a memory reference
1033 of mode MODE.
1035 The main use of this function is as a predicate in match_operand
1036 expressions in the machine description. */
1039 address_operand (op, mode)
1040 register rtx op;
1041 enum machine_mode mode;
1043 return memory_address_p (mode, op);
1046 /* Return 1 if OP is a register reference of mode MODE.
1047 If MODE is VOIDmode, accept a register in any mode.
1049 The main use of this function is as a predicate in match_operand
1050 expressions in the machine description.
1052 As a special exception, registers whose class is NO_REGS are
1053 not accepted by `register_operand'.  The reason for this change
1054 is to allow the representation of special architecture artifacts
1055 (such as a condition code register) without extending the rtl
1056 definitions.  Since registers of class NO_REGS cannot be used
1057 as registers in any case where register classes are examined,
1058 it is most consistent to keep this function from accepting them. */
/* NOTE(review): scraped text -- blank and brace-only lines were dropped by
   the extraction; each line carries its upstream line number.  Code kept
   byte-identical.  */
1061 register_operand (op, mode)
1062 register rtx op;
1063 enum machine_mode mode;
1065 if (GET_MODE (op) != mode && mode != VOIDmode)
1066 return 0;
1068 if (GET_CODE (op) == SUBREG)
1070 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1071 because it is guaranteed to be reloaded into one.
1072 Just make sure the MEM is valid in itself.
1073 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1074 but currently it does result from (SUBREG (REG)...) where the
1075 reg went on the stack.) */
1076 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1077 return general_operand (op, mode);
1079 #ifdef CLASS_CANNOT_CHANGE_MODE
1080 if (GET_CODE (SUBREG_REG (op)) == REG
1081 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1082 && (TEST_HARD_REG_BIT
1083 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1084 REGNO (SUBREG_REG (op))))
1085 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1086 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1087 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT
1088 return 0;
1089 #endif
1091 op = SUBREG_REG (op);
1094 /* If we have an ADDRESSOF, consider it valid since it will be
1095 converted into something that will not be a MEM. */
1096 if (GET_CODE (op) == ADDRESSOF)
1097 return 1;
1099 /* We don't consider registers whose class is NO_REGS
1100 to be a register operand. */
1101 return (GET_CODE (op) == REG
1102 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1103 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1106 /* Return 1 for a register in Pmode; ignore the tested mode. */
1109 pmode_register_operand (op, mode)
1110 rtx op;
1111 enum machine_mode mode ATTRIBUTE_UNUSED;
1113 return register_operand (op, Pmode);
1116 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1117 or a hard register. */
1120 scratch_operand (op, mode)
1121 register rtx op;
1122 enum machine_mode mode;
1124 if (GET_MODE (op) != mode && mode != VOIDmode)
1125 return 0;
1127 return (GET_CODE (op) == SCRATCH
1128 || (GET_CODE (op) == REG
1129 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1132 /* Return 1 if OP is a valid immediate operand for mode MODE.
1134 The main use of this function is as a predicate in match_operand
1135 expressions in the machine description. */
1138 immediate_operand (op, mode)
1139 register rtx op;
1140 enum machine_mode mode;
1142 /* Don't accept CONST_INT or anything similar
1143 if the caller wants something floating. */
1144 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1145 && GET_MODE_CLASS (mode) != MODE_INT
1146 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1147 return 0;
1149 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1150 result in 0/1. It seems a safe assumption that this is
1151 in range for everyone. */
1152 if (GET_CODE (op) == CONSTANT_P_RTX)
1153 return 1;
1155 return (CONSTANT_P (op)
1156 && (GET_MODE (op) == mode || mode == VOIDmode
1157 || GET_MODE (op) == VOIDmode)
1158 #ifdef LEGITIMATE_PIC_OPERAND_P
1159 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1160 #endif
1161 && LEGITIMATE_CONSTANT_P (op));
1164 /* Returns 1 if OP is an operand that is a CONST_INT. */
1167 const_int_operand (op, mode)
1168 register rtx op;
1169 enum machine_mode mode ATTRIBUTE_UNUSED;
1171 return GET_CODE (op) == CONST_INT;
1174 /* Returns 1 if OP is an operand that is a constant integer or constant
1175 floating-point number. */
1178 const_double_operand (op, mode)
1179 register rtx op;
1180 enum machine_mode mode;
1182 /* Don't accept CONST_INT or anything similar
1183 if the caller wants something floating. */
1184 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1185 && GET_MODE_CLASS (mode) != MODE_INT
1186 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1187 return 0;
1189 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1190 && (mode == VOIDmode || GET_MODE (op) == mode
1191 || GET_MODE (op) == VOIDmode));
1194 /* Return 1 if OP is a general operand that is not an immediate operand. */
1197 nonimmediate_operand (op, mode)
1198 register rtx op;
1199 enum machine_mode mode;
1201 return (general_operand (op, mode) && ! CONSTANT_P (op));
1204 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1207 nonmemory_operand (op, mode)
1208 register rtx op;
1209 enum machine_mode mode;
1211 if (CONSTANT_P (op))
1213 /* Don't accept CONST_INT or anything similar
1214 if the caller wants something floating. */
1215 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1216 && GET_MODE_CLASS (mode) != MODE_INT
1217 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1218 return 0;
1220 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1221 || mode == VOIDmode)
1222 #ifdef LEGITIMATE_PIC_OPERAND_P
1223 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1224 #endif
1225 && LEGITIMATE_CONSTANT_P (op));
1228 if (GET_MODE (op) != mode && mode != VOIDmode)
1229 return 0;
1231 if (GET_CODE (op) == SUBREG)
1233 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1234 because it is guaranteed to be reloaded into one.
1235 Just make sure the MEM is valid in itself.
1236 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1237 but currently it does result from (SUBREG (REG)...) where the
1238 reg went on the stack.) */
1239 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1240 return general_operand (op, mode);
1241 op = SUBREG_REG (op);
1244 /* We don't consider registers whose class is NO_REGS
1245 to be a register operand. */
1246 return (GET_CODE (op) == REG
1247 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1248 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1251 /* Return 1 if OP is a valid operand that stands for pushing a
1252 value of mode MODE onto the stack.
1254 The main use of this function is as a predicate in match_operand
1255 expressions in the machine description. */
1258 push_operand (op, mode)
1259 rtx op;
1260 enum machine_mode mode;
1262 if (GET_CODE (op) != MEM)
1263 return 0;
1265 if (mode != VOIDmode && GET_MODE (op) != mode)
1266 return 0;
1268 op = XEXP (op, 0);
1270 if (GET_CODE (op) != STACK_PUSH_CODE)
1271 return 0;
1273 return XEXP (op, 0) == stack_pointer_rtx;
1276 /* Return 1 if OP is a valid operand that stands for popping a
1277 value of mode MODE off the stack.
1279 The main use of this function is as a predicate in match_operand
1280 expressions in the machine description. */
1283 pop_operand (op, mode)
1284 rtx op;
1285 enum machine_mode mode;
1287 if (GET_CODE (op) != MEM)
1288 return 0;
1290 if (mode != VOIDmode && GET_MODE (op) != mode)
1291 return 0;
1293 op = XEXP (op, 0);
1295 if (GET_CODE (op) != STACK_POP_CODE)
1296 return 0;
1298 return XEXP (op, 0) == stack_pointer_rtx;
1301 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1304 memory_address_p (mode, addr)
1305 enum machine_mode mode ATTRIBUTE_UNUSED;
1306 register rtx addr;
1308 if (GET_CODE (addr) == ADDRESSOF)
1309 return 1;
1311 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1312 return 0;
1314 win:
1315 return 1;
1318 /* Return 1 if OP is a valid memory reference with mode MODE,
1319 including a valid address.
1321 The main use of this function is as a predicate in match_operand
1322 expressions in the machine description. */
1325 memory_operand (op, mode)
1326 register rtx op;
1327 enum machine_mode mode;
1329 rtx inner;
1331 if (! reload_completed)
1332 /* Note that no SUBREG is a memory operand before end of reload pass,
1333 because (SUBREG (MEM...)) forces reloading into a register. */
1334 return GET_CODE (op) == MEM && general_operand (op, mode);
1336 if (mode != VOIDmode && GET_MODE (op) != mode)
1337 return 0;
1339 inner = op;
1340 if (GET_CODE (inner) == SUBREG)
1341 inner = SUBREG_REG (inner);
1343 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1346 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1347 that is, a memory reference whose address is a general_operand. */
1350 indirect_operand (op, mode)
1351 register rtx op;
1352 enum machine_mode mode;
1354 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1355 if (! reload_completed
1356 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
/* (subreg (mem ...)): the subreg's word number implies a byte offset
   into the underlying MEM.  */
1358 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1359 rtx inner = SUBREG_REG (op);
/* On big-endian targets the significant bytes sit at the high end of
   a word, so correct the offset for narrow modes.  */
1361 if (BYTES_BIG_ENDIAN)
1362 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1363 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1365 if (mode != VOIDmode && GET_MODE (op) != mode)
1366 return 0;
1368 /* The only way that we can have a general_operand as the resulting
1369 address is if OFFSET is zero and the address already is an operand
1370 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1371 operand. */
1373 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1374 || (GET_CODE (XEXP (inner, 0)) == PLUS
1375 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1376 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1377 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Ordinary case: a MEM whose address itself is a general_operand.  */
1380 return (GET_CODE (op) == MEM
1381 && memory_operand (op, mode)
1382 && general_operand (XEXP (op, 0), Pmode));
1385 /* Return 1 if this is a comparison operator. This allows the use of
1386 MATCH_OPERATOR to recognize all the branch insns. */
1389 comparison_operator (op, mode)
1390 register rtx op;
1391 enum machine_mode mode;
1393 return ((mode == VOIDmode || GET_MODE (op) == mode)
1394 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1397 /* If BODY is an insn body that uses ASM_OPERANDS,
1398 return the number of operands (both input and output) in the insn.
1399 Otherwise return -1. */
1402 asm_noperands (body)
1403 rtx body;
1405 switch (GET_CODE (body))
1407 case ASM_OPERANDS:
1408 /* No output operands: return number of input operands. */
1409 return ASM_OPERANDS_INPUT_LENGTH (body);
1410 case SET:
1411 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1412 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1413 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1414 else
1415 return -1;
1416 case PARALLEL:
1417 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1418 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1420 /* Multiple output operands, or 1 output plus some clobbers:
1421 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1422 int i;
1423 int n_sets;
1425 /* Count backwards through CLOBBERs to determine number of SETs. */
1426 for (i = XVECLEN (body, 0); i > 0; i--)
1428 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1429 break;
/* Anything trailing the SETs that is not a CLOBBER makes this
   pattern unrecognizable as an asm.  */
1430 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1431 return -1;
1434 /* N_SETS is now number of output operands. */
1435 n_sets = i;
1437 /* Verify that all the SETs we have
1438 came from a single original asm_operands insn
1439 (so that invalid combinations are blocked). */
1440 for (i = 0; i < n_sets; i++)
1442 rtx elt = XVECEXP (body, 0, i);
1443 if (GET_CODE (elt) != SET)
1444 return -1;
1445 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1446 return -1;
1447 /* If these ASM_OPERANDS rtx's came from different original insns
1448 then they aren't allowed together. */
1449 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1450 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1451 return -1;
/* Total operand count is the inputs of the shared asm plus one
   per output SET.  */
1453 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1454 + n_sets);
1456 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1458 /* 0 outputs, but some clobbers:
1459 body is [(asm_operands ...) (clobber (reg ...))...]. */
1460 int i;
1462 /* Make sure all the other parallel things really are clobbers. */
1463 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1464 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1465 return -1;
1467 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1469 else
1470 return -1;
1471 default:
1472 return -1;
1476 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1477 copy its operands (both input and output) into the vector OPERANDS,
1478 the locations of the operands within the insn into the vector OPERAND_LOCS,
1479 and the constraints for the operands into CONSTRAINTS.
1480 Write the modes of the operands into MODES.
1481 Return the assembler-template.
1483 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1484 we don't store that info. */
1486 const char *
1487 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1488 rtx body;
1489 rtx *operands;
1490 rtx **operand_locs;
1491 const char **constraints;
1492 enum machine_mode *modes;
1494 register int i;
1495 int noperands;
/* Stays 0 when BODY has none of the recognized asm shapes below.  */
1496 const char *template = 0;
1498 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1500 rtx asmop = SET_SRC (body);
1501 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1503 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Inputs occupy operand slots 1..noperands-1; slot 0 is the output.  */
1505 for (i = 1; i < noperands; i++)
1507 if (operand_locs)
1508 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1509 if (operands)
1510 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1511 if (constraints)
1512 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1513 if (modes)
1514 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1517 /* The output is in the SET.
1518 Its constraint is in the ASM_OPERANDS itself. */
1519 if (operands)
1520 operands[0] = SET_DEST (body);
1521 if (operand_locs)
1522 operand_locs[0] = &SET_DEST (body);
1523 if (constraints)
1524 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1525 if (modes)
1526 modes[0] = GET_MODE (SET_DEST (body));
1527 template = ASM_OPERANDS_TEMPLATE (asmop);
1529 else if (GET_CODE (body) == ASM_OPERANDS)
1531 rtx asmop = body;
1532 /* No output operands: BODY is (asm_operands ....). */
1534 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1536 /* The input operands are found in the 1st element vector. */
1537 /* Constraints for inputs are in the 2nd element vector. */
1538 for (i = 0; i < noperands; i++)
1540 if (operand_locs)
1541 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1542 if (operands)
1543 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1544 if (constraints)
1545 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1546 if (modes)
1547 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1549 template = ASM_OPERANDS_TEMPLATE (asmop);
1551 else if (GET_CODE (body) == PARALLEL
1552 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1554 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1555 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1556 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1557 int nout = 0; /* Does not include CLOBBERs. */
1559 /* At least one output, plus some CLOBBERs. */
1561 /* The outputs are in the SETs.
1562 Their constraints are in the ASM_OPERANDS itself. */
1563 for (i = 0; i < nparallel; i++)
1565 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1566 break; /* Past last SET */
1568 if (operands)
1569 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1570 if (operand_locs)
1571 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1572 if (constraints)
1573 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1574 if (modes)
1575 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1576 nout++;
/* Inputs are numbered after all the outputs.  */
1579 for (i = 0; i < nin; i++)
1581 if (operand_locs)
1582 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1583 if (operands)
1584 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1585 if (constraints)
1586 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1587 if (modes)
1588 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1591 template = ASM_OPERANDS_TEMPLATE (asmop);
1593 else if (GET_CODE (body) == PARALLEL
1594 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1596 /* No outputs, but some CLOBBERs. */
1598 rtx asmop = XVECEXP (body, 0, 0);
1599 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1601 for (i = 0; i < nin; i++)
1603 if (operand_locs)
1604 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1605 if (operands)
1606 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1607 if (constraints)
1608 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1609 if (modes)
1610 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1613 template = ASM_OPERANDS_TEMPLATE (asmop);
1616 return template;
1619 /* Check if an asm_operand matches it's constraints.
1620 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1623 asm_operand_ok (op, constraint)
1624 rtx op;
1625 const char *constraint;
1627 int result = 0;
1629 /* Use constrain_operands after reload. */
1630 if (reload_completed)
1631 abort ();
/* Scan the constraint string; the first letter OP satisfies wins.  */
1633 while (*constraint)
1635 char c = *constraint++;
1636 switch (c)
1638 case '=':
1639 case '+':
1640 case '*':
1641 case '%':
1642 case '?':
1643 case '!':
1644 case '#':
1645 case '&':
1646 case ',':
1647 break;
1649 case '0': case '1': case '2': case '3': case '4':
1650 case '5': case '6': case '7': case '8': case '9':
1651 /* For best results, our caller should have given us the
1652 proper matching constraint, but we can't actually fail
1653 the check if they didn't. Indicate that results are
1654 inconclusive. */
1655 result = -1;
1656 break;
1658 case 'p':
1659 if (address_operand (op, VOIDmode))
1660 return 1;
1661 break;
1663 case 'm':
1664 case 'V': /* non-offsettable */
1665 if (memory_operand (op, VOIDmode))
1666 return 1;
1667 break;
1669 case 'o': /* offsettable */
1670 if (offsettable_nonstrict_memref_p (op))
1671 return 1;
1672 break;
1674 case '<':
1675 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1676 excepting those that expand_call created. Further, on some
1677 machines which do not have generalized auto inc/dec, an inc/dec
1678 is not a memory_operand.
1680 Match any memory and hope things are resolved after reload. */
1682 if (GET_CODE (op) == MEM
1683 && (1
1684 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1685 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1686 return 1;
1687 break;
1689 case '>':
1690 if (GET_CODE (op) == MEM
1691 && (1
1692 || GET_CODE (XEXP (op, 0)) == PRE_INC
1693 || GET_CODE (XEXP (op, 0)) == POST_INC))
1694 return 1;
1695 break;
1697 case 'E':
1698 #ifndef REAL_ARITHMETIC
1699 /* Match any floating double constant, but only if
1700 we can examine the bits of it reliably. */
1701 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1702 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1703 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1704 break;
1705 #endif
1706 /* FALLTHRU */
1708 case 'F':
1709 if (GET_CODE (op) == CONST_DOUBLE)
1710 return 1;
1711 break;
1713 case 'G':
1714 if (GET_CODE (op) == CONST_DOUBLE
1715 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1716 return 1;
1717 break;
1718 case 'H':
1719 if (GET_CODE (op) == CONST_DOUBLE
1720 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1721 return 1;
1722 break;
/* 's' rejects explicit integer constants, then falls through to the
   general constant test for 'i'.  */
1724 case 's':
1725 if (GET_CODE (op) == CONST_INT
1726 || (GET_CODE (op) == CONST_DOUBLE
1727 && GET_MODE (op) == VOIDmode))
1728 break;
1729 /* FALLTHRU */
1731 case 'i':
1732 if (CONSTANT_P (op)
1733 #ifdef LEGITIMATE_PIC_OPERAND_P
1734 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1735 #endif
1737 return 1;
1738 break;
1740 case 'n':
1741 if (GET_CODE (op) == CONST_INT
1742 || (GET_CODE (op) == CONST_DOUBLE
1743 && GET_MODE (op) == VOIDmode))
1744 return 1;
1745 break;
1747 case 'I':
1748 if (GET_CODE (op) == CONST_INT
1749 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1750 return 1;
1751 break;
1752 case 'J':
1753 if (GET_CODE (op) == CONST_INT
1754 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1755 return 1;
1756 break;
1757 case 'K':
1758 if (GET_CODE (op) == CONST_INT
1759 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1760 return 1;
1761 break;
1762 case 'L':
1763 if (GET_CODE (op) == CONST_INT
1764 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1765 return 1;
1766 break;
1767 case 'M':
1768 if (GET_CODE (op) == CONST_INT
1769 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1770 return 1;
1771 break;
1772 case 'N':
1773 if (GET_CODE (op) == CONST_INT
1774 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1775 return 1;
1776 break;
1777 case 'O':
1778 if (GET_CODE (op) == CONST_INT
1779 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1780 return 1;
1781 break;
1782 case 'P':
1783 if (GET_CODE (op) == CONST_INT
1784 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1785 return 1;
1786 break;
1788 case 'X':
1789 return 1;
1791 case 'g':
1792 if (general_operand (op, VOIDmode))
1793 return 1;
1794 break;
/* NOTE: the `case 'r':' label below sits INSIDE the if's body, a
   deliberate switch trick: plain 'r' jumps straight to the register
   test, while other letters reach it only when they name a register
   class.  */
1796 default:
1797 /* For all other letters, we first check for a register class,
1798 otherwise it is an EXTRA_CONSTRAINT. */
1799 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1801 case 'r':
1802 if (GET_MODE (op) == BLKmode)
1803 break;
1804 if (register_operand (op, VOIDmode))
1805 return 1;
1807 #ifdef EXTRA_CONSTRAINT
1808 if (EXTRA_CONSTRAINT (op, c))
1809 return 1;
1810 #endif
1811 break;
1815 return result;
1818 /* Given an rtx *P, if it is a sum containing an integer constant term,
1819 return the location (type rtx *) of the pointer to that constant term.
1820 Otherwise, return a null pointer. */
1822 static rtx *
1823 find_constant_term_loc (p)
1824 rtx *p;
1826 register rtx *tem;
1827 register enum rtx_code code = GET_CODE (*p);
1829 /* If *P IS such a constant term, P is its location. */
1831 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1832 || code == CONST)
1833 return p;
1835 /* Otherwise, if not a sum, it has no constant term. */
1837 if (GET_CODE (*p) != PLUS)
1838 return 0;
1840 /* If one of the summands is constant, return its location. */
1842 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1843 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1844 return p;
1846 /* Otherwise, check each summand for containing a constant term. */
1848 if (XEXP (*p, 0) != 0)
1850 tem = find_constant_term_loc (&XEXP (*p, 0));
1851 if (tem != 0)
1852 return tem;
1855 if (XEXP (*p, 1) != 0)
1857 tem = find_constant_term_loc (&XEXP (*p, 1));
1858 if (tem != 0)
1859 return tem;
1862 return 0;
1865 /* Return 1 if OP is a memory reference
1866 whose address contains no side effects
1867 and remains valid after the addition
1868 of a positive integer less than the
1869 size of the object being referenced.
1871 We assume that the original address is valid and do not check it.
1873 This uses strict_memory_address_p as a subroutine, so
1874 don't use it before reload. */
1877 offsettable_memref_p (op)
1878 rtx op;
1880 return ((GET_CODE (op) == MEM)
1881 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1884 /* Similar, but don't require a strictly valid mem ref:
1885 consider pseudo-regs valid as index or base regs. */
1888 offsettable_nonstrict_memref_p (op)
1889 rtx op;
1891 return ((GET_CODE (op) == MEM)
1892 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1895 /* Return 1 if Y is a memory address which contains no side effects
1896 and would remain valid after the addition of a positive integer
1897 less than the size of that mode.
1899 We assume that the original address is valid and do not check it.
1900 We do check that it is valid for narrower modes.
1902 If STRICTP is nonzero, we require a strictly valid address,
1903 for the sake of use in reload.c. */
1906 offsettable_address_p (strictp, mode, y)
1907 int strictp;
1908 enum machine_mode mode;
1909 register rtx y;
1911 register enum rtx_code ycode = GET_CODE (y);
1912 register rtx z;
1913 rtx y1 = y;
1914 rtx *y2;
/* Pick the strict or nonstrict address checker up front.  */
1915 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1916 (strictp ? strict_memory_address_p : memory_address_p);
1917 unsigned int mode_sz = GET_MODE_SIZE (mode);
1919 if (CONSTANT_ADDRESS_P (y))
1920 return 1;
1922 /* Adjusting an offsettable address involves changing to a narrower mode.
1923 Make sure that's OK. */
1925 if (mode_dependent_address_p (y))
1926 return 0;
1928 /* ??? How much offset does an offsettable BLKmode reference need?
1929 Clearly that depends on the situation in which it's being used.
1930 However, the current situation in which we test 0xffffffff is
1931 less than ideal. Caveat user. */
1932 if (mode_sz == 0)
1933 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1935 /* If the expression contains a constant term,
1936 see if it remains valid when max possible offset is added. */
1938 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1940 int good;
/* The constant term is overwritten in place, tested, and then
   restored below -- Y itself is temporarily modified.  */
1942 y1 = *y2;
1943 *y2 = plus_constant (*y2, mode_sz - 1);
1944 /* Use QImode because an odd displacement may be automatically invalid
1945 for any wider mode. But it should be valid for a single byte. */
1946 good = (*addressp) (QImode, y);
1948 /* In any case, restore old contents of memory. */
1949 *y2 = y1;
1950 return good;
/* rtx class 'a' is autoincrement/decrement; such addresses are never
   offsettable.  */
1953 if (GET_RTX_CLASS (ycode) == 'a')
1954 return 0;
1956 /* The offset added here is chosen as the maximum offset that
1957 any instruction could need to add when operating on something
1958 of the specified mode. We assume that if Y and Y+c are
1959 valid addresses then so is Y+d for all 0<d<c. */
1961 z = plus_constant_for_output (y, mode_sz - 1);
1963 /* Use QImode because an odd displacement may be automatically invalid
1964 for any wider mode. But it should be valid for a single byte. */
1965 return (*addressp) (QImode, z);
1968 /* Return 1 if ADDR is an address-expression whose effect depends
1969 on the mode of the memory reference it is used in.
1971 Autoincrement addressing is a typical example of mode-dependence
1972 because the amount of the increment depends on the mode. */
1975 mode_dependent_address_p (addr)
1976 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1978 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1979 return 0;
1980 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1981 win: ATTRIBUTE_UNUSED_LABEL
1982 return 1;
1985 /* Return 1 if OP is a general operand
1986 other than a memory ref with a mode dependent address. */
1989 mode_independent_operand (op, mode)
1990 enum machine_mode mode;
1991 rtx op;
1993 rtx addr;
1995 if (! general_operand (op, mode))
1996 return 0;
1998 if (GET_CODE (op) != MEM)
1999 return 1;
2001 addr = XEXP (op, 0);
2002 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2003 return 1;
2004 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2005 lose: ATTRIBUTE_UNUSED_LABEL
2006 return 0;
2009 /* Given an operand OP that is a valid memory reference which
2010 satisfies offsettable_memref_p, return a new memory reference whose
2011 address has been adjusted by OFFSET. OFFSET should be positive and
2012 less than the size of the object referenced. */
2015 adj_offsettable_operand (op, offset)
2016 rtx op;
2017 int offset;
2019 register enum rtx_code code = GET_CODE (op);
2021 if (code == MEM)
2023 register rtx y = XEXP (op, 0);
2024 register rtx new;
/* Constant address: just build a fresh MEM at address + OFFSET.  */
2026 if (CONSTANT_ADDRESS_P (y))
2028 new = gen_rtx_MEM (GET_MODE (op),
2029 plus_constant_for_output (y, offset));
2030 MEM_COPY_ATTRIBUTES (new, op);
2031 return new;
/* Address is a sum: copy the MEM and bump its constant term in
   place, if one can be found.  */
2034 if (GET_CODE (y) == PLUS)
2036 rtx z = y;
2037 register rtx *const_loc;
2039 op = copy_rtx (op);
2040 z = XEXP (op, 0);
2041 const_loc = find_constant_term_loc (&z);
2042 if (const_loc)
2044 *const_loc = plus_constant_for_output (*const_loc, offset);
2045 return op;
/* Fallback (no constant term found, or address is neither constant
   nor PLUS): wrap the original address in a new sum.  */
2049 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
2050 MEM_COPY_ATTRIBUTES (new, op);
2051 return new;
/* Not a MEM: the caller violated the contract above.  */
2053 abort ();
2056 /* Like extract_insn, but save insn extracted and don't extract again, when
2057 called again for the same insn expecting that recog_data still contain the
2058 valid information. This is used primary by gen_attr infrastructure that
2059 often does extract insn again and again. */
2060 void
2061 extract_insn_cached (insn)
2062 rtx insn;
2064 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2065 return;
2066 extract_insn (insn);
2067 recog_data.insn = insn;
2069 /* Do cached extract_insn, constrain_operand and complain about failures.
2070 Used by insn_attrtab. */
2071 void
2072 extract_constrain_insn_cached (insn)
2073 rtx insn;
2075 extract_insn_cached (insn);
2076 if (which_alternative == -1
2077 && !constrain_operands (reload_completed))
2078 fatal_insn_not_found (insn);
2080 /* Do cached constrain_operand and complain about failures. */
2082 constrain_operands_cached (strict)
2083 int strict;
2085 if (which_alternative == -1)
2086 return constrain_operands (strict);
2087 else
2088 return 1;
2091 /* Analyze INSN and fill in recog_data. */
2093 void
2094 extract_insn (insn)
2095 rtx insn;
2097 int i;
2098 int icode;
2099 int noperands;
2100 rtx body = PATTERN (insn);
/* Reset recog_data; -1 means no alternative has been chosen yet.  */
2102 recog_data.insn = NULL;
2103 recog_data.n_operands = 0;
2104 recog_data.n_alternatives = 0;
2105 recog_data.n_dups = 0;
2106 which_alternative = -1;
2108 switch (GET_CODE (body))
2110 case USE:
2111 case CLOBBER:
2112 case ASM_INPUT:
2113 case ADDR_VEC:
2114 case ADDR_DIFF_VEC:
2115 return;
2117 case SET:
2118 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2119 goto asm_insn;
2120 else
2121 goto normal_insn;
2122 case PARALLEL:
2123 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2124 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2125 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2126 goto asm_insn;
2127 else
2128 goto normal_insn;
2129 case ASM_OPERANDS:
2130 asm_insn:
2131 recog_data.n_operands = noperands = asm_noperands (body);
2132 if (noperands >= 0)
2134 /* This insn is an `asm' with operands. */
2136 /* expand_asm_operands makes sure there aren't too many operands. */
2137 if (noperands > MAX_RECOG_OPERANDS)
2138 abort ();
2140 /* Now get the operand values and constraints out of the insn. */
2141 decode_asm_operands (body, recog_data.operand,
2142 recog_data.operand_loc,
2143 recog_data.constraints,
2144 recog_data.operand_mode);
2145 if (noperands > 0)
2147 const char *p = recog_data.constraints[0];
/* Alternatives are comma-separated within a constraint string, so
   count commas to find how many there are.  */
2148 recog_data.n_alternatives = 1;
2149 while (*p)
2150 recog_data.n_alternatives += (*p++ == ',');
2152 break;
2154 fatal_insn_not_found (insn);
2156 default:
2157 normal_insn:
2158 /* Ordinary insn: recognize it, get the operands via insn_extract
2159 and get the constraints. */
2161 icode = recog_memoized (insn);
2162 if (icode < 0)
2163 fatal_insn_not_found (insn);
2165 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2166 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2167 recog_data.n_dups = insn_data[icode].n_dups;
2169 insn_extract (insn);
2171 for (i = 0; i < noperands; i++)
2173 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2174 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2175 /* VOIDmode match_operands gets mode from their real operand. */
2176 if (recog_data.operand_mode[i] == VOIDmode)
2177 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Classify each operand from its first constraint character:
   '=' writes, '+' reads and writes, anything else only reads.  */
2180 for (i = 0; i < noperands; i++)
2181 recog_data.operand_type[i]
2182 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2183 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2184 : OP_IN);
2186 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2187 abort ();
2190 /* After calling extract_insn, you can use this function to extract some
2191 information from the constraint strings into a more usable form.
2192 The collected data is stored in recog_op_alt. */
2193 void
2194 preprocess_constraints ()
2196 int i;
2198 memset (recog_op_alt, 0, sizeof recog_op_alt)
2199 for (i = 0; i < recog_data.n_operands; i++)
2201 int j;
2202 struct operand_alternative *op_alt;
2203 const char *p = recog_data.constraints[i];
2205 op_alt = recog_op_alt[i];
/* Walk each alternative of operand I; P advances through the
   comma-separated constraint string as we go.  */
2207 for (j = 0; j < recog_data.n_alternatives; j++)
2209 op_alt[j].class = NO_REGS;
2210 op_alt[j].constraint = p;
2211 op_alt[j].matches = -1;
2212 op_alt[j].matched = -1;
/* An empty alternative accepts anything.  */
2214 if (*p == '\0' || *p == ',')
2216 op_alt[j].anything_ok = 1;
2217 continue;
2220 for (;;)
2222 char c = *p++;
/* '#' comments out the rest of this alternative.  */
2223 if (c == '#')
2225 c = *p++;
2226 while (c != ',' && c != '\0');
2227 if (c == ',' || c == '\0')
2228 break;
2230 switch (c)
2232 case '=': case '+': case '*': case '%':
2233 case 'E': case 'F': case 'G': case 'H':
2234 case 's': case 'i': case 'n':
2235 case 'I': case 'J': case 'K': case 'L':
2236 case 'M': case 'N': case 'O': case 'P':
2237 /* These don't say anything we care about. */
2238 break;
/* '?' and '!' disparage the alternative: raise its reject cost.  */
2240 case '?':
2241 op_alt[j].reject += 6;
2242 break;
2243 case '!':
2244 op_alt[j].reject += 600;
2245 break;
2246 case '&':
2247 op_alt[j].earlyclobber = 1;
2248 break;
/* A digit means this operand must match operand C - '0'; record
   the link in both directions.  */
2250 case '0': case '1': case '2': case '3': case '4':
2251 case '5': case '6': case '7': case '8': case '9':
2252 op_alt[j].matches = c - '0';
2253 recog_op_alt[op_alt[j].matches][j].matched = i;
2254 break;
2256 case 'm':
2257 op_alt[j].memory_ok = 1;
2258 break;
2259 case '<':
2260 op_alt[j].decmem_ok = 1;
2261 break;
2262 case '>':
2263 op_alt[j].incmem_ok = 1;
2264 break;
2265 case 'V':
2266 op_alt[j].nonoffmem_ok = 1;
2267 break;
2268 case 'o':
2269 op_alt[j].offmem_ok = 1;
2270 break;
2271 case 'X':
2272 op_alt[j].anything_ok = 1;
2273 break;
2275 case 'p':
2276 op_alt[j].is_address = 1;
2277 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2278 break;
2280 case 'g': case 'r':
2281 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2282 break;
/* Any other letter names a machine-specific register class.  */
2284 default:
2285 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2286 break;
2293 /* Check the operands of an insn against the insn's operand constraints
2294 and return 1 if they are valid.
2295 The information about the insn's operands, constraints, operand modes
2296 etc. is obtained from the global variables set up by extract_insn.
2298 WHICH_ALTERNATIVE is set to a number which indicates which
2299 alternative of constraints was matched: 0 for the first alternative,
2300 1 for the next, etc.
2302 In addition, when two operands are match
2303 and it happens that the output operand is (reg) while the
2304 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2305 make the output operand look like the input.
2306 This is because the output operand is the one the template will print.
2308 This is used in final, just before printing the assembler code and by
2309 the routines that determine an insn's attribute.
2311 If STRICT is a positive non-zero value, it means that we have been
2312 called after reload has been completed. In that case, we must
2313 do all checks strictly. If it is zero, it means that we have been called
2314 before reload has completed. In that case, we first try to see if we can
2315 find an alternative that matches strictly. If not, we try again, this
2316 time assuming that reload will fix up the insn. This provides a "best
2317 guess" for the alternative and is used to compute attributes of insns prior
2318 to reload. A negative value of STRICT is used for this internal call. */
2320 struct funny_match
2322 int this, other;
2326 constrain_operands (strict)
2327 int strict;
2329 const char *constraints[MAX_RECOG_OPERANDS];
2330 int matching_operands[MAX_RECOG_OPERANDS];
2331 int earlyclobber[MAX_RECOG_OPERANDS];
2332 register int c;
2334 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2335 int funny_match_index;
2337 which_alternative = 0;
2338 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2339 return 1;
2341 for (c = 0; c < recog_data.n_operands; c++)
2343 constraints[c] = recog_data.constraints[c];
2344 matching_operands[c] = -1;
2349 register int opno;
2350 int lose = 0;
2351 funny_match_index = 0;
2353 for (opno = 0; opno < recog_data.n_operands; opno++)
2355 register rtx op = recog_data.operand[opno];
2356 enum machine_mode mode = GET_MODE (op);
2357 register const char *p = constraints[opno];
2358 int offset = 0;
2359 int win = 0;
2360 int val;
2362 earlyclobber[opno] = 0;
2364 /* A unary operator may be accepted by the predicate, but it
2365 is irrelevant for matching constraints. */
2366 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2367 op = XEXP (op, 0);
2369 if (GET_CODE (op) == SUBREG)
2371 if (GET_CODE (SUBREG_REG (op)) == REG
2372 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2373 offset = SUBREG_WORD (op);
2374 op = SUBREG_REG (op);
2377 /* An empty constraint or empty alternative
2378 allows anything which matched the pattern. */
2379 if (*p == 0 || *p == ',')
2380 win = 1;
2382 while (*p && (c = *p++) != ',')
2383 switch (c)
2385 case '?': case '!': case '*': case '%':
2386 case '=': case '+':
2387 break;
2389 case '#':
2390 /* Ignore rest of this alternative as far as
2391 constraint checking is concerned. */
2392 while (*p && *p != ',')
2393 p++;
2394 break;
2396 case '&':
2397 earlyclobber[opno] = 1;
2398 break;
2400 case '0': case '1': case '2': case '3': case '4':
2401 case '5': case '6': case '7': case '8': case '9':
2403 /* This operand must be the same as a previous one.
2404 This kind of constraint is used for instructions such
2405 as add when they take only two operands.
2407 Note that the lower-numbered operand is passed first.
2409 If we are not testing strictly, assume that this constraint
2410 will be satisfied. */
2411 if (strict < 0)
2412 val = 1;
2413 else
2415 rtx op1 = recog_data.operand[c - '0'];
2416 rtx op2 = recog_data.operand[opno];
2418 /* A unary operator may be accepted by the predicate,
2419 but it is irrelevant for matching constraints. */
2420 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2421 op1 = XEXP (op1, 0);
2422 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2423 op2 = XEXP (op2, 0);
2425 val = operands_match_p (op1, op2);
2428 matching_operands[opno] = c - '0';
2429 matching_operands[c - '0'] = opno;
2431 if (val != 0)
2432 win = 1;
2433 /* If output is *x and input is *--x,
2434 arrange later to change the output to *--x as well,
2435 since the output op is the one that will be printed. */
2436 if (val == 2 && strict > 0)
2438 funny_match[funny_match_index].this = opno;
2439 funny_match[funny_match_index++].other = c - '0';
2441 break;
2443 case 'p':
2444 /* p is used for address_operands. When we are called by
2445 gen_reload, no one will have checked that the address is
2446 strictly valid, i.e., that all pseudos requiring hard regs
2447 have gotten them. */
2448 if (strict <= 0
2449 || (strict_memory_address_p (recog_data.operand_mode[opno],
2450 op)))
2451 win = 1;
2452 break;
2454 /* No need to check general_operand again;
2455 it was done in insn-recog.c. */
2456 case 'g':
2457 /* Anything goes unless it is a REG and really has a hard reg
2458 but the hard reg is not in the class GENERAL_REGS. */
2459 if (strict < 0
2460 || GENERAL_REGS == ALL_REGS
2461 || GET_CODE (op) != REG
2462 || (reload_in_progress
2463 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2464 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2465 win = 1;
2466 break;
2468 case 'X':
2469 /* This is used for a MATCH_SCRATCH in the cases when
2470 we don't actually need anything. So anything goes
2471 any time. */
2472 win = 1;
2473 break;
2475 case 'm':
2476 if (GET_CODE (op) == MEM
2477 /* Before reload, accept what reload can turn into mem. */
2478 || (strict < 0 && CONSTANT_P (op))
2479 /* During reload, accept a pseudo */
2480 || (reload_in_progress && GET_CODE (op) == REG
2481 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2482 win = 1;
2483 break;
2485 case '<':
2486 if (GET_CODE (op) == MEM
2487 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2488 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2489 win = 1;
2490 break;
2492 case '>':
2493 if (GET_CODE (op) == MEM
2494 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2495 || GET_CODE (XEXP (op, 0)) == POST_INC))
2496 win = 1;
2497 break;
2499 case 'E':
2500 #ifndef REAL_ARITHMETIC
2501 /* Match any CONST_DOUBLE, but only if
2502 we can examine the bits of it reliably. */
2503 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2504 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2505 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2506 break;
2507 #endif
2508 if (GET_CODE (op) == CONST_DOUBLE)
2509 win = 1;
2510 break;
2512 case 'F':
2513 if (GET_CODE (op) == CONST_DOUBLE)
2514 win = 1;
2515 break;
2517 case 'G':
2518 case 'H':
2519 if (GET_CODE (op) == CONST_DOUBLE
2520 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2521 win = 1;
2522 break;
2524 case 's':
2525 if (GET_CODE (op) == CONST_INT
2526 || (GET_CODE (op) == CONST_DOUBLE
2527 && GET_MODE (op) == VOIDmode))
2528 break;
2529 case 'i':
2530 if (CONSTANT_P (op))
2531 win = 1;
2532 break;
2534 case 'n':
2535 if (GET_CODE (op) == CONST_INT
2536 || (GET_CODE (op) == CONST_DOUBLE
2537 && GET_MODE (op) == VOIDmode))
2538 win = 1;
2539 break;
2541 case 'I':
2542 case 'J':
2543 case 'K':
2544 case 'L':
2545 case 'M':
2546 case 'N':
2547 case 'O':
2548 case 'P':
2549 if (GET_CODE (op) == CONST_INT
2550 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2551 win = 1;
2552 break;
2554 case 'V':
2555 if (GET_CODE (op) == MEM
2556 && ((strict > 0 && ! offsettable_memref_p (op))
2557 || (strict < 0
2558 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2559 || (reload_in_progress
2560 && !(GET_CODE (op) == REG
2561 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2562 win = 1;
2563 break;
2565 case 'o':
2566 if ((strict > 0 && offsettable_memref_p (op))
2567 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2568 /* Before reload, accept what reload can handle. */
2569 || (strict < 0
2570 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2571 /* During reload, accept a pseudo */
2572 || (reload_in_progress && GET_CODE (op) == REG
2573 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2574 win = 1;
2575 break;
2577 default:
2579 enum reg_class class;
2581 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2582 if (class != NO_REGS)
2584 if (strict < 0
2585 || (strict == 0
2586 && GET_CODE (op) == REG
2587 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2588 || (strict == 0 && GET_CODE (op) == SCRATCH)
2589 || (GET_CODE (op) == REG
2590 && reg_fits_class_p (op, class, offset, mode)))
2591 win = 1;
2593 #ifdef EXTRA_CONSTRAINT
2594 else if (EXTRA_CONSTRAINT (op, c))
2595 win = 1;
2596 #endif
2597 break;
2601 constraints[opno] = p;
2602 /* If this operand did not win somehow,
2603 this alternative loses. */
2604 if (! win)
2605 lose = 1;
2607 /* This alternative won; the operands are ok.
2608 Change whichever operands this alternative says to change. */
2609 if (! lose)
2611 int opno, eopno;
2613 /* See if any earlyclobber operand conflicts with some other
2614 operand. */
2616 if (strict > 0)
2617 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2618 /* Ignore earlyclobber operands now in memory,
2619 because we would often report failure when we have
2620 two memory operands, one of which was formerly a REG. */
2621 if (earlyclobber[eopno]
2622 && GET_CODE (recog_data.operand[eopno]) == REG)
2623 for (opno = 0; opno < recog_data.n_operands; opno++)
2624 if ((GET_CODE (recog_data.operand[opno]) == MEM
2625 || recog_data.operand_type[opno] != OP_OUT)
2626 && opno != eopno
2627 /* Ignore things like match_operator operands. */
2628 && *recog_data.constraints[opno] != 0
2629 && ! (matching_operands[opno] == eopno
2630 && operands_match_p (recog_data.operand[opno],
2631 recog_data.operand[eopno]))
2632 && ! safe_from_earlyclobber (recog_data.operand[opno],
2633 recog_data.operand[eopno]))
2634 lose = 1;
2636 if (! lose)
2638 while (--funny_match_index >= 0)
2640 recog_data.operand[funny_match[funny_match_index].other]
2641 = recog_data.operand[funny_match[funny_match_index].this];
2644 return 1;
2648 which_alternative++;
2650 while (which_alternative < recog_data.n_alternatives);
2652 which_alternative = -1;
2653 /* If we are about to reject this, but we are not to test strictly,
2654 try a very loose test. Only return failure if it fails also. */
2655 if (strict == 0)
2656 return constrain_operands (-1);
2657 else
2658 return 0;
2661 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2662 is a hard reg in class CLASS when its regno is offset by OFFSET
2663 and changed to mode MODE.
2664 If REG occupies multiple hard regs, all of them must be in CLASS. */
2667 reg_fits_class_p (operand, class, offset, mode)
2668 rtx operand;
2669 register enum reg_class class;
2670 int offset;
2671 enum machine_mode mode;
2673 register int regno = REGNO (operand);
2674 if (regno < FIRST_PSEUDO_REGISTER
2675 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2676 regno + offset))
2678 register int sr;
2679 regno += offset;
2680 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2681 sr > 0; sr--)
2682 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2683 regno + sr))
2684 break;
2685 return sr == 0;
2688 return 0;
2691 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2693 void
2694 split_all_insns (upd_life)
2695 int upd_life;
2697 sbitmap blocks;
2698 int changed;
2699 int i;
2701 blocks = sbitmap_alloc (n_basic_blocks);
2702 sbitmap_zero (blocks);
2703 changed = 0;
2705 for (i = n_basic_blocks - 1; i >= 0; --i)
2707 basic_block bb = BASIC_BLOCK (i);
2708 rtx insn, next;
2710 for (insn = bb->head; insn ; insn = next)
2712 rtx set;
2714 /* Can't use `next_real_insn' because that might go across
2715 CODE_LABELS and short-out basic blocks. */
2716 next = NEXT_INSN (insn);
2717 if (! INSN_P (insn))
2720 /* Don't split no-op move insns. These should silently
2721 disappear later in final. Splitting such insns would
2722 break the code that handles REG_NO_CONFLICT blocks. */
2724 else if ((set = single_set (insn)) != NULL
2725 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2727 /* Nops get in the way while scheduling, so delete them
2728 now if register allocation has already been done. It
2729 is too risky to try to do this before register
2730 allocation, and there are unlikely to be very many
2731 nops then anyways. */
2732 if (reload_completed)
2734 PUT_CODE (insn, NOTE);
2735 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2736 NOTE_SOURCE_FILE (insn) = 0;
2739 else
2741 /* Split insns here to get max fine-grain parallelism. */
2742 rtx first = PREV_INSN (insn);
2743 rtx last = try_split (PATTERN (insn), insn, 1);
2745 if (last != insn)
2747 SET_BIT (blocks, i);
2748 changed = 1;
2750 /* try_split returns the NOTE that INSN became. */
2751 PUT_CODE (insn, NOTE);
2752 NOTE_SOURCE_FILE (insn) = 0;
2753 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2755 /* ??? Coddle to md files that generate subregs in post-
2756 reload splitters instead of computing the proper
2757 hard register. */
2758 if (reload_completed && first != last)
2760 first = NEXT_INSN (first);
2761 while (1)
2763 if (INSN_P (first))
2764 cleanup_subreg_operands (first);
2765 if (first == last)
2766 break;
2767 first = NEXT_INSN (first);
2771 if (insn == bb->end)
2773 bb->end = last;
2774 break;
2779 if (insn == bb->end)
2780 break;
2783 /* ??? When we're called from just after reload, the CFG is in bad
2784 shape, and we may have fallen off the end. This could be fixed
2785 by having reload not try to delete unreachable code. Otherwise
2786 assert we found the end insn. */
2787 if (insn == NULL && upd_life)
2788 abort ();
2791 if (changed && upd_life)
2793 compute_bb_for_insn (get_max_uid ());
2794 count_or_remove_death_notes (blocks, 1);
2795 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2798 sbitmap_free (blocks);
2801 #ifdef HAVE_peephole2
2802 struct peep2_insn_data
2804 rtx insn;
2805 regset live_before;
2808 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2809 static int peep2_current;
2811 /* A non-insn marker indicating the last insn of the block.
2812 The live_before regset for this element is correct, indicating
2813 global_live_at_end for the block. */
2814 #define PEEP2_EOB pc_rtx
2816 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2817 does not exist. Used by the recognizer to find the next insn to match
2818 in a multi-insn pattern. */
2821 peep2_next_insn (n)
2822 int n;
2824 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2825 abort ();
2827 n += peep2_current;
2828 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2829 n -= MAX_INSNS_PER_PEEP2 + 1;
2831 if (peep2_insn_data[n].insn == PEEP2_EOB)
2832 return NULL_RTX;
2833 return peep2_insn_data[n].insn;
2836 /* Return true if REGNO is dead before the Nth non-note insn
2837 after `current'. */
2840 peep2_regno_dead_p (ofs, regno)
2841 int ofs;
2842 int regno;
2844 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2845 abort ();
2847 ofs += peep2_current;
2848 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2849 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2851 if (peep2_insn_data[ofs].insn == NULL_RTX)
2852 abort ();
2854 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2857 /* Similarly for a REG. */
2860 peep2_reg_dead_p (ofs, reg)
2861 int ofs;
2862 rtx reg;
2864 int regno, n;
2866 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2867 abort ();
2869 ofs += peep2_current;
2870 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2871 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2873 if (peep2_insn_data[ofs].insn == NULL_RTX)
2874 abort ();
2876 regno = REGNO (reg);
2877 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2878 while (--n >= 0)
2879 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2880 return 0;
2881 return 1;
2884 /* Try to find a hard register of mode MODE, matching the register class in
2885 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2886 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2887 in which case the only condition is that the register must be available
2888 before CURRENT_INSN.
2889 Registers that already have bits set in REG_SET will not be considered.
2891 If an appropriate register is available, it will be returned and the
2892 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2893 returned. */
2896 peep2_find_free_register (from, to, class_str, mode, reg_set)
2897 int from, to;
2898 const char *class_str;
2899 enum machine_mode mode;
2900 HARD_REG_SET *reg_set;
2902 static int search_ofs;
2903 enum reg_class class;
2904 HARD_REG_SET live;
2905 int i;
2907 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2908 abort ();
2910 from += peep2_current;
2911 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2912 from -= MAX_INSNS_PER_PEEP2 + 1;
2913 to += peep2_current;
2914 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2915 to -= MAX_INSNS_PER_PEEP2 + 1;
2917 if (peep2_insn_data[from].insn == NULL_RTX)
2918 abort ();
2919 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2921 while (from != to)
2923 HARD_REG_SET this_live;
2925 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2926 from = 0;
2927 if (peep2_insn_data[from].insn == NULL_RTX)
2928 abort ();
2929 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2930 IOR_HARD_REG_SET (live, this_live);
2933 class = (class_str[0] == 'r' ? GENERAL_REGS
2934 : REG_CLASS_FROM_LETTER (class_str[0]));
2936 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2938 int raw_regno, regno, success, j;
2940 /* Distribute the free registers as much as possible. */
2941 raw_regno = search_ofs + i;
2942 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2943 raw_regno -= FIRST_PSEUDO_REGISTER;
2944 #ifdef REG_ALLOC_ORDER
2945 regno = reg_alloc_order[raw_regno];
2946 #else
2947 regno = raw_regno;
2948 #endif
2950 /* Don't allocate fixed registers. */
2951 if (fixed_regs[regno])
2952 continue;
2953 /* Make sure the register is of the right class. */
2954 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2955 continue;
2956 /* And can support the mode we need. */
2957 if (! HARD_REGNO_MODE_OK (regno, mode))
2958 continue;
2959 /* And that we don't create an extra save/restore. */
2960 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2961 continue;
2962 /* And we don't clobber traceback for noreturn functions. */
2963 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2964 && (! reload_completed || frame_pointer_needed))
2965 continue;
2967 success = 1;
2968 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2970 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2971 || TEST_HARD_REG_BIT (live, regno + j))
2973 success = 0;
2974 break;
2977 if (success)
2979 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2980 SET_HARD_REG_BIT (*reg_set, regno + j);
2982 /* Start the next search with the next register. */
2983 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2984 raw_regno = 0;
2985 search_ofs = raw_regno;
2987 return gen_rtx_REG (mode, regno);
2991 search_ofs = 0;
2992 return NULL_RTX;
2995 /* Perform the peephole2 optimization pass. */
2997 void
2998 peephole2_optimize (dump_file)
2999 FILE *dump_file ATTRIBUTE_UNUSED;
3001 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3002 rtx insn, prev;
3003 regset live;
3004 int i, b;
3005 #ifdef HAVE_conditional_execution
3006 sbitmap blocks;
3007 int changed;
3008 #endif
3010 /* Initialize the regsets we're going to use. */
3011 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3012 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3013 live = INITIALIZE_REG_SET (rs_heads[i]);
3015 #ifdef HAVE_conditional_execution
3016 blocks = sbitmap_alloc (n_basic_blocks);
3017 sbitmap_zero (blocks);
3018 changed = 0;
3019 #else
3020 count_or_remove_death_notes (NULL, 1);
3021 #endif
3023 for (b = n_basic_blocks - 1; b >= 0; --b)
3025 basic_block bb = BASIC_BLOCK (b);
3026 struct propagate_block_info *pbi;
3028 /* Indicate that all slots except the last holds invalid data. */
3029 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3030 peep2_insn_data[i].insn = NULL_RTX;
3032 /* Indicate that the last slot contains live_after data. */
3033 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3034 peep2_current = MAX_INSNS_PER_PEEP2;
3036 /* Start up propagation. */
3037 COPY_REG_SET (live, bb->global_live_at_end);
3038 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3040 #ifdef HAVE_conditional_execution
3041 pbi = init_propagate_block_info (bb, live, NULL, 0);
3042 #else
3043 pbi = init_propagate_block_info (bb, live, NULL, PROP_DEATH_NOTES);
3044 #endif
3046 for (insn = bb->end; ; insn = prev)
3048 prev = PREV_INSN (insn);
3049 if (INSN_P (insn))
3051 rtx try;
3052 int match_len;
3054 /* Record this insn. */
3055 if (--peep2_current < 0)
3056 peep2_current = MAX_INSNS_PER_PEEP2;
3057 peep2_insn_data[peep2_current].insn = insn;
3058 propagate_one_insn (pbi, insn);
3059 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3061 /* Match the peephole. */
3062 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3063 if (try != NULL)
3065 i = match_len + peep2_current;
3066 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3067 i -= MAX_INSNS_PER_PEEP2 + 1;
3069 /* Replace the old sequence with the new. */
3070 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3071 try = emit_insn_after (try, prev);
3073 /* Adjust the basic block boundaries. */
3074 if (peep2_insn_data[i].insn == bb->end)
3075 bb->end = try;
3076 if (insn == bb->head)
3077 bb->head = NEXT_INSN (prev);
3079 #ifdef HAVE_conditional_execution
3080 /* With conditional execution, we cannot back up the
3081 live information so easily, since the conditional
3082 death data structures are not so self-contained.
3083 So record that we've made a modification to this
3084 block and update life information at the end. */
3085 SET_BIT (blocks, b);
3086 changed = 1;
3088 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3089 peep2_insn_data[i].insn = NULL_RTX;
3090 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3091 #else
3092 /* Back up lifetime information past the end of the
3093 newly created sequence. */
3094 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3095 i = 0;
3096 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3098 /* Update life information for the new sequence. */
3101 if (INSN_P (try))
3103 if (--i < 0)
3104 i = MAX_INSNS_PER_PEEP2;
3105 peep2_insn_data[i].insn = try;
3106 propagate_one_insn (pbi, try);
3107 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3109 try = PREV_INSN (try);
3111 while (try != prev);
3113 /* ??? Should verify that LIVE now matches what we
3114 had before the new sequence. */
3116 peep2_current = i;
3117 #endif
3121 if (insn == bb->head)
3122 break;
3125 free_propagate_block_info (pbi);
3128 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3129 FREE_REG_SET (peep2_insn_data[i].live_before);
3130 FREE_REG_SET (live);
3132 #ifdef HAVE_conditional_execution
3133 count_or_remove_death_notes (blocks, 1);
3134 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3135 sbitmap_free (blocks);
3136 #endif
3138 #endif /* HAVE_peephole2 */