/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1	PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1		PARAMS ((rtx, rtx *));
static rtx *find_constant_term_loc	PARAMS ((rtx *));
static int insn_invalid_p		PARAMS ((rtx));

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;
struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}
/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
  return INSN_CODE (insn);
}
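
/* Illustrative sketch (not part of the original source): a typical way a
   pass consults recog_memoized after editing a pattern in place.  The
   helper name and the transformation it assumes are hypothetical.  */
#if 0
static int
example_rerecognize (insn)
     rtx insn;
{
  /* Force rerecognition after the pattern has been changed by hand;
     recog_memoized caches its result in INSN_CODE.  */
  INSN_CODE (insn) = -1;
  if (recog_memoized (insn) < 0)
    /* The edited pattern matches nothing in the machine description.  */
    return 0;
  return 1;
}
#endif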
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
	c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
	return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
    rtx object;
    rtx *loc;
    rtx new;
    int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes =
	(change_t*) xrealloc (changes,
			      sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

static int
insn_invalid_p (insn)
     rtx insn;
{
  int icode = recog_memoized (insn);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  if (is_asm && ! check_asm_operands (PATTERN (insn)))
    return 1;
  if (! is_asm && icode < 0)
    return 1;

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  return 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (object == 0)
	continue;

      if (GET_CODE (object) == MEM)
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					gen_rtvec (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
    }

  if (i == num_changes)
    {
      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
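
/* Illustrative sketch (not part of the original source): how a caller might
   use the change-group interface above to make two dependent replacements
   atomically.  The insn shape assumed here (a SET whose source is a binary
   operation) and the helper name are hypothetical.  */
#if 0
static int
example_swap_binop_operands (insn)
     rtx insn;
{
  rtx src = SET_SRC (PATTERN (insn));
  rtx op0 = XEXP (src, 0);
  rtx op1 = XEXP (src, 1);

  /* Queue both replacements with IN_GROUP nonzero; nothing is validated
     until apply_change_group is called.  */
  validate_change (insn, &XEXP (src, 0), op1, 1);
  validate_change (insn, &XEXP (src, 1), op0, 1);

  /* Either both changes stick, or apply_change_group backs both of them
     out again (through cancel_changes) and returns 0.  */
  return apply_change_group ();
}
#endif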
401 /* Replace every occurrence of FROM in X with TO. Mark each change with
402 validate_change passing OBJECT. */
404 static void
405 validate_replace_rtx_1 (loc, from, to, object)
406 rtx *loc;
407 rtx from, to, object;
409 register int i, j;
410 register const char *fmt;
411 register rtx x = *loc;
412 enum rtx_code code = GET_CODE (x);
414 /* X matches FROM if it is the same rtx or they are both referring to the
415 same register in the same mode. Avoid calling rtx_equal_p unless the
416 operands look similar. */
418 if (x == from
419 || (GET_CODE (x) == REG && GET_CODE (from) == REG
420 && GET_MODE (x) == GET_MODE (from)
421 && REGNO (x) == REGNO (from))
422 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
423 && rtx_equal_p (x, from)))
425 validate_change (object, loc, to, 1);
426 return;
429 /* For commutative or comparison operations, try replacing each argument
430 separately and seeing if we made any changes. If so, put a constant
431 argument last.*/
432 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
434 int prev_changes = num_changes;
436 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
437 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
438 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
440 validate_change (object, loc,
441 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
442 : swap_condition (code),
443 GET_MODE (x), XEXP (x, 1),
444 XEXP (x, 0)),
446 x = *loc;
447 code = GET_CODE (x);
451 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
452 done the substitution, otherwise we won't. */
454 switch (code)
456 case PLUS:
457 /* If we have a PLUS whose second operand is now a CONST_INT, use
458 plus_constant to try to simplify it. */
459 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
460 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
462 return;
464 case MINUS:
465 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
467 validate_change (object, loc,
468 plus_constant (XEXP (x, 0), - INTVAL (to)),
470 return;
472 break;
474 case ZERO_EXTEND:
475 case SIGN_EXTEND:
476 /* In these cases, the operation to be performed depends on the mode
477 of the operand. If we are replacing the operand with a VOIDmode
478 constant, we lose the information. So try to simplify the operation
479 in that case. If it fails, substitute in something that we know
480 won't be recognized. */
481 if (GET_MODE (to) == VOIDmode
482 && (XEXP (x, 0) == from
483 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
484 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
485 && REGNO (XEXP (x, 0)) == REGNO (from))))
487 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
488 GET_MODE (from));
489 if (new == 0)
490 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
492 validate_change (object, loc, new, 1);
493 return;
495 break;
497 case SUBREG:
498 /* If we have a SUBREG of a register that we are replacing and we are
499 replacing it with a MEM, make a new MEM and try replacing the
500 SUBREG with it. Don't do this if the MEM has a mode-dependent address
501 or if we would be widening it. */
503 if (SUBREG_REG (x) == from
504 && GET_CODE (from) == REG
505 && GET_CODE (to) == MEM
506 && ! mode_dependent_address_p (XEXP (to, 0))
507 && ! MEM_VOLATILE_P (to)
508 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
510 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
511 enum machine_mode mode = GET_MODE (x);
512 rtx new;
514 if (BYTES_BIG_ENDIAN)
515 offset += (MIN (UNITS_PER_WORD,
516 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
517 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
519 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
520 MEM_COPY_ATTRIBUTES (new, to);
521 validate_change (object, loc, new, 1);
522 return;
524 break;
526 case ZERO_EXTRACT:
527 case SIGN_EXTRACT:
528 /* If we are replacing a register with memory, try to change the memory
529 to be the mode required for memory in extract operations (this isn't
530 likely to be an insertion operation; if it was, nothing bad will
531 happen, we might just fail in some cases). */
533 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
534 && GET_CODE (XEXP (x, 1)) == CONST_INT
535 && GET_CODE (XEXP (x, 2)) == CONST_INT
536 && ! mode_dependent_address_p (XEXP (to, 0))
537 && ! MEM_VOLATILE_P (to))
539 enum machine_mode wanted_mode = VOIDmode;
540 enum machine_mode is_mode = GET_MODE (to);
541 int pos = INTVAL (XEXP (x, 2));
543 #ifdef HAVE_extzv
544 if (code == ZERO_EXTRACT)
546 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
547 if (wanted_mode == VOIDmode)
548 wanted_mode = word_mode;
550 #endif
551 #ifdef HAVE_extv
552 if (code == SIGN_EXTRACT)
554 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
555 if (wanted_mode == VOIDmode)
556 wanted_mode = word_mode;
558 #endif
560 /* If we have a narrower mode, we can do something. */
561 if (wanted_mode != VOIDmode
562 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
564 int offset = pos / BITS_PER_UNIT;
565 rtx newmem;
567 /* If the bytes and bits are counted differently, we
568 must adjust the offset. */
569 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
570 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
571 - offset);
573 pos %= GET_MODE_BITSIZE (wanted_mode);
575 newmem = gen_rtx_MEM (wanted_mode,
576 plus_constant (XEXP (to, 0), offset));
577 MEM_COPY_ATTRIBUTES (newmem, to);
579 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
580 validate_change (object, &XEXP (x, 0), newmem, 1);
584 break;
586 default:
587 break;
590 /* For commutative or comparison operations we've already performed
591 replacements. Don't try to perform them again. */
592 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
594 fmt = GET_RTX_FORMAT (code);
595 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
597 if (fmt[i] == 'e')
598 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
599 else if (fmt[i] == 'E')
600 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
601 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
606 /* Try replacing every occurrence of FROM in INSN with TO. After all
607 changes have been made, validate by seeing if INSN is still valid. */
610 validate_replace_rtx (from, to, insn)
611 rtx from, to, insn;
613 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
614 return apply_change_group ();
617 /* Try replacing every occurrence of FROM in INSN with TO. After all
618 changes have been made, validate by seeing if INSN is still valid. */
620 void
621 validate_replace_rtx_group (from, to, insn)
622 rtx from, to, insn;
624 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
627 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
628 SET_DESTs. After all changes have been made, validate by seeing if
629 INSN is still valid. */
632 validate_replace_src (from, to, insn)
633 rtx from, to, insn;
635 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
636 || GET_CODE (PATTERN (insn)) != SET)
637 abort ();
639 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
640 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
641 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
642 from, to, insn);
643 return apply_change_group ();
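
/* Illustrative sketch (not part of the original source): a copy-propagation
   style use of validate_replace_rtx.  The insn shapes assumed here (both
   insns are SETs) and the helper name are hypothetical.  */
#if 0
static int
example_propagate_copy (copy_insn, use_insn)
     rtx copy_insn, use_insn;
{
  rtx dest = SET_DEST (PATTERN (copy_insn));
  rtx src = SET_SRC (PATTERN (copy_insn));

  /* Try to rewrite USE_INSN to read the copy's source directly.  Every
     occurrence of DEST in USE_INSN is replaced and the result is
     re-recognized; if that fails, USE_INSN is left unchanged.  */
  return validate_replace_rtx (dest, src, use_insn);
}
#endif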
646 #ifdef HAVE_cc0
647 /* Return 1 if the insn using CC0 set by INSN does not contain
648 any ordered tests applied to the condition codes.
649 EQ and NE tests do not count. */
652 next_insn_tests_no_inequality (insn)
653 rtx insn;
655 register rtx next = next_cc0_user (insn);
657 /* If there is no next insn, we have to take the conservative choice. */
658 if (next == 0)
659 return 0;
661 return ((GET_CODE (next) == JUMP_INSN
662 || GET_CODE (next) == INSN
663 || GET_CODE (next) == CALL_INSN)
664 && ! inequality_comparisons_p (PATTERN (next)));
667 #if 0 /* This is useless since the insn that sets the cc's
668 must be followed immediately by the use of them. */
669 /* Return 1 if the CC value set up by INSN is not used. */
672 next_insns_test_no_inequality (insn)
673 rtx insn;
675 register rtx next = NEXT_INSN (insn);
677 for (; next != 0; next = NEXT_INSN (next))
679 if (GET_CODE (next) == CODE_LABEL
680 || GET_CODE (next) == BARRIER)
681 return 1;
682 if (GET_CODE (next) == NOTE)
683 continue;
684 if (inequality_comparisons_p (PATTERN (next)))
685 return 0;
686 if (sets_cc0_p (PATTERN (next)) == 1)
687 return 1;
688 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
689 return 1;
691 return 1;
693 #endif
694 #endif
696 /* This is used by find_single_use to locate an rtx that contains exactly one
697 use of DEST, which is typically either a REG or CC0. It returns a
698 pointer to the innermost rtx expression containing DEST. Appearances of
699 DEST that are being used to totally replace it are not counted. */
701 static rtx *
702 find_single_use_1 (dest, loc)
703 rtx dest;
704 rtx *loc;
706 rtx x = *loc;
707 enum rtx_code code = GET_CODE (x);
708 rtx *result = 0;
709 rtx *this_result;
710 int i;
711 const char *fmt;
713 switch (code)
715 case CONST_INT:
716 case CONST:
717 case LABEL_REF:
718 case SYMBOL_REF:
719 case CONST_DOUBLE:
720 case CLOBBER:
721 return 0;
723 case SET:
724 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
725 of a REG that occupies all of the REG, the insn uses DEST if
726 it is mentioned in the destination or the source. Otherwise, we
727 need just check the source. */
728 if (GET_CODE (SET_DEST (x)) != CC0
729 && GET_CODE (SET_DEST (x)) != PC
730 && GET_CODE (SET_DEST (x)) != REG
731 && ! (GET_CODE (SET_DEST (x)) == SUBREG
732 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
733 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
734 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
735 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
736 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
737 break;
739 return find_single_use_1 (dest, &SET_SRC (x));
741 case MEM:
742 case SUBREG:
743 return find_single_use_1 (dest, &XEXP (x, 0));
745 default:
746 break;
749 /* If it wasn't one of the common cases above, check each expression and
750 vector of this code. Look for a unique usage of DEST. */
752 fmt = GET_RTX_FORMAT (code);
753 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
755 if (fmt[i] == 'e')
757 if (dest == XEXP (x, i)
758 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
759 && REGNO (dest) == REGNO (XEXP (x, i))))
760 this_result = loc;
761 else
762 this_result = find_single_use_1 (dest, &XEXP (x, i));
764 if (result == 0)
765 result = this_result;
766 else if (this_result)
767 /* Duplicate usage. */
768 return 0;
770 else if (fmt[i] == 'E')
772 int j;
774 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
776 if (XVECEXP (x, i, j) == dest
777 || (GET_CODE (dest) == REG
778 && GET_CODE (XVECEXP (x, i, j)) == REG
779 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
780 this_result = loc;
781 else
782 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
784 if (result == 0)
785 result = this_result;
786 else if (this_result)
787 return 0;
792 return result;
795 /* See if DEST, produced in INSN, is used only a single time in the
796 sequel. If so, return a pointer to the innermost rtx expression in which
797 it is used.
799 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
801 This routine will return usually zero either before flow is called (because
802 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
803 note can't be trusted).
805 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
806 care about REG_DEAD notes or LOG_LINKS.
808 Otherwise, we find the single use by finding an insn that has a
809 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
810 only referenced once in that insn, we know that it must be the first
811 and last insn referencing DEST. */
813 rtx *
814 find_single_use (dest, insn, ploc)
815 rtx dest;
816 rtx insn;
817 rtx *ploc;
819 rtx next;
820 rtx *result;
821 rtx link;
823 #ifdef HAVE_cc0
824 if (dest == cc0_rtx)
826 next = NEXT_INSN (insn);
827 if (next == 0
828 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
829 return 0;
831 result = find_single_use_1 (dest, &PATTERN (next));
832 if (result && ploc)
833 *ploc = next;
834 return result;
836 #endif
838 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
839 return 0;
841 for (next = next_nonnote_insn (insn);
842 next != 0 && GET_CODE (next) != CODE_LABEL;
843 next = next_nonnote_insn (next))
844 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
846 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
847 if (XEXP (link, 0) == insn)
848 break;
850 if (link)
852 result = find_single_use_1 (dest, &PATTERN (next));
853 if (ploc)
854 *ploc = next;
855 return result;
859 return 0;
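
/* Illustrative sketch (not part of the original source): how a combine-like
   pass might call find_single_use.  The helper name is hypothetical.  */
#if 0
static rtx
example_find_single_user (insn, dest)
     rtx insn, dest;
{
  rtx use_insn;
  rtx *use = find_single_use (dest, insn, &use_insn);

  if (use == 0)
    /* DEST is used zero times or more than once after INSN, or the
       LOG_LINKS/REG_DEAD information needed to tell isn't available.  */
    return NULL_RTX;

  /* *USE is the innermost expression in USE_INSN that contains DEST;
     a pass could now try validate_change on that location.  */
  return use_insn;
}
#endif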
862 /* Return 1 if OP is a valid general operand for machine mode MODE.
863 This is either a register reference, a memory reference,
864 or a constant. In the case of a memory reference, the address
865 is checked for general validity for the target machine.
867 Register and memory references must have mode MODE in order to be valid,
868 but some constants have no machine mode and are valid for any mode.
870 If MODE is VOIDmode, OP is checked for validity for whatever mode
871 it has.
873 The main use of this function is as a predicate in match_operand
874 expressions in the machine description.
876 For an explanation of this function's behavior for registers of
877 class NO_REGS, see the comment for `register_operand'. */
880 general_operand (op, mode)
881 register rtx op;
882 enum machine_mode mode;
884 register enum rtx_code code = GET_CODE (op);
885 int mode_altering_drug = 0;
887 if (mode == VOIDmode)
888 mode = GET_MODE (op);
890 /* Don't accept CONST_INT or anything similar
891 if the caller wants something floating. */
892 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
893 && GET_MODE_CLASS (mode) != MODE_INT
894 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
895 return 0;
897 if (CONSTANT_P (op))
898 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
899 #ifdef LEGITIMATE_PIC_OPERAND_P
900 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
901 #endif
902 && LEGITIMATE_CONSTANT_P (op));
904 /* Except for certain constants with VOIDmode, already checked for,
905 OP's mode must match MODE if MODE specifies a mode. */
907 if (GET_MODE (op) != mode)
908 return 0;
910 if (code == SUBREG)
912 #ifdef INSN_SCHEDULING
913 /* On machines that have insn scheduling, we want all memory
914 reference to be explicit, so outlaw paradoxical SUBREGs. */
915 if (GET_CODE (SUBREG_REG (op)) == MEM
916 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
917 return 0;
918 #endif
920 op = SUBREG_REG (op);
921 code = GET_CODE (op);
922 #if 0
923 /* No longer needed, since (SUBREG (MEM...))
924 will load the MEM into a reload reg in the MEM's own mode. */
925 mode_altering_drug = 1;
926 #endif
929 if (code == REG)
930 /* A register whose class is NO_REGS is not a general operand. */
931 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
932 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
934 if (code == MEM)
936 register rtx y = XEXP (op, 0);
938 if (! volatile_ok && MEM_VOLATILE_P (op))
939 return 0;
941 if (GET_CODE (y) == ADDRESSOF)
942 return 1;
944 /* Use the mem's mode, since it will be reloaded thus. */
945 mode = GET_MODE (op);
946 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
949 /* Pretend this is an operand for now; we'll run force_operand
950 on its replacement in fixup_var_refs_1. */
951 if (code == ADDRESSOF)
952 return 1;
954 return 0;
956 win:
957 if (mode_altering_drug)
958 return ! mode_dependent_address_p (XEXP (op, 0));
959 return 1;
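
/* Illustrative sketch (not part of the original source): a typical way
   expander code uses general_operand before emitting a move.  force_reg
   (from explow.c, declared in expr.h rather than the headers included
   here) and the SImode choice are assumptions of this example.  */
#if 0
static rtx
example_force_general_operand (x)
     rtx x;
{
  /* If X would not match general_operand (e.g. an invalid memory address
     or an illegitimate constant), copy it into a fresh pseudo so the
     resulting move insn can be recognized.  */
  if (! general_operand (x, SImode))
    x = force_reg (SImode, x);
  return x;
}
#endif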
962 /* Return 1 if OP is a valid memory address for a memory reference
963 of mode MODE.
965 The main use of this function is as a predicate in match_operand
966 expressions in the machine description. */
969 address_operand (op, mode)
970 register rtx op;
971 enum machine_mode mode;
973 return memory_address_p (mode, op);
976 /* Return 1 if OP is a register reference of mode MODE.
977 If MODE is VOIDmode, accept a register in any mode.
979 The main use of this function is as a predicate in match_operand
980 expressions in the machine description.
982 As a special exception, registers whose class is NO_REGS are
983 not accepted by `register_operand'. The reason for this change
984 is to allow the representation of special architecture artifacts
985 (such as a condition code register) without extending the rtl
986 definitions. Since registers of class NO_REGS cannot be used
987 as registers in any case where register classes are examined,
988 it is most consistent to keep this function from accepting them. */
991 register_operand (op, mode)
992 register rtx op;
993 enum machine_mode mode;
995 if (GET_MODE (op) != mode && mode != VOIDmode)
996 return 0;
998 if (GET_CODE (op) == SUBREG)
1000 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1001 because it is guaranteed to be reloaded into one.
1002 Just make sure the MEM is valid in itself.
1003 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1004 but currently it does result from (SUBREG (REG)...) where the
1005 reg went on the stack.) */
1006 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1007 return general_operand (op, mode);
1009 #ifdef CLASS_CANNOT_CHANGE_MODE
1010 if (GET_CODE (SUBREG_REG (op)) == REG
1011 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1012 && (TEST_HARD_REG_BIT
1013 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1014 REGNO (SUBREG_REG (op))))
1015 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1016 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1017 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1018 return 0;
1019 #endif
1021 op = SUBREG_REG (op);
1024 /* If we have an ADDRESSOF, consider it valid since it will be
1025 converted into something that will not be a MEM. */
1026 if (GET_CODE (op) == ADDRESSOF)
1027 return 1;
1029 /* We don't consider registers whose class is NO_REGS
1030 to be a register operand. */
1031 return (GET_CODE (op) == REG
1032 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1033 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1036 /* Return 1 for a register in Pmode; ignore the tested mode. */
1039 pmode_register_operand (op, mode)
1040 rtx op;
1041 enum machine_mode mode ATTRIBUTE_UNUSED;
1043 return register_operand (op, Pmode);
1046 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1047 or a hard register. */
1050 scratch_operand (op, mode)
1051 register rtx op;
1052 enum machine_mode mode;
1054 if (GET_MODE (op) != mode && mode != VOIDmode)
1055 return 0;
1057 return (GET_CODE (op) == SCRATCH
1058 || (GET_CODE (op) == REG
1059 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1062 /* Return 1 if OP is a valid immediate operand for mode MODE.
1064 The main use of this function is as a predicate in match_operand
1065 expressions in the machine description. */
1068 immediate_operand (op, mode)
1069 register rtx op;
1070 enum machine_mode mode;
1072 /* Don't accept CONST_INT or anything similar
1073 if the caller wants something floating. */
1074 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1075 && GET_MODE_CLASS (mode) != MODE_INT
1076 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1077 return 0;
1079 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1080 result in 0/1. It seems a safe assumption that this is
1081 in range for everyone. */
1082 if (GET_CODE (op) == CONSTANT_P_RTX)
1083 return 1;
1085 return (CONSTANT_P (op)
1086 && (GET_MODE (op) == mode || mode == VOIDmode
1087 || GET_MODE (op) == VOIDmode)
1088 #ifdef LEGITIMATE_PIC_OPERAND_P
1089 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1090 #endif
1091 && LEGITIMATE_CONSTANT_P (op));
1094 /* Returns 1 if OP is an operand that is a CONST_INT. */
1097 const_int_operand (op, mode)
1098 register rtx op;
1099 enum machine_mode mode ATTRIBUTE_UNUSED;
1101 return GET_CODE (op) == CONST_INT;
1104 /* Returns 1 if OP is an operand that is a constant integer or constant
1105 floating-point number. */
1108 const_double_operand (op, mode)
1109 register rtx op;
1110 enum machine_mode mode;
1112 /* Don't accept CONST_INT or anything similar
1113 if the caller wants something floating. */
1114 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1115 && GET_MODE_CLASS (mode) != MODE_INT
1116 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1117 return 0;
1119 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1120 && (mode == VOIDmode || GET_MODE (op) == mode
1121 || GET_MODE (op) == VOIDmode));
1124 /* Return 1 if OP is a general operand that is not an immediate operand. */
1127 nonimmediate_operand (op, mode)
1128 register rtx op;
1129 enum machine_mode mode;
1131 return (general_operand (op, mode) && ! CONSTANT_P (op));
1134 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1137 nonmemory_operand (op, mode)
1138 register rtx op;
1139 enum machine_mode mode;
1141 if (CONSTANT_P (op))
1143 /* Don't accept CONST_INT or anything similar
1144 if the caller wants something floating. */
1145 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1146 && GET_MODE_CLASS (mode) != MODE_INT
1147 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1148 return 0;
1150 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1151 #ifdef LEGITIMATE_PIC_OPERAND_P
1152 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1153 #endif
1154 && LEGITIMATE_CONSTANT_P (op));
1157 if (GET_MODE (op) != mode && mode != VOIDmode)
1158 return 0;
1160 if (GET_CODE (op) == SUBREG)
1162 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1163 because it is guaranteed to be reloaded into one.
1164 Just make sure the MEM is valid in itself.
1165 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1166 but currently it does result from (SUBREG (REG)...) where the
1167 reg went on the stack.) */
1168 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1169 return general_operand (op, mode);
1170 op = SUBREG_REG (op);
1173 /* We don't consider registers whose class is NO_REGS
1174 to be a register operand. */
1175 return (GET_CODE (op) == REG
1176 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1177 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1180 /* Return 1 if OP is a valid operand that stands for pushing a
1181 value of mode MODE onto the stack.
1183 The main use of this function is as a predicate in match_operand
1184 expressions in the machine description. */
1187 push_operand (op, mode)
1188 rtx op;
1189 enum machine_mode mode;
1191 if (GET_CODE (op) != MEM)
1192 return 0;
1194 if (mode != VOIDmode && GET_MODE (op) != mode)
1195 return 0;
1197 op = XEXP (op, 0);
1199 if (GET_CODE (op) != STACK_PUSH_CODE)
1200 return 0;
1202 return XEXP (op, 0) == stack_pointer_rtx;
1205 /* Return 1 if OP is a valid operand that stands for popping a
1206 value of mode MODE off the stack.
1208 The main use of this function is as a predicate in match_operand
1209 expressions in the machine description. */
1212 pop_operand (op, mode)
1213 rtx op;
1214 enum machine_mode mode;
1216 if (GET_CODE (op) != MEM)
1217 return 0;
1219 if (mode != VOIDmode && GET_MODE (op) != mode)
1220 return 0;
1222 op = XEXP (op, 0);
1224 if (GET_CODE (op) != STACK_POP_CODE)
1225 return 0;
1227 return XEXP (op, 0) == stack_pointer_rtx;
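
/* Illustrative sketch (not part of the original source): the kind of rtx
   that push_operand and pop_operand accept, built here with the old-style
   varargs gen_rtx constructor; the SImode choice is arbitrary.  */
#if 0
static void
example_push_pop_rtx ()
{
  /* A push is a MEM whose address is STACK_PUSH_CODE applied to the stack
     pointer, e.g. (mem:SI (pre_dec:SI (reg sp))) when the stack grows
     downward; a pop uses STACK_POP_CODE instead.  */
  rtx push_mem
    = gen_rtx_MEM (SImode, gen_rtx (STACK_PUSH_CODE, Pmode,
				    stack_pointer_rtx));
  rtx pop_mem
    = gen_rtx_MEM (SImode, gen_rtx (STACK_POP_CODE, Pmode,
				    stack_pointer_rtx));

  if (! push_operand (push_mem, SImode) || ! pop_operand (pop_mem, SImode))
    abort ();
}
#endif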
1230 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1233 memory_address_p (mode, addr)
1234 enum machine_mode mode ATTRIBUTE_UNUSED;
1235 register rtx addr;
1237 if (GET_CODE (addr) == ADDRESSOF)
1238 return 1;
1240 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1241 return 0;
1243 win:
1244 return 1;
1247 /* Return 1 if OP is a valid memory reference with mode MODE,
1248 including a valid address.
1250 The main use of this function is as a predicate in match_operand
1251 expressions in the machine description. */
1254 memory_operand (op, mode)
1255 register rtx op;
1256 enum machine_mode mode;
1258 rtx inner;
1260 if (! reload_completed)
1261 /* Note that no SUBREG is a memory operand before end of reload pass,
1262 because (SUBREG (MEM...)) forces reloading into a register. */
1263 return GET_CODE (op) == MEM && general_operand (op, mode);
1265 if (mode != VOIDmode && GET_MODE (op) != mode)
1266 return 0;
1268 inner = op;
1269 if (GET_CODE (inner) == SUBREG)
1270 inner = SUBREG_REG (inner);
1272 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1275 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1276 that is, a memory reference whose address is a general_operand. */
1279 indirect_operand (op, mode)
1280 register rtx op;
1281 enum machine_mode mode;
1283 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1284 if (! reload_completed
1285 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1287 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1288 rtx inner = SUBREG_REG (op);
1290 if (BYTES_BIG_ENDIAN)
1291 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1292 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1294 if (mode != VOIDmode && GET_MODE (op) != mode)
1295 return 0;
1297 /* The only way that we can have a general_operand as the resulting
1298 address is if OFFSET is zero and the address already is an operand
1299 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1300 operand. */
1302 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1303 || (GET_CODE (XEXP (inner, 0)) == PLUS
1304 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1305 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1306 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1309 return (GET_CODE (op) == MEM
1310 && memory_operand (op, mode)
1311 && general_operand (XEXP (op, 0), Pmode));
1314 /* Return 1 if this is a comparison operator. This allows the use of
1315 MATCH_OPERATOR to recognize all the branch insns. */
1318 comparison_operator (op, mode)
1319 register rtx op;
1320 enum machine_mode mode;
1322 return ((mode == VOIDmode || GET_MODE (op) == mode)
1323 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1326 /* If BODY is an insn body that uses ASM_OPERANDS,
1327 return the number of operands (both input and output) in the insn.
1328 Otherwise return -1. */
1331 asm_noperands (body)
1332 rtx body;
1334 if (GET_CODE (body) == ASM_OPERANDS)
1335 /* No output operands: return number of input operands. */
1336 return ASM_OPERANDS_INPUT_LENGTH (body);
1337 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1338 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1339 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1340 else if (GET_CODE (body) == PARALLEL
1341 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1342 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1344 /* Multiple output operands, or 1 output plus some clobbers:
1345 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1346 int i;
1347 int n_sets;
1349 /* Count backwards through CLOBBERs to determine number of SETs. */
1350 for (i = XVECLEN (body, 0); i > 0; i--)
1352 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1353 break;
1354 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1355 return -1;
1358 /* N_SETS is now number of output operands. */
1359 n_sets = i;
1361 /* Verify that all the SETs we have
1362 came from a single original asm_operands insn
1363 (so that invalid combinations are blocked). */
1364 for (i = 0; i < n_sets; i++)
1366 rtx elt = XVECEXP (body, 0, i);
1367 if (GET_CODE (elt) != SET)
1368 return -1;
1369 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1370 return -1;
1371 /* If these ASM_OPERANDS rtx's came from different original insns
1372 then they aren't allowed together. */
1373 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1374 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1375 return -1;
1377 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1378 + n_sets);
1380 else if (GET_CODE (body) == PARALLEL
1381 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1383 /* 0 outputs, but some clobbers:
1384 body is [(asm_operands ...) (clobber (reg ...))...]. */
1385 int i;
1387 /* Make sure all the other parallel things really are clobbers. */
1388 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1389 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1390 return -1;
1392 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1394 else
1395 return -1;
1398 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1399 copy its operands (both input and output) into the vector OPERANDS,
1400 the locations of the operands within the insn into the vector OPERAND_LOCS,
1401 and the constraints for the operands into CONSTRAINTS.
1402 Write the modes of the operands into MODES.
1403 Return the assembler-template.
1405 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1406 we don't store that info. */
1408 const char *
1409 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1410 rtx body;
1411 rtx *operands;
1412 rtx **operand_locs;
1413 const char **constraints;
1414 enum machine_mode *modes;
1416 register int i;
1417 int noperands;
1418 const char *template = 0;
1420 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1422 rtx asmop = SET_SRC (body);
1423 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1425 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1427 for (i = 1; i < noperands; i++)
1429 if (operand_locs)
1430 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1431 if (operands)
1432 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1433 if (constraints)
1434 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1435 if (modes)
1436 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1439 /* The output is in the SET.
1440 Its constraint is in the ASM_OPERANDS itself. */
1441 if (operands)
1442 operands[0] = SET_DEST (body);
1443 if (operand_locs)
1444 operand_locs[0] = &SET_DEST (body);
1445 if (constraints)
1446 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1447 if (modes)
1448 modes[0] = GET_MODE (SET_DEST (body));
1449 template = ASM_OPERANDS_TEMPLATE (asmop);
1451 else if (GET_CODE (body) == ASM_OPERANDS)
1453 rtx asmop = body;
1454 /* No output operands: BODY is (asm_operands ....). */
1456 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1458 /* The input operands are found in the 1st element vector. */
1459 /* Constraints for inputs are in the 2nd element vector. */
1460 for (i = 0; i < noperands; i++)
1462 if (operand_locs)
1463 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1464 if (operands)
1465 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1466 if (constraints)
1467 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1468 if (modes)
1469 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1471 template = ASM_OPERANDS_TEMPLATE (asmop);
1473 else if (GET_CODE (body) == PARALLEL
1474 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1476 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1477 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1478 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1479 int nout = 0; /* Does not include CLOBBERs. */
1481 /* At least one output, plus some CLOBBERs. */
1483 /* The outputs are in the SETs.
1484 Their constraints are in the ASM_OPERANDS itself. */
1485 for (i = 0; i < nparallel; i++)
1487 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1488 break; /* Past last SET */
1490 if (operands)
1491 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1492 if (operand_locs)
1493 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1494 if (constraints)
1495 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1496 if (modes)
1497 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1498 nout++;
1501 for (i = 0; i < nin; i++)
1503 if (operand_locs)
1504 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1505 if (operands)
1506 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1507 if (constraints)
1508 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1509 if (modes)
1510 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1513 template = ASM_OPERANDS_TEMPLATE (asmop);
1515 else if (GET_CODE (body) == PARALLEL
1516 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1518 /* No outputs, but some CLOBBERs. */
1520 rtx asmop = XVECEXP (body, 0, 0);
1521 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1523 for (i = 0; i < nin; i++)
1525 if (operand_locs)
1526 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1527 if (operands)
1528 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1529 if (constraints)
1530 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1531 if (modes)
1532 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1535 template = ASM_OPERANDS_TEMPLATE (asmop);
1538 return template;
1541 /* Check if an asm_operand matches it's constraints.
1542 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1545 asm_operand_ok (op, constraint)
1546 rtx op;
1547 const char *constraint;
1549 int result = 0;
1551 /* Use constrain_operands after reload. */
1552 if (reload_completed)
1553 abort ();
1555 while (*constraint)
1557 switch (*constraint++)
1559 case '=':
1560 case '+':
1561 case '*':
1562 case '%':
1563 case '?':
1564 case '!':
1565 case '#':
1566 case '&':
1567 case ',':
1568 break;
1570 case '0': case '1': case '2': case '3': case '4':
1571 case '5': case '6': case '7': case '8': case '9':
1572 /* For best results, our caller should have given us the
1573 proper matching constraint, but we can't actually fail
1574 the check if they didn't. Indicate that results are
1575 inconclusive. */
1576 result = -1;
1577 break;
1579 case 'p':
1580 if (address_operand (op, VOIDmode))
1581 return 1;
1582 break;
1584 case 'm':
1585 case 'V': /* non-offsettable */
1586 if (memory_operand (op, VOIDmode))
1587 return 1;
1588 break;
1590 case 'o': /* offsettable */
1591 if (offsettable_nonstrict_memref_p (op))
1592 return 1;
1593 break;
1595 case '<':
1596 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1597 excepting those that expand_call created. Further, on some
1598 machines which do not have generalized auto inc/dec, an inc/dec
1599 is not a memory_operand.
1601 Match any memory and hope things are resolved after reload. */
1603 if (GET_CODE (op) == MEM
1604 && (1
1605 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1606 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1607 return 1;
1608 break;
1610 case '>':
1611 if (GET_CODE (op) == MEM
1612 && (1
1613 || GET_CODE (XEXP (op, 0)) == PRE_INC
1614 || GET_CODE (XEXP (op, 0)) == POST_INC))
1615 return 1;
1616 break;
1618 case 'E':
1619 #ifndef REAL_ARITHMETIC
1620 /* Match any floating double constant, but only if
1621 we can examine the bits of it reliably. */
1622 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1623 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1624 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1625 break;
1626 #endif
1627 /* FALLTHRU */
1629 case 'F':
1630 if (GET_CODE (op) == CONST_DOUBLE)
1631 return 1;
1632 break;
1634 case 'G':
1635 if (GET_CODE (op) == CONST_DOUBLE
1636 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1637 return 1;
1638 break;
1639 case 'H':
1640 if (GET_CODE (op) == CONST_DOUBLE
1641 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1642 return 1;
1643 break;
1645 case 's':
1646 if (GET_CODE (op) == CONST_INT
1647 || (GET_CODE (op) == CONST_DOUBLE
1648 && GET_MODE (op) == VOIDmode))
1649 break;
1650 /* FALLTHRU */
1652 case 'i':
1653 if (CONSTANT_P (op)
1654 #ifdef LEGITIMATE_PIC_OPERAND_P
1655 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1656 #endif
1658 return 1;
1659 break;
1661 case 'n':
1662 if (GET_CODE (op) == CONST_INT
1663 || (GET_CODE (op) == CONST_DOUBLE
1664 && GET_MODE (op) == VOIDmode))
1665 return 1;
1666 break;
1668 case 'I':
1669 if (GET_CODE (op) == CONST_INT
1670 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1671 return 1;
1672 break;
1673 case 'J':
1674 if (GET_CODE (op) == CONST_INT
1675 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1676 return 1;
1677 break;
1678 case 'K':
1679 if (GET_CODE (op) == CONST_INT
1680 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1681 return 1;
1682 break;
1683 case 'L':
1684 if (GET_CODE (op) == CONST_INT
1685 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1686 return 1;
1687 break;
1688 case 'M':
1689 if (GET_CODE (op) == CONST_INT
1690 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1691 return 1;
1692 break;
1693 case 'N':
1694 if (GET_CODE (op) == CONST_INT
1695 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1696 return 1;
1697 break;
1698 case 'O':
1699 if (GET_CODE (op) == CONST_INT
1700 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1701 return 1;
1702 break;
1703 case 'P':
1704 if (GET_CODE (op) == CONST_INT
1705 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1706 return 1;
1707 break;
1709 case 'X':
1710 return 1;
1712 case 'g':
1713 if (general_operand (op, VOIDmode))
1714 return 1;
1715 break;
1717 #ifdef EXTRA_CONSTRAINT
1718 case 'Q':
1719 if (EXTRA_CONSTRAINT (op, 'Q'))
1720 return 1;
1721 break;
1722 case 'R':
1723 if (EXTRA_CONSTRAINT (op, 'R'))
1724 return 1;
1725 break;
1726 case 'S':
1727 if (EXTRA_CONSTRAINT (op, 'S'))
1728 return 1;
1729 break;
1730 case 'T':
1731 if (EXTRA_CONSTRAINT (op, 'T'))
1732 return 1;
1733 break;
1734 case 'U':
1735 if (EXTRA_CONSTRAINT (op, 'U'))
1736 return 1;
1737 break;
1738 #endif
1740 case 'r':
1741 default:
1742 if (GET_MODE (op) == BLKmode)
1743 break;
1744 if (register_operand (op, VOIDmode))
1745 return 1;
1746 break;
1750 return result;
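
/* Illustrative sketch (not part of the original source): interpreting
   asm_operand_ok's three-way result; the "rm" constraint string is just
   an example.  */
#if 0
static int
example_check_asm_arg (op)
     rtx op;
{
  int ok = asm_operand_ok (op, "rm");

  if (ok > 0)
    return 1;	/* OP definitely satisfies "rm".  */
  if (ok == 0)
    return 0;	/* OP definitely does not.  */
  /* ok < 0: a matching-digit constraint made the check inconclusive;
     callers such as check_asm_operands treat this as acceptable.  */
  return 1;
}
#endif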
1753 /* Given an rtx *P, if it is a sum containing an integer constant term,
1754 return the location (type rtx *) of the pointer to that constant term.
1755 Otherwise, return a null pointer. */
1757 static rtx *
1758 find_constant_term_loc (p)
1759 rtx *p;
1761 register rtx *tem;
1762 register enum rtx_code code = GET_CODE (*p);
1764 /* If *P IS such a constant term, P is its location. */
1766 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1767 || code == CONST)
1768 return p;
1770 /* Otherwise, if not a sum, it has no constant term. */
1772 if (GET_CODE (*p) != PLUS)
1773 return 0;
1775 /* If one of the summands is constant, return its location. */
1777 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1778 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1779 return p;
1781 /* Otherwise, check each summand for containing a constant term. */
1783 if (XEXP (*p, 0) != 0)
1785 tem = find_constant_term_loc (&XEXP (*p, 0));
1786 if (tem != 0)
1787 return tem;
1790 if (XEXP (*p, 1) != 0)
1792 tem = find_constant_term_loc (&XEXP (*p, 1));
1793 if (tem != 0)
1794 return tem;
1797 return 0;
1800 /* Return 1 if OP is a memory reference
1801 whose address contains no side effects
1802 and remains valid after the addition
1803 of a positive integer less than the
1804 size of the object being referenced.
1806 We assume that the original address is valid and do not check it.
1808 This uses strict_memory_address_p as a subroutine, so
1809 don't use it before reload. */
1812 offsettable_memref_p (op)
1813 rtx op;
1815 return ((GET_CODE (op) == MEM)
1816 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1819 /* Similar, but don't require a strictly valid mem ref:
1820 consider pseudo-regs valid as index or base regs. */
1823 offsettable_nonstrict_memref_p (op)
1824 rtx op;
1826 return ((GET_CODE (op) == MEM)
1827 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1830 /* Return 1 if Y is a memory address which contains no side effects
1831 and would remain valid after the addition of a positive integer
1832 less than the size of that mode.
1834 We assume that the original address is valid and do not check it.
1835 We do check that it is valid for narrower modes.
1837 If STRICTP is nonzero, we require a strictly valid address,
1838 for the sake of use in reload.c. */
1841 offsettable_address_p (strictp, mode, y)
1842 int strictp;
1843 enum machine_mode mode;
1844 register rtx y;
1846 register enum rtx_code ycode = GET_CODE (y);
1847 register rtx z;
1848 rtx y1 = y;
1849 rtx *y2;
1850 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1851 (strictp ? strict_memory_address_p : memory_address_p);
1852 unsigned int mode_sz = GET_MODE_SIZE (mode);
1854 if (CONSTANT_ADDRESS_P (y))
1855 return 1;
1857 /* Adjusting an offsettable address involves changing to a narrower mode.
1858 Make sure that's OK. */
1860 if (mode_dependent_address_p (y))
1861 return 0;
1863 /* ??? How much offset does an offsettable BLKmode reference need?
1864 Clearly that depends on the situation in which it's being used.
1865 However, the current situation in which we test 0xffffffff is
1866 less than ideal. Caveat user. */
1867 if (mode_sz == 0)
1868 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1870 /* If the expression contains a constant term,
1871 see if it remains valid when max possible offset is added. */
1873 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1875 int good;
1877 y1 = *y2;
1878 *y2 = plus_constant (*y2, mode_sz - 1);
1879 /* Use QImode because an odd displacement may be automatically invalid
1880 for any wider mode. But it should be valid for a single byte. */
1881 good = (*addressp) (QImode, y);
1883 /* In any case, restore old contents of memory. */
1884 *y2 = y1;
1885 return good;
1888 if (ycode == PRE_DEC || ycode == PRE_INC
1889 || ycode == POST_DEC || ycode == POST_INC)
1890 return 0;
1892 /* The offset added here is chosen as the maximum offset that
1893 any instruction could need to add when operating on something
1894 of the specified mode. We assume that if Y and Y+c are
1895 valid addresses then so is Y+d for all 0<d<c. */
1897 z = plus_constant_for_output (y, mode_sz - 1);
1899 /* Use QImode because an odd displacement may be automatically invalid
1900 for any wider mode. But it should be valid for a single byte. */
1901 return (*addressp) (QImode, z);
1904 /* Return 1 if ADDR is an address-expression whose effect depends
1905 on the mode of the memory reference it is used in.
1907 Autoincrement addressing is a typical example of mode-dependence
1908 because the amount of the increment depends on the mode. */
1911 mode_dependent_address_p (addr)
1912 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1914 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1915 return 0;
1916 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1917 win: ATTRIBUTE_UNUSED_LABEL
1918 return 1;
1921 /* Return 1 if OP is a general operand
1922 other than a memory ref with a mode dependent address. */
1925 mode_independent_operand (op, mode)
1926 enum machine_mode mode;
1927 rtx op;
1929 rtx addr;
1931 if (! general_operand (op, mode))
1932 return 0;
1934 if (GET_CODE (op) != MEM)
1935 return 1;
1937 addr = XEXP (op, 0);
1938 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1939 return 1;
1940 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1941 lose: ATTRIBUTE_UNUSED_LABEL
1942 return 0;
1945 /* Given an operand OP that is a valid memory reference which
1946 satisfies offsettable_memref_p, return a new memory reference whose
1947 address has been adjusted by OFFSET. OFFSET should be positive and
1948 less than the size of the object referenced. */
1951 adj_offsettable_operand (op, offset)
1952 rtx op;
1953 int offset;
1955 register enum rtx_code code = GET_CODE (op);
1957 if (code == MEM)
1959 register rtx y = XEXP (op, 0);
1960 register rtx new;
1962 if (CONSTANT_ADDRESS_P (y))
1964 new = gen_rtx_MEM (GET_MODE (op),
1965 plus_constant_for_output (y, offset));
1966 MEM_COPY_ATTRIBUTES (new, op);
1967 return new;
1970 if (GET_CODE (y) == PLUS)
1972 rtx z = y;
1973 register rtx *const_loc;
1975 op = copy_rtx (op);
1976 z = XEXP (op, 0);
1977 const_loc = find_constant_term_loc (&z);
1978 if (const_loc)
1980 *const_loc = plus_constant_for_output (*const_loc, offset);
1981 return op;
1985 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1986 MEM_COPY_ATTRIBUTES (new, op);
1987 return new;
1989 abort ();
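
/* Illustrative sketch (not part of the original source): the classic
   double-word idiom in backend output routines, which checks
   offsettable_memref_p and then uses adj_offsettable_operand to address
   the second word.  The helper name is hypothetical.  */
#if 0
static rtx
example_second_word (op)
     rtx op;
{
  if (GET_CODE (op) == MEM && offsettable_memref_p (op))
    /* Valid as long as the offset stays smaller than the object being
       referenced, e.g. the high word of a double-word operand.  */
    return adj_offsettable_operand (op, UNITS_PER_WORD);
  return NULL_RTX;
}
#endif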
1992 /* Analyze INSN and fill in recog_data. */
1994 void
1995 extract_insn (insn)
1996 rtx insn;
1998 int i;
1999 int icode;
2000 int noperands;
2001 rtx body = PATTERN (insn);
2003 recog_data.n_operands = 0;
2004 recog_data.n_alternatives = 0;
2005 recog_data.n_dups = 0;
2007 switch (GET_CODE (body))
2009 case USE:
2010 case CLOBBER:
2011 case ASM_INPUT:
2012 case ADDR_VEC:
2013 case ADDR_DIFF_VEC:
2014 return;
2016 case SET:
2017 case PARALLEL:
2018 case ASM_OPERANDS:
2019 recog_data.n_operands = noperands = asm_noperands (body);
2020 if (noperands >= 0)
2022 /* This insn is an `asm' with operands. */
2024 /* expand_asm_operands makes sure there aren't too many operands. */
2025 if (noperands > MAX_RECOG_OPERANDS)
2026 abort ();
2028 /* Now get the operand values and constraints out of the insn. */
2029 decode_asm_operands (body, recog_data.operand,
2030 recog_data.operand_loc,
2031 recog_data.constraints,
2032 recog_data.operand_mode);
2033 if (noperands > 0)
2035 const char *p = recog_data.constraints[0];
2036 recog_data.n_alternatives = 1;
2037 while (*p)
2038 recog_data.n_alternatives += (*p++ == ',');
2040 break;
2043 /* FALLTHROUGH */
2045 default:
2046 /* Ordinary insn: recognize it, get the operands via insn_extract
2047 and get the constraints. */
2049 icode = recog_memoized (insn);
2050 if (icode < 0)
2051 fatal_insn_not_found (insn);
2053 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2054 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2055 recog_data.n_dups = insn_data[icode].n_dups;
2057 insn_extract (insn);
2059 for (i = 0; i < noperands; i++)
2061 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2062 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2065 for (i = 0; i < noperands; i++)
2066 recog_data.operand_type[i]
2067 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2068 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2069 : OP_IN);
2071 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2072 abort ();
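/* Illustrative sketch, not part of the original file: the usual way a
   pass inspects an insn's operands once extract_insn has filled in
   recog_data. The function name is invented for the example. */
#if 0
static void
example_scan_operands (insn)
     rtx insn;
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      rtx op = recog_data.operand[i];

      /* recog_data.operand_loc[i] points at OP within PATTERN (insn), so
         the operand can be replaced in place; operand_type[i] says whether
         it is read, written, or both. */
      if (recog_data.operand_type[i] != OP_IN && GET_CODE (op) == REG)
        ;  /* OP is (part of) an output of INSN. */
    }
}
#endif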
2075 /* After calling extract_insn, you can use this function to collect the
2076 per-alternative information encoded in the constraint strings into a more
2077 usable form. The collected data is stored in recog_op_alt. */
2078 void
2079 preprocess_constraints ()
2081 int i;
2083 memset (recog_op_alt, 0, sizeof recog_op_alt);
2084 for (i = 0; i < recog_data.n_operands; i++)
2086 int j;
2087 struct operand_alternative *op_alt;
2088 const char *p = recog_data.constraints[i];
2090 op_alt = recog_op_alt[i];
2092 for (j = 0; j < recog_data.n_alternatives; j++)
2094 op_alt[j].class = NO_REGS;
2095 op_alt[j].constraint = p;
2096 op_alt[j].matches = -1;
2097 op_alt[j].matched = -1;
2099 if (*p == '\0' || *p == ',')
2101 op_alt[j].anything_ok = 1;
2102 continue;
2105 for (;;)
2107 char c = *p++;
2108 if (c == '#')
2109 do
2110 c = *p++;
2111 while (c != ',' && c != '\0');
2112 if (c == ',' || c == '\0')
2113 break;
2115 switch (c)
2117 case '=': case '+': case '*': case '%':
2118 case 'E': case 'F': case 'G': case 'H':
2119 case 's': case 'i': case 'n':
2120 case 'I': case 'J': case 'K': case 'L':
2121 case 'M': case 'N': case 'O': case 'P':
2122 #ifdef EXTRA_CONSTRAINT
2123 case 'Q': case 'R': case 'S': case 'T': case 'U':
2124 #endif
2125 /* These don't say anything we care about. */
2126 break;
2128 case '?':
2129 op_alt[j].reject += 6;
2130 break;
2131 case '!':
2132 op_alt[j].reject += 600;
2133 break;
2134 case '&':
2135 op_alt[j].earlyclobber = 1;
2136 break;
2138 case '0': case '1': case '2': case '3': case '4':
2139 case '5': case '6': case '7': case '8': case '9':
2140 op_alt[j].matches = c - '0';
2141 recog_op_alt[op_alt[j].matches][j].matched = i;
2142 break;
2144 case 'm':
2145 op_alt[j].memory_ok = 1;
2146 break;
2147 case '<':
2148 op_alt[j].decmem_ok = 1;
2149 break;
2150 case '>':
2151 op_alt[j].incmem_ok = 1;
2152 break;
2153 case 'V':
2154 op_alt[j].nonoffmem_ok = 1;
2155 break;
2156 case 'o':
2157 op_alt[j].offmem_ok = 1;
2158 break;
2159 case 'X':
2160 op_alt[j].anything_ok = 1;
2161 break;
2163 case 'p':
2164 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2165 break;
2167 case 'g': case 'r':
2168 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2169 break;
2171 default:
2172 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2173 break;
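/* Illustrative sketch, not part of the original file: consulting the
   recog_op_alt table that preprocess_constraints builds. The function
   name is invented for the example. */
#if 0
static void
example_scan_alternatives (insn)
     rtx insn;
{
  int i, j;

  extract_insn (insn);
  preprocess_constraints ();

  for (j = 0; j < recog_data.n_alternatives; j++)
    for (i = 0; i < recog_data.n_operands; i++)
      {
        struct operand_alternative *a = &recog_op_alt[i][j];

        if (a->memory_ok)
          ;  /* Alternative J accepts a MEM for operand I. */
        if (a->matches >= 0)
          ;  /* In alternative J, operand I must match operand a->matches. */
      }
}
#endif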
2180 /* Check the operands of an insn against the insn's operand constraints
2181 and return 1 if they are valid.
2182 The information about the insn's operands, constraints, operand modes
2183 etc. is obtained from the global variables set up by extract_insn.
2185 WHICH_ALTERNATIVE is set to a number which indicates which
2186 alternative of constraints was matched: 0 for the first alternative,
2187 1 for the next, etc.
2189 In addition, when two operands are required to match
2190 and it happens that the output operand is (reg) while the
2191 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2192 make the output operand look like the input.
2193 This is because the output operand is the one the template will print.
2195 This is used in final, just before printing the assembler code, and by
2196 the routines that determine an insn's attributes.
2198 If STRICT is positive, it means that we have been
2199 called after reload has been completed. In that case, we must
2200 do all checks strictly. If it is zero, it means that we have been called
2201 before reload has completed. In that case, we first try to see if we can
2202 find an alternative that matches strictly. If not, we try again, this
2203 time assuming that reload will fix up the insn. This provides a "best
2204 guess" for the alternative and is used to compute attributes of insns prior
2205 to reload. A negative value of STRICT is used for this internal call. */
2207 struct funny_match
2209 int this, other;
2213 constrain_operands (strict)
2214 int strict;
2216 const char *constraints[MAX_RECOG_OPERANDS];
2217 int matching_operands[MAX_RECOG_OPERANDS];
2218 int earlyclobber[MAX_RECOG_OPERANDS];
2219 register int c;
2221 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2222 int funny_match_index;
2224 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2225 return 1;
2227 for (c = 0; c < recog_data.n_operands; c++)
2229 constraints[c] = recog_data.constraints[c];
2230 matching_operands[c] = -1;
2233 which_alternative = 0;
2235 while (which_alternative < recog_data.n_alternatives)
2237 register int opno;
2238 int lose = 0;
2239 funny_match_index = 0;
2241 for (opno = 0; opno < recog_data.n_operands; opno++)
2243 register rtx op = recog_data.operand[opno];
2244 enum machine_mode mode = GET_MODE (op);
2245 register const char *p = constraints[opno];
2246 int offset = 0;
2247 int win = 0;
2248 int val;
2250 earlyclobber[opno] = 0;
2252 /* A unary operator may be accepted by the predicate, but it
2253 is irrelevant for matching constraints. */
2254 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2255 op = XEXP (op, 0);
2257 if (GET_CODE (op) == SUBREG)
2259 if (GET_CODE (SUBREG_REG (op)) == REG
2260 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2261 offset = SUBREG_WORD (op);
2262 op = SUBREG_REG (op);
2265 /* An empty constraint or empty alternative
2266 allows anything which matched the pattern. */
2267 if (*p == 0 || *p == ',')
2268 win = 1;
2270 while (*p && (c = *p++) != ',')
2271 switch (c)
2273 case '?': case '!': case '*': case '%':
2274 case '=': case '+':
2275 break;
2277 case '#':
2278 /* Ignore rest of this alternative as far as
2279 constraint checking is concerned. */
2280 while (*p && *p != ',')
2281 p++;
2282 break;
2284 case '&':
2285 earlyclobber[opno] = 1;
2286 break;
2288 case '0': case '1': case '2': case '3': case '4':
2289 case '5': case '6': case '7': case '8': case '9':
2291 /* This operand must be the same as a previous one.
2292 This kind of constraint is used for instructions such
2293 as add when they take only two operands.
2295 Note that the lower-numbered operand is passed first.
2297 If we are not testing strictly, assume that this constraint
2298 will be satisfied. */
2299 if (strict < 0)
2300 val = 1;
2301 else
2303 rtx op1 = recog_data.operand[c - '0'];
2304 rtx op2 = recog_data.operand[opno];
2306 /* A unary operator may be accepted by the predicate,
2307 but it is irrelevant for matching constraints. */
2308 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2309 op1 = XEXP (op1, 0);
2310 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2311 op2 = XEXP (op2, 0);
2313 val = operands_match_p (op1, op2);
2316 matching_operands[opno] = c - '0';
2317 matching_operands[c - '0'] = opno;
2319 if (val != 0)
2320 win = 1;
2321 /* If output is *x and input is *--x,
2322 arrange later to change the output to *--x as well,
2323 since the output op is the one that will be printed. */
2324 if (val == 2 && strict > 0)
2326 funny_match[funny_match_index].this = opno;
2327 funny_match[funny_match_index++].other = c - '0';
2329 break;
2331 case 'p':
2332 /* p is used for address_operands. When we are called by
2333 gen_reload, no one will have checked that the address is
2334 strictly valid, i.e., that all pseudos requiring hard regs
2335 have gotten them. */
2336 if (strict <= 0
2337 || (strict_memory_address_p (recog_data.operand_mode[opno],
2338 op)))
2339 win = 1;
2340 break;
2342 /* No need to check general_operand again;
2343 it was done in insn-recog.c. */
2344 case 'g':
2345 /* Anything goes unless it is a REG and really has a hard reg
2346 but the hard reg is not in the class GENERAL_REGS. */
2347 if (strict < 0
2348 || GENERAL_REGS == ALL_REGS
2349 || GET_CODE (op) != REG
2350 || (reload_in_progress
2351 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2352 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2353 win = 1;
2354 break;
2356 case 'r':
2357 if (strict < 0
2358 || (strict == 0
2359 && GET_CODE (op) == REG
2360 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2361 || (strict == 0 && GET_CODE (op) == SCRATCH)
2362 || (GET_CODE (op) == REG
2363 && ((GENERAL_REGS == ALL_REGS
2364 && REGNO (op) < FIRST_PSEUDO_REGISTER)
2365 || reg_fits_class_p (op, GENERAL_REGS,
2366 offset, mode))))
2367 win = 1;
2368 break;
2370 case 'X':
2371 /* This is used for a MATCH_SCRATCH in the cases when
2372 we don't actually need anything, so any operand
2373 whatsoever is acceptable. */
2374 win = 1;
2375 break;
2377 case 'm':
2378 if (GET_CODE (op) == MEM
2379 /* Before reload, accept what reload can turn into mem. */
2380 || (strict < 0 && CONSTANT_P (op))
2381 /* During reload, accept a pseudo */
2382 || (reload_in_progress && GET_CODE (op) == REG
2383 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2384 win = 1;
2385 break;
2387 case '<':
2388 if (GET_CODE (op) == MEM
2389 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2390 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2391 win = 1;
2392 break;
2394 case '>':
2395 if (GET_CODE (op) == MEM
2396 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2397 || GET_CODE (XEXP (op, 0)) == POST_INC))
2398 win = 1;
2399 break;
2401 case 'E':
2402 #ifndef REAL_ARITHMETIC
2403 /* Match any CONST_DOUBLE, but only if
2404 we can examine the bits of it reliably. */
2405 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2406 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2407 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2408 break;
2409 #endif
2410 if (GET_CODE (op) == CONST_DOUBLE)
2411 win = 1;
2412 break;
2414 case 'F':
2415 if (GET_CODE (op) == CONST_DOUBLE)
2416 win = 1;
2417 break;
2419 case 'G':
2420 case 'H':
2421 if (GET_CODE (op) == CONST_DOUBLE
2422 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2423 win = 1;
2424 break;
2426 case 's':
2427 if (GET_CODE (op) == CONST_INT
2428 || (GET_CODE (op) == CONST_DOUBLE
2429 && GET_MODE (op) == VOIDmode))
2430 break;
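/* An explicit integer constant does not satisfy `s'; any other constant falls through to the `i' test below. */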
2431 case 'i':
2432 if (CONSTANT_P (op))
2433 win = 1;
2434 break;
2436 case 'n':
2437 if (GET_CODE (op) == CONST_INT
2438 || (GET_CODE (op) == CONST_DOUBLE
2439 && GET_MODE (op) == VOIDmode))
2440 win = 1;
2441 break;
2443 case 'I':
2444 case 'J':
2445 case 'K':
2446 case 'L':
2447 case 'M':
2448 case 'N':
2449 case 'O':
2450 case 'P':
2451 if (GET_CODE (op) == CONST_INT
2452 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2453 win = 1;
2454 break;
2456 #ifdef EXTRA_CONSTRAINT
2457 case 'Q':
2458 case 'R':
2459 case 'S':
2460 case 'T':
2461 case 'U':
2462 if (EXTRA_CONSTRAINT (op, c))
2463 win = 1;
2464 break;
2465 #endif
2467 case 'V':
2468 if (GET_CODE (op) == MEM
2469 && ((strict > 0 && ! offsettable_memref_p (op))
2470 || (strict < 0
2471 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2472 || (reload_in_progress
2473 && !(GET_CODE (op) == REG
2474 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2475 win = 1;
2476 break;
2478 case 'o':
2479 if ((strict > 0 && offsettable_memref_p (op))
2480 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2481 /* Before reload, accept what reload can handle. */
2482 || (strict < 0
2483 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2484 /* During reload, accept a pseudo */
2485 || (reload_in_progress && GET_CODE (op) == REG
2486 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2487 win = 1;
2488 break;
2490 default:
2491 if (strict < 0
2492 || (strict == 0
2493 && GET_CODE (op) == REG
2494 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2495 || (strict == 0 && GET_CODE (op) == SCRATCH)
2496 || (GET_CODE (op) == REG
2497 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
2498 offset, mode)))
2499 win = 1;
2502 constraints[opno] = p;
2503 /* If this operand did not win somehow,
2504 this alternative loses. */
2505 if (! win)
2506 lose = 1;
2508 /* This alternative won; the operands are ok.
2509 Change whichever operands this alternative says to change. */
2510 if (! lose)
2512 int opno, eopno;
2514 /* See if any earlyclobber operand conflicts with some other
2515 operand. */
2517 if (strict > 0)
2518 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2519 /* Ignore earlyclobber operands now in memory,
2520 because we would often report failure when we have
2521 two memory operands, one of which was formerly a REG. */
2522 if (earlyclobber[eopno]
2523 && GET_CODE (recog_data.operand[eopno]) == REG)
2524 for (opno = 0; opno < recog_data.n_operands; opno++)
2525 if ((GET_CODE (recog_data.operand[opno]) == MEM
2526 || recog_data.operand_type[opno] != OP_OUT)
2527 && opno != eopno
2528 /* Ignore things like match_operator operands. */
2529 && *recog_data.constraints[opno] != 0
2530 && ! (matching_operands[opno] == eopno
2531 && operands_match_p (recog_data.operand[opno],
2532 recog_data.operand[eopno]))
2533 && ! safe_from_earlyclobber (recog_data.operand[opno],
2534 recog_data.operand[eopno]))
2535 lose = 1;
2537 if (! lose)
2539 while (--funny_match_index >= 0)
2541 recog_data.operand[funny_match[funny_match_index].other]
2542 = recog_data.operand[funny_match[funny_match_index].this];
2545 return 1;
2549 which_alternative++;
2552 /* If we are about to reject this, but we are not to test strictly,
2553 try a very loose test. Only return failure if it fails also. */
2554 if (strict == 0)
2555 return constrain_operands (-1);
2556 else
2557 return 0;
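/* Illustrative sketch, not part of the original file: the typical pairing
   of extract_insn with constrain_operands. A positive STRICT is the right
   choice once reload_completed is set, since every operand must then
   satisfy its constraint exactly; zero lets the match assume reload will
   fix things up. The function name is invented for the example. */
#if 0
static int
example_insn_satisfies_constraints (insn)
     rtx insn;
{
  extract_insn (insn);
  if (! constrain_operands (reload_completed))
    return 0;
  /* which_alternative now records the constraint alternative that matched. */
  return 1;
}
#endif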
2560 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2561 is a hard reg in class CLASS when its regno is offset by OFFSET
2562 and changed to mode MODE.
2563 If OPERAND occupies multiple hard regs, all of them must be in CLASS. */
2566 reg_fits_class_p (operand, class, offset, mode)
2567 rtx operand;
2568 register enum reg_class class;
2569 int offset;
2570 enum machine_mode mode;
2572 register int regno = REGNO (operand);
2573 if (regno < FIRST_PSEUDO_REGISTER
2574 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2575 regno + offset))
2577 register int sr;
2578 regno += offset;
2579 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2580 sr > 0; sr--)
2581 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2582 regno + sr))
2583 break;
2584 return sr == 0;
2587 return 0;
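/* Illustrative sketch, not part of the original file: with a multi-word
   mode, every hard register the value occupies must lie in the class.
   The register number and mode are invented for the example. */
#if 0
static int
example_fits_general_regs ()
{
  rtx operand = gen_rtx_REG (DImode, 2);

  /* Nonzero only if hard register 2, and the following register(s) that a
     DImode value needs here, are all in GENERAL_REGS. */
  return reg_fits_class_p (operand, GENERAL_REGS, 0, DImode);
}
#endif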
2590 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2592 void
2593 split_all_insns (upd_life)
2594 int upd_life;
2596 sbitmap blocks;
2597 int changed;
2598 int i;
2600 blocks = sbitmap_alloc (n_basic_blocks);
2601 sbitmap_zero (blocks);
2602 changed = 0;
2604 for (i = n_basic_blocks - 1; i >= 0; --i)
2606 basic_block bb = BASIC_BLOCK (i);
2607 rtx insn, next;
2609 for (insn = bb->head; insn ; insn = next)
2611 rtx set;
2613 /* Can't use `next_real_insn', because that might step across
2614 CODE_LABELs and thus move past the end of this basic block. */
2615 next = NEXT_INSN (insn);
2616 if (GET_CODE (insn) != INSN)
2617 ;
2619 /* Don't split no-op move insns. These should silently
2620 disappear later in final. Splitting such insns would
2621 break the code that handles REG_NO_CONFLICT blocks. */
2623 else if ((set = single_set (insn)) != NULL
2624 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2626 /* Nops get in the way while scheduling, so delete them
2627 now if register allocation has already been done. It
2628 is too risky to try to do this before register
2629 allocation, and there are unlikely to be very many
2630 nops then anyway. */
2631 if (reload_completed)
2633 PUT_CODE (insn, NOTE);
2634 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2635 NOTE_SOURCE_FILE (insn) = 0;
2638 else
2640 /* Split insns here to get max fine-grain parallelism. */
2641 rtx first = PREV_INSN (insn);
2642 rtx last = try_split (PATTERN (insn), insn, 1);
2644 if (last != insn)
2646 SET_BIT (blocks, i);
2647 changed = 1;
2649 /* try_split returns the NOTE that INSN became. */
2650 first = NEXT_INSN (first);
2651 PUT_CODE (insn, NOTE);
2652 NOTE_SOURCE_FILE (insn) = 0;
2653 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2655 if (insn == bb->end)
2657 bb->end = last;
2658 break;
2663 if (insn == bb->end)
2664 break;
2667 /* ??? When we're called from just after reload, the CFG is in bad
2668 shape, and we may have fallen off the end. This could be fixed
2669 by having reload not try to delete unreachable code. Otherwise
2670 assert we found the end insn. */
2671 if (insn == NULL && upd_life)
2672 abort ();
2675 if (changed && upd_life)
2677 compute_bb_for_insn (get_max_uid ());
2678 count_or_remove_death_notes (blocks, 1);
2679 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2682 sbitmap_free (blocks);
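/* Illustrative sketch, not part of the original file: a pass that wants
   every insn split and the life information refreshed afterwards simply
   passes a nonzero UPD_LIFE. The function name is invented for the
   example. */
#if 0
static void
example_run_splitter ()
{
  split_all_insns (1);
}
#endif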
2685 #ifdef HAVE_peephole2
2686 struct peep2_insn_data
2688 rtx insn;
2689 regset live_before;
2692 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2693 static int peep2_current;
2695 /* A marker, not itself an insn, stored in the insn slot to indicate
2696 the end of the basic block. The live_before regset for this element
2697 is valid: it holds global_live_at_end for the block. */
2698 #define PEEP2_EOB pc_rtx
2700 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2701 does not exist. Used by the recognizer to find the next insn to match
2702 in a multi-insn pattern. */
2705 peep2_next_insn (n)
2706 int n;
2708 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2709 abort ();
2711 n += peep2_current;
2712 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2713 n -= MAX_INSNS_PER_PEEP2 + 1;
2715 if (peep2_insn_data[n].insn == PEEP2_EOB)
2716 return NULL_RTX;
2717 return peep2_insn_data[n].insn;
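/* Illustrative sketch, not part of the original file: peep2_insn_data is
   used as a circular buffer, so an offset N from `current' wraps modulo
   MAX_INSNS_PER_PEEP2 + 1. For instance, if MAX_INSNS_PER_PEEP2 were 5,
   peep2_current were 4 and N were 3, the slot consulted would be
   (4 + 3) - 6 == 1. The helper name is invented for the example. */
#if 0
static int
example_peep2_slot (n)
     int n;
{
  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}
#endif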
2720 /* Return true if REGNO is dead before the Nth non-note insn
2721 after `current'. */
2724 peep2_regno_dead_p (ofs, regno)
2725 int ofs;
2726 int regno;
2728 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2729 abort ();
2731 ofs += peep2_current;
2732 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2733 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2735 if (peep2_insn_data[ofs].insn == NULL_RTX)
2736 abort ();
2738 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2741 /* Similarly for a REG. */
2744 peep2_reg_dead_p (ofs, reg)
2745 int ofs;
2746 rtx reg;
2748 int regno, n;
2750 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2751 abort ();
2753 ofs += peep2_current;
2754 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2755 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2757 if (peep2_insn_data[ofs].insn == NULL_RTX)
2758 abort ();
2760 regno = REGNO (reg);
2761 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2762 while (--n >= 0)
2763 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2764 return 0;
2765 return 1;
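/* Illustrative sketch, not part of the original file: the two dead-ness
   tests above are meant to be used in peephole2 match conditions; in C
   terms the typical test reads like the body below. The function name
   and operand index are invented for the example. */
#if 0
static int
example_dead_after_first_insn (operands)
     rtx *operands;
{
  /* Nonzero if the register in operands[0] is dead before the second
     insn of the matched sequence (offset 1 from `current'). */
  return peep2_reg_dead_p (1, operands[0]);
}
#endif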
2768 /* Try to find a hard register of mode MODE, matching the register class in
2769 CLASS_STR, which is available from the beginning of the insn in peephole
2770 slot FROM and remains available until the end of the insn in slot TO.
2771 FROM and TO are offsets from `current', counted in the same way as for
2772 peep2_next_insn above.
2773 Registers that already have bits set in REG_SET will not be considered.
2775 If an appropriate register is available, it will be returned and the
2776 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2777 returned. */
2780 peep2_find_free_register (from, to, class_str, mode, reg_set)
2781 int from, to;
2782 const char *class_str;
2783 enum machine_mode mode;
2784 HARD_REG_SET *reg_set;
2786 static int search_ofs;
2787 enum reg_class class;
2788 HARD_REG_SET live;
2789 int i;
2791 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2792 abort ();
2794 from += peep2_current;
2795 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2796 from -= MAX_INSNS_PER_PEEP2 + 1;
2797 to += peep2_current;
2798 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2799 to -= MAX_INSNS_PER_PEEP2 + 1;
2801 if (peep2_insn_data[from].insn == NULL_RTX)
2802 abort ();
2803 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2805 while (from != to)
2807 HARD_REG_SET this_live;
2809 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2810 from = 0;
2811 if (peep2_insn_data[from].insn == NULL_RTX)
2812 abort ();
2813 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2814 IOR_HARD_REG_SET (live, this_live);
2817 class = (class_str[0] == 'r' ? GENERAL_REGS
2818 : REG_CLASS_FROM_LETTER (class_str[0]));
2820 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2822 int raw_regno, regno, success, j;
2824 /* Distribute the free registers as much as possible. */
2825 raw_regno = search_ofs + i;
2826 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2827 raw_regno -= FIRST_PSEUDO_REGISTER;
2828 #ifdef REG_ALLOC_ORDER
2829 regno = reg_alloc_order[raw_regno];
2830 #else
2831 regno = raw_regno;
2832 #endif
2834 /* Don't allocate fixed registers. */
2835 if (fixed_regs[regno])
2836 continue;
2837 /* Make sure the register is of the right class. */
2838 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2839 continue;
2840 /* And can support the mode we need. */
2841 if (! HARD_REGNO_MODE_OK (regno, mode))
2842 continue;
2843 /* And so that we don't create an extra save/restore: skip call-saved registers not already used in this function. */
2844 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2845 continue;
2846 /* And we don't clobber traceback for noreturn functions. */
2847 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2848 && (! reload_completed || frame_pointer_needed))
2849 continue;
2851 success = 1;
2852 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2854 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2855 || TEST_HARD_REG_BIT (live, regno + j))
2857 success = 0;
2858 break;
2861 if (success)
2863 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2864 SET_HARD_REG_BIT (*reg_set, regno + j);
2866 /* Start the next search with the next register. */
2867 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2868 raw_regno = 0;
2869 search_ofs = raw_regno;
2871 return gen_rtx_REG (mode, regno);
2875 search_ofs = 0;
2876 return NULL_RTX;
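/* Illustrative sketch, not part of the original file: how generated
   peephole2 code might ask for a scratch register that stays free across
   the first two matched insns. The names below are invented for the
   example. */
#if 0
static rtx
example_get_scratch (live)
     HARD_REG_SET *live;
{
  /* A general register in SImode, free from insn slot 0 through slot 1;
     NULL_RTX if no such register exists. */
  return peep2_find_free_register (0, 1, "r", SImode, live);
}
#endif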
2879 /* Perform the peephole2 optimization pass. */
2881 void
2882 peephole2_optimize (dump_file)
2883 FILE *dump_file ATTRIBUTE_UNUSED;
2885 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
2886 rtx insn, prev;
2887 regset live;
2888 int i, b;
2889 #ifdef HAVE_conditional_execution
2890 sbitmap blocks;
2891 int changed;
2892 #endif
2894 /* Initialize the regsets we're going to use. */
2895 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
2896 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
2897 live = INITIALIZE_REG_SET (rs_heads[i]);
2899 #ifdef HAVE_conditional_execution
2900 blocks = sbitmap_alloc (n_basic_blocks);
2901 sbitmap_zero (blocks);
2902 changed = 0;
2903 #else
2904 count_or_remove_death_notes (NULL, 1);
2905 #endif
2907 for (b = n_basic_blocks - 1; b >= 0; --b)
2909 basic_block bb = BASIC_BLOCK (b);
2910 struct propagate_block_info *pbi;
2912 /* Indicate that all slots except the last hold invalid data. */
2913 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
2914 peep2_insn_data[i].insn = NULL_RTX;
2916 /* Indicate that the last slot contains live_after data. */
2917 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
2918 peep2_current = MAX_INSNS_PER_PEEP2;
2920 /* Start up propagation. */
2921 COPY_REG_SET (live, bb->global_live_at_end);
2922 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
2924 #ifdef HAVE_conditional_execution
2925 pbi = init_propagate_block_info (bb, live, NULL, 0);
2926 #else
2927 pbi = init_propagate_block_info (bb, live, NULL, PROP_DEATH_NOTES);
2928 #endif
2930 for (insn = bb->end; ; insn = prev)
2932 prev = PREV_INSN (insn);
2933 if (INSN_P (insn))
2935 rtx try;
2936 int match_len;
2938 /* Record this insn. */
2939 if (--peep2_current < 0)
2940 peep2_current = MAX_INSNS_PER_PEEP2;
2941 peep2_insn_data[peep2_current].insn = insn;
2942 propagate_one_insn (pbi, insn);
2943 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
2945 /* Match the peephole. */
2946 try = peephole2_insns (PATTERN (insn), insn, &match_len);
2947 if (try != NULL)
2949 i = match_len + peep2_current;
2950 if (i >= MAX_INSNS_PER_PEEP2 + 1)
2951 i -= MAX_INSNS_PER_PEEP2 + 1;
2953 /* Replace the old sequence with the new. */
2954 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
2955 try = emit_insn_after (try, prev);
2957 /* Adjust the basic block boundaries. */
2958 if (peep2_insn_data[i].insn == bb->end)
2959 bb->end = try;
2960 if (insn == bb->head)
2961 bb->head = NEXT_INSN (prev);
2963 #ifdef HAVE_conditional_execution
2964 /* With conditional execution, we cannot back up the
2965 live information so easily, since the conditional
2966 death data structures are not so self-contained.
2967 So record that we've made a modification to this
2968 block and update life information at the end. */
2969 SET_BIT (blocks, b);
2970 changed = 1;
2972 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
2973 peep2_insn_data[i].insn = NULL_RTX;
2974 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
2975 #else
2976 /* Back up lifetime information past the end of the
2977 newly created sequence. */
2978 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
2979 i = 0;
2980 COPY_REG_SET (live, peep2_insn_data[i].live_before);
2982 /* Update life information for the new sequence. */
2983 do
2985 if (INSN_P (try))
2987 if (--i < 0)
2988 i = MAX_INSNS_PER_PEEP2;
2989 peep2_insn_data[i].insn = try;
2990 propagate_one_insn (pbi, try);
2991 COPY_REG_SET (peep2_insn_data[i].live_before, live);
2993 try = PREV_INSN (try);
2995 while (try != prev);
2997 /* ??? Should verify that LIVE now matches what we
2998 had before the new sequence. */
3000 peep2_current = i;
3001 #endif
3005 if (insn == bb->head)
3006 break;
3009 free_propagate_block_info (pbi);
3012 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3013 FREE_REG_SET (peep2_insn_data[i].live_before);
3014 FREE_REG_SET (live);
3016 #ifdef HAVE_conditional_execution
3017 count_or_remove_death_notes (blocks, 1);
3018 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3019 sbitmap_free (blocks);
3020 #endif
3022 #endif /* HAVE_peephole2 */