Correct semantics restrictions checking in throw-expression.
[official-gcc.git] / gcc / recog.c
blob4dab907c84891fd48f682108c0ddc244ae323e94
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
31 #include "hard-reg-set.h"
32 #include "recog.h"
33 #include "regs.h"
34 #include "function.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "toplev.h"
38 #include "basic-block.h"
39 #include "output.h"
40 #include "reload.h"
/* If the target does not say how pushes and pops address memory, derive
   the address-update codes from the direction of stack growth: a
   downward-growing stack pushes with pre-decrement and pops with
   post-increment; an upward-growing stack does the opposite.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
58 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
59 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
60 static rtx *find_constant_term_loc PARAMS ((rtx *));
61 static int insn_invalid_p PARAMS ((rtx));
63 /* Nonzero means allow operands to be volatile.
64 This should be 0 if you are generating rtl, such as if you are calling
65 the functions in optabs.c and expmed.c (most of the time).
66 This should be 1 if all valid insns need to be recognized,
67 such as in regclass.c and final.c and reload.c.
69 init_recog and init_recog_no_volatile are responsible for setting this. */
71 int volatile_ok;
73 struct recog_data recog_data;
75 /* Contains a vector of operand_alternative structures for every operand.
76 Set up by preprocess_constraints. */
77 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79 /* On return from `constrain_operands', indicate which alternative
80 was satisfied. */
82 int which_alternative;
84 /* Nonzero after end of reload pass.
85 Set to 1 or 0 by toplev.c.
86 Controls the significance of (SUBREG (MEM)). */
88 int reload_completed;
90 /* Initialize data used by the function `recog'.
91 This must be called once in the compilation of a function
92 before any insn recognition may be done in the function. */
94 void
95 init_recog_no_volatile ()
97 volatile_ok = 0;
100 void
101 init_recog ()
103 volatile_ok = 1;
106 /* Try recognizing the instruction INSN,
107 and return the code number that results.
108 Remember the code so that repeated calls do not
109 need to spend the time for actual rerecognition.
111 This function is the normal interface to instruction recognition.
112 The automatically-generated function `recog' is normally called
113 through this one. (The only exception is in combine.c.) */
116 recog_memoized_1 (insn)
117 rtx insn;
119 if (INSN_CODE (insn) < 0)
120 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
121 return INSN_CODE (insn);
124 /* Check that X is an insn-body for an `asm' with operands
125 and that the operands mentioned in it are legitimate. */
128 check_asm_operands (x)
129 rtx x;
131 int noperands;
132 rtx *operands;
133 const char **constraints;
134 int i;
136 /* Post-reload, be more strict with things. */
137 if (reload_completed)
139 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
140 extract_insn (make_insn_raw (x));
141 constrain_operands (1);
142 return which_alternative >= 0;
145 noperands = asm_noperands (x);
146 if (noperands < 0)
147 return 0;
148 if (noperands == 0)
149 return 1;
151 operands = (rtx *) alloca (noperands * sizeof (rtx));
152 constraints = (const char **) alloca (noperands * sizeof (char *));
154 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
156 for (i = 0; i < noperands; i++)
158 const char *c = constraints[i];
159 if (c[0] == '%')
160 c++;
161 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
162 c = constraints[c[0] - '0'];
164 if (! asm_operand_ok (operands[i], c))
165 return 0;
168 return 1;
171 /* Static data for the next two routines. */
173 typedef struct change_t
175 rtx object;
176 int old_code;
177 rtx *loc;
178 rtx old;
179 } change_t;
181 static change_t *changes;
182 static int changes_allocated;
184 static int num_changes = 0;
186 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
187 at which NEW will be placed. If OBJECT is zero, no validation is done,
188 the change is simply made.
190 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
191 will be called with the address and mode as parameters. If OBJECT is
192 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
193 the change in place.
195 IN_GROUP is non-zero if this is part of a group of changes that must be
196 performed as a group. In that case, the changes will be stored. The
197 function `apply_change_group' will validate and apply the changes.
199 If IN_GROUP is zero, this is a single change. Try to recognize the insn
200 or validate the memory reference with the change applied. If the result
201 is not valid for the machine, suppress the change and return zero.
202 Otherwise, perform the change and return 1. */
205 validate_change (object, loc, new, in_group)
206 rtx object;
207 rtx *loc;
208 rtx new;
209 int in_group;
211 rtx old = *loc;
213 if (old == new || rtx_equal_p (old, new))
214 return 1;
216 if (in_group == 0 && num_changes != 0)
217 abort ();
219 *loc = new;
221 /* Save the information describing this change. */
222 if (num_changes >= changes_allocated)
224 if (changes_allocated == 0)
225 /* This value allows for repeated substitutions inside complex
226 indexed addresses, or changes in up to 5 insns. */
227 changes_allocated = MAX_RECOG_OPERANDS * 5;
228 else
229 changes_allocated *= 2;
231 changes =
232 (change_t*) xrealloc (changes,
233 sizeof (change_t) * changes_allocated);
236 changes[num_changes].object = object;
237 changes[num_changes].loc = loc;
238 changes[num_changes].old = old;
240 if (object && GET_CODE (object) != MEM)
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
243 case invalid. */
244 changes[num_changes].old_code = INSN_CODE (object);
245 INSN_CODE (object) = -1;
248 num_changes++;
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
253 if (in_group)
254 return 1;
255 else
256 return apply_change_group ();
259 /* This subroutine of apply_change_group verifies whether the changes to INSN
260 were valid; i.e. whether INSN can still be recognized. */
262 static int
263 insn_invalid_p (insn)
264 rtx insn;
266 int icode = recog_memoized (insn);
267 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
269 if (is_asm && ! check_asm_operands (PATTERN (insn)))
270 return 1;
271 if (! is_asm && icode < 0)
272 return 1;
274 /* After reload, verify that all constraints are satisfied. */
275 if (reload_completed)
277 extract_insn (insn);
279 if (! constrain_operands (1))
280 return 1;
283 return 0;
286 /* Apply a group of changes previously issued with `validate_change'.
287 Return 1 if all changes are valid, zero otherwise. */
290 apply_change_group ()
292 int i;
294 /* The changes have been applied and all INSN_CODEs have been reset to force
295 rerecognition.
297 The changes are valid if we aren't given an object, or if we are
298 given a MEM and it still is a valid address, or if this is in insn
299 and it is recognized. In the latter case, if reload has completed,
300 we also require that the operands meet the constraints for
301 the insn. */
303 for (i = 0; i < num_changes; i++)
305 rtx object = changes[i].object;
307 if (object == 0)
308 continue;
310 if (GET_CODE (object) == MEM)
312 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
313 break;
315 else if (insn_invalid_p (object))
317 rtx pat = PATTERN (object);
319 /* Perhaps we couldn't recognize the insn because there were
320 extra CLOBBERs at the end. If so, try to re-recognize
321 without the last CLOBBER (later iterations will cause each of
322 them to be eliminated, in turn). But don't do this if we
323 have an ASM_OPERAND. */
324 if (GET_CODE (pat) == PARALLEL
325 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
326 && asm_noperands (PATTERN (object)) < 0)
328 rtx newpat;
330 if (XVECLEN (pat, 0) == 2)
331 newpat = XVECEXP (pat, 0, 0);
332 else
334 int j;
336 newpat
337 = gen_rtx_PARALLEL (VOIDmode,
338 rtvec_alloc (XVECLEN (pat, 0) - 1));
339 for (j = 0; j < XVECLEN (newpat, 0); j++)
340 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
343 /* Add a new change to this group to replace the pattern
344 with this new pattern. Then consider this change
345 as having succeeded. The change we added will
346 cause the entire call to fail if things remain invalid.
348 Note that this can lose if a later change than the one
349 we are processing specified &XVECEXP (PATTERN (object), 0, X)
350 but this shouldn't occur. */
352 validate_change (object, &PATTERN (object), newpat, 1);
354 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
355 /* If this insn is a CLOBBER or USE, it is always valid, but is
356 never recognized. */
357 continue;
358 else
359 break;
363 if (i == num_changes)
365 num_changes = 0;
366 return 1;
368 else
370 cancel_changes (0);
371 return 0;
375 /* Return the number of changes so far in the current group. */
378 num_validated_changes ()
380 return num_changes;
383 /* Retract the changes numbered NUM and up. */
385 void
386 cancel_changes (num)
387 int num;
389 int i;
391 /* Back out all the changes. Do this in the opposite order in which
392 they were made. */
393 for (i = num_changes - 1; i >= num; i--)
395 *changes[i].loc = changes[i].old;
396 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
397 INSN_CODE (changes[i].object) = changes[i].old_code;
399 num_changes = num;
402 /* Replace every occurrence of FROM in X with TO. Mark each change with
403 validate_change passing OBJECT. */
405 static void
406 validate_replace_rtx_1 (loc, from, to, object)
407 rtx *loc;
408 rtx from, to, object;
410 register int i, j;
411 register const char *fmt;
412 register rtx x = *loc;
413 enum rtx_code code;
415 if (!x)
416 return;
417 code = GET_CODE (x);
418 /* X matches FROM if it is the same rtx or they are both referring to the
419 same register in the same mode. Avoid calling rtx_equal_p unless the
420 operands look similar. */
422 if (x == from
423 || (GET_CODE (x) == REG && GET_CODE (from) == REG
424 && GET_MODE (x) == GET_MODE (from)
425 && REGNO (x) == REGNO (from))
426 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
427 && rtx_equal_p (x, from)))
429 validate_change (object, loc, to, 1);
430 return;
433 /* For commutative or comparison operations, try replacing each argument
434 separately and seeing if we made any changes. If so, put a constant
435 argument last.*/
436 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
438 int prev_changes = num_changes;
440 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
441 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
442 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
444 validate_change (object, loc,
445 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
446 : swap_condition (code),
447 GET_MODE (x), XEXP (x, 1),
448 XEXP (x, 0)),
450 x = *loc;
451 code = GET_CODE (x);
455 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
456 done the substitution, otherwise we won't. */
458 switch (code)
460 case PLUS:
461 /* If we have a PLUS whose second operand is now a CONST_INT, use
462 plus_constant to try to simplify it. */
463 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
464 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
466 return;
468 case MINUS:
469 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
471 validate_change (object, loc,
472 plus_constant (XEXP (x, 0), - INTVAL (to)),
474 return;
476 break;
478 case ZERO_EXTEND:
479 case SIGN_EXTEND:
480 /* In these cases, the operation to be performed depends on the mode
481 of the operand. If we are replacing the operand with a VOIDmode
482 constant, we lose the information. So try to simplify the operation
483 in that case. */
484 if (GET_MODE (to) == VOIDmode
485 && (rtx_equal_p (XEXP (x, 0), from)
486 || (GET_CODE (XEXP (x, 0)) == SUBREG
487 && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
489 rtx new = NULL_RTX;
491 /* If there is a subreg involved, crop to the portion of the
492 constant that we are interested in. */
493 if (GET_CODE (XEXP (x, 0)) == SUBREG)
495 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
496 to = operand_subword (to, SUBREG_WORD (XEXP (x, 0)),
497 0, GET_MODE (from));
498 else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
499 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
500 <= HOST_BITS_PER_WIDE_INT))
502 int i = SUBREG_WORD (XEXP (x, 0)) * BITS_PER_WORD;
503 HOST_WIDE_INT valh;
504 unsigned HOST_WIDE_INT vall;
506 if (GET_CODE (to) == CONST_INT)
508 vall = INTVAL (to);
509 valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
511 else
513 vall = CONST_DOUBLE_LOW (to);
514 valh = CONST_DOUBLE_HIGH (to);
517 if (WORDS_BIG_ENDIAN)
518 i = (GET_MODE_BITSIZE (GET_MODE (from))
519 - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
520 if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
521 vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
522 else if (i >= HOST_BITS_PER_WIDE_INT)
523 vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
524 to = GEN_INT (trunc_int_for_mode (vall,
525 GET_MODE (XEXP (x, 0))));
527 else
528 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
531 /* If the above didn't fail, perform the extension from the
532 mode of the operand (and not the mode of FROM). */
533 if (to)
534 new = simplify_unary_operation (code, GET_MODE (x), to,
535 GET_MODE (XEXP (x, 0)));
537 /* If any of the above failed, substitute in something that
538 we know won't be recognized. */
539 if (!new)
540 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
542 validate_change (object, loc, new, 1);
543 return;
545 break;
547 case SUBREG:
548 /* In case we are replacing by constant, attempt to simplify it to non-SUBREG
549 expression. We can't do this later, since the information about inner mode
550 may be lost. */
551 if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
553 if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
554 && GET_MODE_SIZE (GET_MODE (from)) > UNITS_PER_WORD
555 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
557 rtx temp = operand_subword (to, SUBREG_WORD (x),
558 0, GET_MODE (from));
559 if (temp)
561 validate_change (object, loc, temp, 1);
562 return;
565 if (subreg_lowpart_p (x))
567 rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
568 if (new)
570 validate_change (object, loc, new, 1);
571 return;
575 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
576 since we are saying that the high bits don't matter. */
577 if (GET_MODE (to) == VOIDmode
578 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (from)))
580 validate_change (object, loc, to, 1);
581 return;
585 /* Changing mode twice with SUBREG => just change it once,
586 or not at all if changing back to starting mode. */
587 if (GET_CODE (to) == SUBREG
588 && rtx_equal_p (SUBREG_REG (x), from))
590 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
591 && SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
593 validate_change (object, loc, SUBREG_REG (to), 1);
594 return;
597 validate_change (object, loc,
598 gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
599 SUBREG_WORD (x) + SUBREG_WORD (to)), 1);
600 return;
603 /* If we have a SUBREG of a register that we are replacing and we are
604 replacing it with a MEM, make a new MEM and try replacing the
605 SUBREG with it. Don't do this if the MEM has a mode-dependent address
606 or if we would be widening it. */
608 if (GET_CODE (from) == REG
609 && GET_CODE (to) == MEM
610 && rtx_equal_p (SUBREG_REG (x), from)
611 && ! mode_dependent_address_p (XEXP (to, 0))
612 && ! MEM_VOLATILE_P (to)
613 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
615 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
616 enum machine_mode mode = GET_MODE (x);
617 rtx new;
619 if (BYTES_BIG_ENDIAN)
620 offset += (MIN (UNITS_PER_WORD,
621 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
622 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
624 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
625 MEM_COPY_ATTRIBUTES (new, to);
626 validate_change (object, loc, new, 1);
627 return;
629 break;
631 case ZERO_EXTRACT:
632 case SIGN_EXTRACT:
633 /* If we are replacing a register with memory, try to change the memory
634 to be the mode required for memory in extract operations (this isn't
635 likely to be an insertion operation; if it was, nothing bad will
636 happen, we might just fail in some cases). */
638 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
639 && rtx_equal_p (XEXP (x, 0), from)
640 && GET_CODE (XEXP (x, 1)) == CONST_INT
641 && GET_CODE (XEXP (x, 2)) == CONST_INT
642 && ! mode_dependent_address_p (XEXP (to, 0))
643 && ! MEM_VOLATILE_P (to))
645 enum machine_mode wanted_mode = VOIDmode;
646 enum machine_mode is_mode = GET_MODE (to);
647 int pos = INTVAL (XEXP (x, 2));
649 #ifdef HAVE_extzv
650 if (code == ZERO_EXTRACT)
652 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
653 if (wanted_mode == VOIDmode)
654 wanted_mode = word_mode;
656 #endif
657 #ifdef HAVE_extv
658 if (code == SIGN_EXTRACT)
660 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
661 if (wanted_mode == VOIDmode)
662 wanted_mode = word_mode;
664 #endif
666 /* If we have a narrower mode, we can do something. */
667 if (wanted_mode != VOIDmode
668 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
670 int offset = pos / BITS_PER_UNIT;
671 rtx newmem;
673 /* If the bytes and bits are counted differently, we
674 must adjust the offset. */
675 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
676 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
677 - offset);
679 pos %= GET_MODE_BITSIZE (wanted_mode);
681 newmem = gen_rtx_MEM (wanted_mode,
682 plus_constant (XEXP (to, 0), offset));
683 MEM_COPY_ATTRIBUTES (newmem, to);
685 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
686 validate_change (object, &XEXP (x, 0), newmem, 1);
690 break;
692 default:
693 break;
696 /* For commutative or comparison operations we've already performed
697 replacements. Don't try to perform them again. */
698 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
700 fmt = GET_RTX_FORMAT (code);
701 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
703 if (fmt[i] == 'e')
704 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
705 else if (fmt[i] == 'E')
706 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
707 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
712 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
713 with TO. After all changes have been made, validate by seeing
714 if INSN is still valid. */
717 validate_replace_rtx_subexp (from, to, insn, loc)
718 rtx from, to, insn, *loc;
720 validate_replace_rtx_1 (loc, from, to, insn);
721 return apply_change_group ();
724 /* Try replacing every occurrence of FROM in INSN with TO. After all
725 changes have been made, validate by seeing if INSN is still valid. */
728 validate_replace_rtx (from, to, insn)
729 rtx from, to, insn;
731 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
732 return apply_change_group ();
735 /* Try replacing every occurrence of FROM in INSN with TO. After all
736 changes have been made, validate by seeing if INSN is still valid. */
738 void
739 validate_replace_rtx_group (from, to, insn)
740 rtx from, to, insn;
742 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
745 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
746 SET_DESTs. After all changes have been made, validate by seeing if
747 INSN is still valid. */
750 validate_replace_src (from, to, insn)
751 rtx from, to, insn;
753 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
754 || GET_CODE (PATTERN (insn)) != SET)
755 abort ();
757 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
758 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
759 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
760 from, to, insn);
761 return apply_change_group ();
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
#endif
814 /* This is used by find_single_use to locate an rtx that contains exactly one
815 use of DEST, which is typically either a REG or CC0. It returns a
816 pointer to the innermost rtx expression containing DEST. Appearances of
817 DEST that are being used to totally replace it are not counted. */
819 static rtx *
820 find_single_use_1 (dest, loc)
821 rtx dest;
822 rtx *loc;
824 rtx x = *loc;
825 enum rtx_code code = GET_CODE (x);
826 rtx *result = 0;
827 rtx *this_result;
828 int i;
829 const char *fmt;
831 switch (code)
833 case CONST_INT:
834 case CONST:
835 case LABEL_REF:
836 case SYMBOL_REF:
837 case CONST_DOUBLE:
838 case CLOBBER:
839 return 0;
841 case SET:
842 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
843 of a REG that occupies all of the REG, the insn uses DEST if
844 it is mentioned in the destination or the source. Otherwise, we
845 need just check the source. */
846 if (GET_CODE (SET_DEST (x)) != CC0
847 && GET_CODE (SET_DEST (x)) != PC
848 && GET_CODE (SET_DEST (x)) != REG
849 && ! (GET_CODE (SET_DEST (x)) == SUBREG
850 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
851 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
852 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
853 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
854 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
855 break;
857 return find_single_use_1 (dest, &SET_SRC (x));
859 case MEM:
860 case SUBREG:
861 return find_single_use_1 (dest, &XEXP (x, 0));
863 default:
864 break;
867 /* If it wasn't one of the common cases above, check each expression and
868 vector of this code. Look for a unique usage of DEST. */
870 fmt = GET_RTX_FORMAT (code);
871 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
873 if (fmt[i] == 'e')
875 if (dest == XEXP (x, i)
876 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
877 && REGNO (dest) == REGNO (XEXP (x, i))))
878 this_result = loc;
879 else
880 this_result = find_single_use_1 (dest, &XEXP (x, i));
882 if (result == 0)
883 result = this_result;
884 else if (this_result)
885 /* Duplicate usage. */
886 return 0;
888 else if (fmt[i] == 'E')
890 int j;
892 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
894 if (XVECEXP (x, i, j) == dest
895 || (GET_CODE (dest) == REG
896 && GET_CODE (XVECEXP (x, i, j)) == REG
897 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
898 this_result = loc;
899 else
900 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
902 if (result == 0)
903 result = this_result;
904 else if (this_result)
905 return 0;
910 return result;
913 /* See if DEST, produced in INSN, is used only a single time in the
914 sequel. If so, return a pointer to the innermost rtx expression in which
915 it is used.
917 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
919 This routine will return usually zero either before flow is called (because
920 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
921 note can't be trusted).
923 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
924 care about REG_DEAD notes or LOG_LINKS.
926 Otherwise, we find the single use by finding an insn that has a
927 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
928 only referenced once in that insn, we know that it must be the first
929 and last insn referencing DEST. */
931 rtx *
932 find_single_use (dest, insn, ploc)
933 rtx dest;
934 rtx insn;
935 rtx *ploc;
937 rtx next;
938 rtx *result;
939 rtx link;
941 #ifdef HAVE_cc0
942 if (dest == cc0_rtx)
944 next = NEXT_INSN (insn);
945 if (next == 0
946 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
947 return 0;
949 result = find_single_use_1 (dest, &PATTERN (next));
950 if (result && ploc)
951 *ploc = next;
952 return result;
954 #endif
956 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
957 return 0;
959 for (next = next_nonnote_insn (insn);
960 next != 0 && GET_CODE (next) != CODE_LABEL;
961 next = next_nonnote_insn (next))
962 if (INSN_P (next) && dead_or_set_p (next, dest))
964 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
965 if (XEXP (link, 0) == insn)
966 break;
968 if (link)
970 result = find_single_use_1 (dest, &PATTERN (next));
971 if (ploc)
972 *ploc = next;
973 return result;
977 return 0;
980 /* Return 1 if OP is a valid general operand for machine mode MODE.
981 This is either a register reference, a memory reference,
982 or a constant. In the case of a memory reference, the address
983 is checked for general validity for the target machine.
985 Register and memory references must have mode MODE in order to be valid,
986 but some constants have no machine mode and are valid for any mode.
988 If MODE is VOIDmode, OP is checked for validity for whatever mode
989 it has.
991 The main use of this function is as a predicate in match_operand
992 expressions in the machine description.
994 For an explanation of this function's behavior for registers of
995 class NO_REGS, see the comment for `register_operand'. */
998 general_operand (op, mode)
999 register rtx op;
1000 enum machine_mode mode;
1002 register enum rtx_code code = GET_CODE (op);
1003 int mode_altering_drug = 0;
1005 if (mode == VOIDmode)
1006 mode = GET_MODE (op);
1008 /* Don't accept CONST_INT or anything similar
1009 if the caller wants something floating. */
1010 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1011 && GET_MODE_CLASS (mode) != MODE_INT
1012 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1013 return 0;
1015 if (CONSTANT_P (op))
1016 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1017 || mode == VOIDmode)
1018 #ifdef LEGITIMATE_PIC_OPERAND_P
1019 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1020 #endif
1021 && LEGITIMATE_CONSTANT_P (op));
1023 /* Except for certain constants with VOIDmode, already checked for,
1024 OP's mode must match MODE if MODE specifies a mode. */
1026 if (GET_MODE (op) != mode)
1027 return 0;
1029 if (code == SUBREG)
1031 #ifdef INSN_SCHEDULING
1032 /* On machines that have insn scheduling, we want all memory
1033 reference to be explicit, so outlaw paradoxical SUBREGs. */
1034 if (GET_CODE (SUBREG_REG (op)) == MEM
1035 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1036 return 0;
1037 #endif
1039 op = SUBREG_REG (op);
1040 code = GET_CODE (op);
1041 #if 0
1042 /* No longer needed, since (SUBREG (MEM...))
1043 will load the MEM into a reload reg in the MEM's own mode. */
1044 mode_altering_drug = 1;
1045 #endif
1048 if (code == REG)
1049 /* A register whose class is NO_REGS is not a general operand. */
1050 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1051 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1053 if (code == MEM)
1055 register rtx y = XEXP (op, 0);
1057 if (! volatile_ok && MEM_VOLATILE_P (op))
1058 return 0;
1060 if (GET_CODE (y) == ADDRESSOF)
1061 return 1;
1063 /* Use the mem's mode, since it will be reloaded thus. */
1064 mode = GET_MODE (op);
1065 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1068 /* Pretend this is an operand for now; we'll run force_operand
1069 on its replacement in fixup_var_refs_1. */
1070 if (code == ADDRESSOF)
1071 return 1;
1073 return 0;
1075 win:
1076 if (mode_altering_drug)
1077 return ! mode_dependent_address_p (XEXP (op, 0));
1078 return 1;
1081 /* Return 1 if OP is a valid memory address for a memory reference
1082 of mode MODE.
1084 The main use of this function is as a predicate in match_operand
1085 expressions in the machine description. */
1088 address_operand (op, mode)
1089 register rtx op;
1090 enum machine_mode mode;
1092 return memory_address_p (mode, op);
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      /* Reject a paradoxical use of a hard register in a class that
	 cannot change mode, except for complex modes.  */
      if (GET_CODE (SUBREG_REG (op)) == REG
	  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
	  && (TEST_HARD_REG_BIT
	      (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
	       REGNO (SUBREG_REG (op))))
	  && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* Judge the inner register from here on.  */
      op = SUBREG_REG (op);
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.
   Delegates entirely to register_operand with Pmode substituted.  */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}
1165 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1166 or a hard register. */
1169 scratch_operand (op, mode)
1170 register rtx op;
1171 enum machine_mode mode;
1173 if (GET_MODE (op) != mode && mode != VOIDmode)
1174 return 0;
1176 return (GET_CODE (op) == SCRATCH
1177 || (GET_CODE (op) == REG
1178 && REGNO (op) < FIRST_PSEUDO_REGISTER));
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}
1213 /* Returns 1 if OP is an operand that is a CONST_INT. */
1216 const_int_operand (op, mode)
1217 register rtx op;
1218 enum machine_mode mode ATTRIBUTE_UNUSED;
1220 return GET_CODE (op) == CONST_INT;
1223 /* Returns 1 if OP is an operand that is a constant integer or constant
1224 floating-point number. */
1227 const_double_operand (op, mode)
1228 register rtx op;
1229 enum machine_mode mode;
1231 /* Don't accept CONST_INT or anything similar
1232 if the caller wants something floating. */
1233 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1234 && GET_MODE_CLASS (mode) != MODE_INT
1235 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1236 return 0;
1238 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1239 && (mode == VOIDmode || GET_MODE (op) == mode
1240 || GET_MODE (op) == VOIDmode));
1243 /* Return 1 if OP is a general operand that is not an immediate operand. */
1246 nonimmediate_operand (op, mode)
1247 register rtx op;
1248 enum machine_mode mode;
1250 return (general_operand (op, mode) && ! CONSTANT_P (op));
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1300 /* Return 1 if OP is a valid operand that stands for pushing a
1301 value of mode MODE onto the stack.
1303 The main use of this function is as a predicate in match_operand
1304 expressions in the machine description. */
1307 push_operand (op, mode)
1308 rtx op;
1309 enum machine_mode mode;
1311 if (GET_CODE (op) != MEM)
1312 return 0;
1314 if (mode != VOIDmode && GET_MODE (op) != mode)
1315 return 0;
1317 op = XEXP (op, 0);
1319 if (GET_CODE (op) != STACK_PUSH_CODE)
1320 return 0;
1322 return XEXP (op, 0) == stack_pointer_rtx;
1325 /* Return 1 if OP is a valid operand that stands for popping a
1326 value of mode MODE off the stack.
1328 The main use of this function is as a predicate in match_operand
1329 expressions in the machine description. */
1332 pop_operand (op, mode)
1333 rtx op;
1334 enum machine_mode mode;
1336 if (GET_CODE (op) != MEM)
1337 return 0;
1339 if (mode != VOIDmode && GET_MODE (op) != mode)
1340 return 0;
1342 op = XEXP (op, 0);
1344 if (GET_CODE (op) != STACK_POP_CODE)
1345 return 0;
1347 return XEXP (op, 0) == stack_pointer_rtx;
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     register rtx addr;
{
  /* ADDRESSOF is always valid; it is converted into something
     that will not be a MEM later.  */
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  /* The target macro jumps to `win' when the address is legitimate.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  /* After reload, look through a SUBREG to the underlying MEM.  */
  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
      rtx inner = SUBREG_REG (op);

      /* Adjust the byte offset for big-endian word layout.  */
      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
1434 /* Return 1 if this is a comparison operator. This allows the use of
1435 MATCH_OPERATOR to recognize all the branch insns. */
1438 comparison_operator (op, mode)
1439 register rtx op;
1440 enum machine_mode mode;
1442 return ((mode == VOIDmode || GET_MODE (op) == mode)
1443 && GET_RTX_CLASS (GET_CODE (op)) == '<');
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  register int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      /* Operand 0 is the output; inputs follow starting at index 1.  */
      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      /* Inputs follow the outputs in the operand vectors.  */
      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  /* NULL if BODY was not a recognized asm pattern.  */
  return template;
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  /* Scan the constraint letter by letter; any alternative that
     accepts OP makes the whole constraint succeed.  */
  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
	{
	  /* Modifiers and alternative separators say nothing about
	     what OP may be.  */
	case '=':
	case '+':
	case '*':
	case '%':
	case '?':
	case '!':
	case '#':
	case '&':
	case ',':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  result = -1;
	  break;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    return 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  /* Note: the `1 ||' deliberately disables the PRE_DEC/POST_DEC
	     test, per the comment above.  */
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    return 1;
	  break;

	case '>':
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    return 1;
	  break;

	case 'E':
#ifndef REAL_ARITHMETIC
	  /* Match any floating double constant, but only if
	     we can examine the bits of it reliably.  */
	  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
	       || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
	      && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
	    break;
#endif
	  /* FALLTHRU */

	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE)
	    return 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	    return 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	    return 1;
	  break;

	case 's':
	  /* 's' excludes explicit integer constants; they fall through
	     to 'i' only when they are not plain integers.  */
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* FALLTHRU */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    return 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    return 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	    return 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	    return 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	    return 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	    return 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	    return 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	    return 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	    return 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	    return 1;
	  break;

	case 'X':
	  return 1;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    return 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  Note the case label
	     inside the if-body: 'r' jumps directly into the register
	     test without the REG_CLASS_FROM_LETTER check.  */
	  if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		return 1;
	    }
#ifdef EXTRA_CONSTRAINT
	  if (EXTRA_CONSTRAINT (op, c))
	    return 1;
#endif
	  break;
	}
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (p)
     rtx *p;
{
  register rtx *tem;
  register enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand recursively for containing a
     constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
1914 /* Return 1 if OP is a memory reference
1915 whose address contains no side effects
1916 and remains valid after the addition
1917 of a positive integer less than the
1918 size of the object being referenced.
1920 We assume that the original address is valid and do not check it.
1922 This uses strict_memory_address_p as a subroutine, so
1923 don't use it before reload. */
1926 offsettable_memref_p (op)
1927 rtx op;
1929 return ((GET_CODE (op) == MEM)
1930 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1933 /* Similar, but don't require a strictly valid mem ref:
1934 consider pseudo-regs valid as index or base regs. */
1937 offsettable_nonstrict_memref_p (op)
1938 rtx op;
1940 return ((GET_CODE (op) == MEM)
1941 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     register rtx y;
{
  register enum rtx_code ycode = GET_CODE (y);
  register rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      /* Temporarily replace the constant term in place, test, then
	 restore — Y may be shared RTL.  */
      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  /* Autoincrement-style addresses ('a' class) have side effects.  */
  if (GET_RTX_CLASS (ycode) == 'a')
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  */

  z = plus_constant_for_output (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (addr)
     rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  /* Non-memory general operands never have a mode-dependent address.  */
  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
  return 0;
}
/* Given an operand OP that is a valid memory reference which
   satisfies offsettable_memref_p, return a new memory reference whose
   address has been adjusted by OFFSET.  OFFSET should be positive and
   less than the size of the object referenced.  */

rtx
adj_offsettable_operand (op, offset)
     rtx op;
     int offset;
{
  register enum rtx_code code = GET_CODE (op);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      register rtx new;

      if (CONSTANT_ADDRESS_P (y))
	{
	  /* Constant address: simply build a new MEM at Y + OFFSET.  */
	  new = gen_rtx_MEM (GET_MODE (op),
			     plus_constant_for_output (y, offset));
	  MEM_COPY_ATTRIBUTES (new, op);
	  return new;
	}

      if (GET_CODE (y) == PLUS)
	{
	  rtx z = y;
	  register rtx *const_loc;

	  /* Copy OP first so we don't clobber shared RTL, then bump
	     the constant term of the copied address in place.  */
	  op = copy_rtx (op);
	  z = XEXP (op, 0);
	  const_loc = find_constant_term_loc (&z);
	  if (const_loc)
	    {
	      *const_loc = plus_constant_for_output (*const_loc, offset);
	      return op;
	    }
	}

      /* No constant term to adjust: wrap the whole address.  */
      new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
      MEM_COPY_ATTRIBUTES (new, op);
      return new;
    }
  /* Caller violated the contract: OP was not a MEM.  */
  abort ();
}
2105 /* Like extract_insn, but save insn extracted and don't extract again, when
2106 called again for the same insn expecting that recog_data still contain the
2107 valid information. This is used primary by gen_attr infrastructure that
2108 often does extract insn again and again. */
2109 void
2110 extract_insn_cached (insn)
2111 rtx insn;
2113 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2114 return;
2115 extract_insn (insn);
2116 recog_data.insn = insn;
2118 /* Do cached extract_insn, constrain_operand and complain about failures.
2119 Used by insn_attrtab. */
2120 void
2121 extract_constrain_insn_cached (insn)
2122 rtx insn;
2124 extract_insn_cached (insn);
2125 if (which_alternative == -1
2126 && !constrain_operands (reload_completed))
2127 fatal_insn_not_found (insn);
2129 /* Do cached constrain_operand and complain about failures. */
2131 constrain_operands_cached (strict)
2132 int strict;
2134 if (which_alternative == -1)
2135 return constrain_operands (strict);
2136 else
2137 return 1;
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (insn)
     rtx insn;
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  /* Reset the global recog_data before refilling it.  */
  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* These patterns have no operands to extract.  */
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  if (noperands > MAX_RECOG_OPERANDS)
	    abort ();

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode);
	  if (noperands > 0)
	    {
	      /* Count commas in operand 0's constraint to learn the
		 number of alternatives.  */
	      const char *p =  recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}
      /* asm_noperands rejected the pattern.  */
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }

  /* Classify each operand as input, output, or in-out from the first
     character of its constraint.  */
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
    abort ();
}
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints ()
{
  int i;

  memset (recog_op_alt, 0, sizeof recog_op_alt);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
	{
	  op_alt[j].class = NO_REGS;
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

	  /* An empty alternative allows anything.  */
	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[j].anything_ok = 1;
	      continue;
	    }

	  /* Parse this alternative's constraint letters up to the next
	     ',' or the end of the string.  */
	  for (;;)
	    {
	      char c = *p++;
	      if (c == '#')
		/* Skip a disabled-alternative marker's remainder.  */
		do
		  c = *p++;
		while (c != ',' && c != '\0');

	      if (c == ',' || c == '\0')
		break;

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[j].reject += 6;
		  break;
		case '!':
		  op_alt[j].reject += 600;
		  break;
		case '&':
		  op_alt[j].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  /* Record both directions of a matching-operand pair.  */
		  op_alt[j].matches = c - '0';
		  recog_op_alt[op_alt[j].matches][j].matched = i;
		  break;

		case 'm':
		  op_alt[j].memory_ok = 1;
		  break;
		case '<':
		  op_alt[j].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[j].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[j].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[j].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[j].anything_ok = 1;
		  break;

		case 'p':
		  op_alt[j].is_address = 1;
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
		  break;

		case 'g': case 'r':
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
		  break;

		default:
		  /* Any other letter names a target register class.  */
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
		  break;
		}
	    }
	}
    }
}
2342 /* Check the operands of an insn against the insn's operand constraints
2343 and return 1 if they are valid.
2344 The information about the insn's operands, constraints, operand modes
2345 etc. is obtained from the global variables set up by extract_insn.
2347 WHICH_ALTERNATIVE is set to a number which indicates which
2348 alternative of constraints was matched: 0 for the first alternative,
2349 1 for the next, etc.
   In addition, when two operands are required to match
2352 and it happens that the output operand is (reg) while the
2353 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2354 make the output operand look like the input.
2355 This is because the output operand is the one the template will print.
2357 This is used in final, just before printing the assembler code and by
2358 the routines that determine an insn's attribute.
2360 If STRICT is a positive non-zero value, it means that we have been
2361 called after reload has been completed. In that case, we must
2362 do all checks strictly. If it is zero, it means that we have been called
2363 before reload has completed. In that case, we first try to see if we can
2364 find an alternative that matches strictly. If not, we try again, this
2365 time assuming that reload will fix up the insn. This provides a "best
2366 guess" for the alternative and is used to compute attributes of insns prior
2367 to reload. A negative value of STRICT is used for this internal call. */
2369 struct funny_match
2371 int this, other;
2375 constrain_operands (strict)
2376 int strict;
2378 const char *constraints[MAX_RECOG_OPERANDS];
2379 int matching_operands[MAX_RECOG_OPERANDS];
2380 int earlyclobber[MAX_RECOG_OPERANDS];
2381 register int c;
2383 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2384 int funny_match_index;
2386 which_alternative = 0;
2387 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2388 return 1;
2390 for (c = 0; c < recog_data.n_operands; c++)
2392 constraints[c] = recog_data.constraints[c];
2393 matching_operands[c] = -1;
2398 register int opno;
2399 int lose = 0;
2400 funny_match_index = 0;
2402 for (opno = 0; opno < recog_data.n_operands; opno++)
2404 register rtx op = recog_data.operand[opno];
2405 enum machine_mode mode = GET_MODE (op);
2406 register const char *p = constraints[opno];
2407 int offset = 0;
2408 int win = 0;
2409 int val;
2411 earlyclobber[opno] = 0;
2413 /* A unary operator may be accepted by the predicate, but it
2414 is irrelevant for matching constraints. */
2415 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2416 op = XEXP (op, 0);
2418 if (GET_CODE (op) == SUBREG)
2420 if (GET_CODE (SUBREG_REG (op)) == REG
2421 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2422 offset = SUBREG_WORD (op);
2423 op = SUBREG_REG (op);
2426 /* An empty constraint or empty alternative
2427 allows anything which matched the pattern. */
2428 if (*p == 0 || *p == ',')
2429 win = 1;
2431 while (*p && (c = *p++) != ',')
2432 switch (c)
2434 case '?': case '!': case '*': case '%':
2435 case '=': case '+':
2436 break;
2438 case '#':
2439 /* Ignore rest of this alternative as far as
2440 constraint checking is concerned. */
2441 while (*p && *p != ',')
2442 p++;
2443 break;
2445 case '&':
2446 earlyclobber[opno] = 1;
2447 break;
2449 case '0': case '1': case '2': case '3': case '4':
2450 case '5': case '6': case '7': case '8': case '9':
2452 /* This operand must be the same as a previous one.
2453 This kind of constraint is used for instructions such
2454 as add when they take only two operands.
2456 Note that the lower-numbered operand is passed first.
2458 If we are not testing strictly, assume that this constraint
2459 will be satisfied. */
2460 if (strict < 0)
2461 val = 1;
2462 else
2464 rtx op1 = recog_data.operand[c - '0'];
2465 rtx op2 = recog_data.operand[opno];
2467 /* A unary operator may be accepted by the predicate,
2468 but it is irrelevant for matching constraints. */
2469 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2470 op1 = XEXP (op1, 0);
2471 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2472 op2 = XEXP (op2, 0);
2474 val = operands_match_p (op1, op2);
2477 matching_operands[opno] = c - '0';
2478 matching_operands[c - '0'] = opno;
2480 if (val != 0)
2481 win = 1;
2482 /* If output is *x and input is *--x,
2483 arrange later to change the output to *--x as well,
2484 since the output op is the one that will be printed. */
2485 if (val == 2 && strict > 0)
2487 funny_match[funny_match_index].this = opno;
2488 funny_match[funny_match_index++].other = c - '0';
2490 break;
2492 case 'p':
2493 /* p is used for address_operands. When we are called by
2494 gen_reload, no one will have checked that the address is
2495 strictly valid, i.e., that all pseudos requiring hard regs
2496 have gotten them. */
2497 if (strict <= 0
2498 || (strict_memory_address_p (recog_data.operand_mode[opno],
2499 op)))
2500 win = 1;
2501 break;
2503 /* No need to check general_operand again;
2504 it was done in insn-recog.c. */
2505 case 'g':
2506 /* Anything goes unless it is a REG and really has a hard reg
2507 but the hard reg is not in the class GENERAL_REGS. */
2508 if (strict < 0
2509 || GENERAL_REGS == ALL_REGS
2510 || GET_CODE (op) != REG
2511 || (reload_in_progress
2512 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2513 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2514 win = 1;
2515 break;
2517 case 'X':
2518 /* This is used for a MATCH_SCRATCH in the cases when
2519 we don't actually need anything. So anything goes
2520 any time. */
2521 win = 1;
2522 break;
2524 case 'm':
2525 if (GET_CODE (op) == MEM
2526 /* Before reload, accept what reload can turn into mem. */
2527 || (strict < 0 && CONSTANT_P (op))
2528 /* During reload, accept a pseudo */
2529 || (reload_in_progress && GET_CODE (op) == REG
2530 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2531 win = 1;
2532 break;
2534 case '<':
2535 if (GET_CODE (op) == MEM
2536 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2537 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2538 win = 1;
2539 break;
2541 case '>':
2542 if (GET_CODE (op) == MEM
2543 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2544 || GET_CODE (XEXP (op, 0)) == POST_INC))
2545 win = 1;
2546 break;
2548 case 'E':
2549 #ifndef REAL_ARITHMETIC
2550 /* Match any CONST_DOUBLE, but only if
2551 we can examine the bits of it reliably. */
2552 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2553 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2554 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2555 break;
2556 #endif
2557 if (GET_CODE (op) == CONST_DOUBLE)
2558 win = 1;
2559 break;
2561 case 'F':
2562 if (GET_CODE (op) == CONST_DOUBLE)
2563 win = 1;
2564 break;
2566 case 'G':
2567 case 'H':
2568 if (GET_CODE (op) == CONST_DOUBLE
2569 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2570 win = 1;
2571 break;
2573 case 's':
2574 if (GET_CODE (op) == CONST_INT
2575 || (GET_CODE (op) == CONST_DOUBLE
2576 && GET_MODE (op) == VOIDmode))
2577 break;
2578 case 'i':
2579 if (CONSTANT_P (op))
2580 win = 1;
2581 break;
2583 case 'n':
2584 if (GET_CODE (op) == CONST_INT
2585 || (GET_CODE (op) == CONST_DOUBLE
2586 && GET_MODE (op) == VOIDmode))
2587 win = 1;
2588 break;
2590 case 'I':
2591 case 'J':
2592 case 'K':
2593 case 'L':
2594 case 'M':
2595 case 'N':
2596 case 'O':
2597 case 'P':
2598 if (GET_CODE (op) == CONST_INT
2599 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2600 win = 1;
2601 break;
2603 case 'V':
2604 if (GET_CODE (op) == MEM
2605 && ((strict > 0 && ! offsettable_memref_p (op))
2606 || (strict < 0
2607 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2608 || (reload_in_progress
2609 && !(GET_CODE (op) == REG
2610 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2611 win = 1;
2612 break;
2614 case 'o':
2615 if ((strict > 0 && offsettable_memref_p (op))
2616 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2617 /* Before reload, accept what reload can handle. */
2618 || (strict < 0
2619 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2620 /* During reload, accept a pseudo */
2621 || (reload_in_progress && GET_CODE (op) == REG
2622 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2623 win = 1;
2624 break;
2626 default:
2628 enum reg_class class;
2630 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2631 if (class != NO_REGS)
2633 if (strict < 0
2634 || (strict == 0
2635 && GET_CODE (op) == REG
2636 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2637 || (strict == 0 && GET_CODE (op) == SCRATCH)
2638 || (GET_CODE (op) == REG
2639 && reg_fits_class_p (op, class, offset, mode)))
2640 win = 1;
2642 #ifdef EXTRA_CONSTRAINT
2643 else if (EXTRA_CONSTRAINT (op, c))
2644 win = 1;
2645 #endif
2646 break;
2650 constraints[opno] = p;
2651 /* If this operand did not win somehow,
2652 this alternative loses. */
2653 if (! win)
2654 lose = 1;
2656 /* This alternative won; the operands are ok.
2657 Change whichever operands this alternative says to change. */
2658 if (! lose)
2660 int opno, eopno;
2662 /* See if any earlyclobber operand conflicts with some other
2663 operand. */
2665 if (strict > 0)
2666 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2667 /* Ignore earlyclobber operands now in memory,
2668 because we would often report failure when we have
2669 two memory operands, one of which was formerly a REG. */
2670 if (earlyclobber[eopno]
2671 && GET_CODE (recog_data.operand[eopno]) == REG)
2672 for (opno = 0; opno < recog_data.n_operands; opno++)
2673 if ((GET_CODE (recog_data.operand[opno]) == MEM
2674 || recog_data.operand_type[opno] != OP_OUT)
2675 && opno != eopno
2676 /* Ignore things like match_operator operands. */
2677 && *recog_data.constraints[opno] != 0
2678 && ! (matching_operands[opno] == eopno
2679 && operands_match_p (recog_data.operand[opno],
2680 recog_data.operand[eopno]))
2681 && ! safe_from_earlyclobber (recog_data.operand[opno],
2682 recog_data.operand[eopno]))
2683 lose = 1;
2685 if (! lose)
2687 while (--funny_match_index >= 0)
2689 recog_data.operand[funny_match[funny_match_index].other]
2690 = recog_data.operand[funny_match[funny_match_index].this];
2693 return 1;
2697 which_alternative++;
2699 while (which_alternative < recog_data.n_alternatives);
2701 which_alternative = -1;
2702 /* If we are about to reject this, but we are not to test strictly,
2703 try a very loose test. Only return failure if it fails also. */
2704 if (strict == 0)
2705 return constrain_operands (-1);
2706 else
2707 return 0;
2710 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2711 is a hard reg in class CLASS when its regno is offset by OFFSET
2712 and changed to mode MODE.
2713 If REG occupies multiple hard regs, all of them must be in CLASS. */
2716 reg_fits_class_p (operand, class, offset, mode)
2717 rtx operand;
2718 register enum reg_class class;
2719 int offset;
2720 enum machine_mode mode;
2722 register int regno = REGNO (operand);
2723 if (regno < FIRST_PSEUDO_REGISTER
2724 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2725 regno + offset))
2727 register int sr;
2728 regno += offset;
2729 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2730 sr > 0; sr--)
2731 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2732 regno + sr))
2733 break;
2734 return sr == 0;
2737 return 0;
2740 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2742 void
2743 split_all_insns (upd_life)
2744 int upd_life;
2746 sbitmap blocks;
2747 int changed;
2748 int i;
2750 blocks = sbitmap_alloc (n_basic_blocks);
2751 sbitmap_zero (blocks);
2752 changed = 0;
2754 for (i = n_basic_blocks - 1; i >= 0; --i)
2756 basic_block bb = BASIC_BLOCK (i);
2757 rtx insn, next;
2759 for (insn = bb->head; insn ; insn = next)
2761 rtx set;
2763 /* Can't use `next_real_insn' because that might go across
2764 CODE_LABELS and short-out basic blocks. */
2765 next = NEXT_INSN (insn);
2766 if (! INSN_P (insn))
2769 /* Don't split no-op move insns. These should silently
2770 disappear later in final. Splitting such insns would
2771 break the code that handles REG_NO_CONFLICT blocks. */
2773 else if ((set = single_set (insn)) != NULL
2774 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2776 /* Nops get in the way while scheduling, so delete them
2777 now if register allocation has already been done. It
2778 is too risky to try to do this before register
2779 allocation, and there are unlikely to be very many
2780 nops then anyways. */
2781 if (reload_completed)
2783 PUT_CODE (insn, NOTE);
2784 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2785 NOTE_SOURCE_FILE (insn) = 0;
2788 else
2790 /* Split insns here to get max fine-grain parallelism. */
2791 rtx first = PREV_INSN (insn);
2792 rtx last = try_split (PATTERN (insn), insn, 1);
2794 if (last != insn)
2796 SET_BIT (blocks, i);
2797 changed = 1;
2799 /* try_split returns the NOTE that INSN became. */
2800 PUT_CODE (insn, NOTE);
2801 NOTE_SOURCE_FILE (insn) = 0;
2802 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2804 /* ??? Coddle to md files that generate subregs in post-
2805 reload splitters instead of computing the proper
2806 hard register. */
2807 if (reload_completed && first != last)
2809 first = NEXT_INSN (first);
2810 while (1)
2812 if (INSN_P (first))
2813 cleanup_subreg_operands (first);
2814 if (first == last)
2815 break;
2816 first = NEXT_INSN (first);
2820 if (insn == bb->end)
2822 bb->end = last;
2823 break;
2828 if (insn == bb->end)
2829 break;
2832 /* ??? When we're called from just after reload, the CFG is in bad
2833 shape, and we may have fallen off the end. This could be fixed
2834 by having reload not try to delete unreachable code. Otherwise
2835 assert we found the end insn. */
2836 if (insn == NULL && upd_life)
2837 abort ();
2840 if (changed && upd_life)
2842 compute_bb_for_insn (get_max_uid ());
2843 count_or_remove_death_notes (blocks, 1);
2844 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2847 sbitmap_free (blocks);
2850 #ifdef HAVE_peephole2
2851 struct peep2_insn_data
2853 rtx insn;
2854 regset live_before;
2857 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2858 static int peep2_current;
2860 /* A non-insn marker indicating the last insn of the block.
2861 The live_before regset for this element is correct, indicating
2862 global_live_at_end for the block. */
2863 #define PEEP2_EOB pc_rtx
2865 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2866 does not exist. Used by the recognizer to find the next insn to match
2867 in a multi-insn pattern. */
2870 peep2_next_insn (n)
2871 int n;
2873 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2874 abort ();
2876 n += peep2_current;
2877 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2878 n -= MAX_INSNS_PER_PEEP2 + 1;
2880 if (peep2_insn_data[n].insn == PEEP2_EOB)
2881 return NULL_RTX;
2882 return peep2_insn_data[n].insn;
2885 /* Return true if REGNO is dead before the Nth non-note insn
2886 after `current'. */
2889 peep2_regno_dead_p (ofs, regno)
2890 int ofs;
2891 int regno;
2893 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2894 abort ();
2896 ofs += peep2_current;
2897 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2898 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2900 if (peep2_insn_data[ofs].insn == NULL_RTX)
2901 abort ();
2903 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2906 /* Similarly for a REG. */
2909 peep2_reg_dead_p (ofs, reg)
2910 int ofs;
2911 rtx reg;
2913 int regno, n;
2915 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2916 abort ();
2918 ofs += peep2_current;
2919 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2920 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2922 if (peep2_insn_data[ofs].insn == NULL_RTX)
2923 abort ();
2925 regno = REGNO (reg);
2926 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2927 while (--n >= 0)
2928 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2929 return 0;
2930 return 1;
2933 /* Try to find a hard register of mode MODE, matching the register class in
2934 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2935 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2936 in which case the only condition is that the register must be available
2937 before CURRENT_INSN.
2938 Registers that already have bits set in REG_SET will not be considered.
2940 If an appropriate register is available, it will be returned and the
2941 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2942 returned. */
2945 peep2_find_free_register (from, to, class_str, mode, reg_set)
2946 int from, to;
2947 const char *class_str;
2948 enum machine_mode mode;
2949 HARD_REG_SET *reg_set;
2951 static int search_ofs;
2952 enum reg_class class;
2953 HARD_REG_SET live;
2954 int i;
2956 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2957 abort ();
2959 from += peep2_current;
2960 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2961 from -= MAX_INSNS_PER_PEEP2 + 1;
2962 to += peep2_current;
2963 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2964 to -= MAX_INSNS_PER_PEEP2 + 1;
2966 if (peep2_insn_data[from].insn == NULL_RTX)
2967 abort ();
2968 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2970 while (from != to)
2972 HARD_REG_SET this_live;
2974 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2975 from = 0;
2976 if (peep2_insn_data[from].insn == NULL_RTX)
2977 abort ();
2978 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2979 IOR_HARD_REG_SET (live, this_live);
2982 class = (class_str[0] == 'r' ? GENERAL_REGS
2983 : REG_CLASS_FROM_LETTER (class_str[0]));
2985 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2987 int raw_regno, regno, success, j;
2989 /* Distribute the free registers as much as possible. */
2990 raw_regno = search_ofs + i;
2991 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2992 raw_regno -= FIRST_PSEUDO_REGISTER;
2993 #ifdef REG_ALLOC_ORDER
2994 regno = reg_alloc_order[raw_regno];
2995 #else
2996 regno = raw_regno;
2997 #endif
2999 /* Don't allocate fixed registers. */
3000 if (fixed_regs[regno])
3001 continue;
3002 /* Make sure the register is of the right class. */
3003 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3004 continue;
3005 /* And can support the mode we need. */
3006 if (! HARD_REGNO_MODE_OK (regno, mode))
3007 continue;
3008 /* And that we don't create an extra save/restore. */
3009 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3010 continue;
3011 /* And we don't clobber traceback for noreturn functions. */
3012 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3013 && (! reload_completed || frame_pointer_needed))
3014 continue;
3016 success = 1;
3017 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3019 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3020 || TEST_HARD_REG_BIT (live, regno + j))
3022 success = 0;
3023 break;
3026 if (success)
3028 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3029 SET_HARD_REG_BIT (*reg_set, regno + j);
3031 /* Start the next search with the next register. */
3032 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3033 raw_regno = 0;
3034 search_ofs = raw_regno;
3036 return gen_rtx_REG (mode, regno);
3040 search_ofs = 0;
3041 return NULL_RTX;
3044 /* Perform the peephole2 optimization pass. */
3046 void
3047 peephole2_optimize (dump_file)
3048 FILE *dump_file ATTRIBUTE_UNUSED;
3050 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3051 rtx insn, prev;
3052 regset live;
3053 int i, b;
3054 #ifdef HAVE_conditional_execution
3055 sbitmap blocks;
3056 int changed;
3057 #endif
3059 /* Initialize the regsets we're going to use. */
3060 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3061 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3062 live = INITIALIZE_REG_SET (rs_heads[i]);
3064 #ifdef HAVE_conditional_execution
3065 blocks = sbitmap_alloc (n_basic_blocks);
3066 sbitmap_zero (blocks);
3067 changed = 0;
3068 #else
3069 count_or_remove_death_notes (NULL, 1);
3070 #endif
3072 for (b = n_basic_blocks - 1; b >= 0; --b)
3074 basic_block bb = BASIC_BLOCK (b);
3075 struct propagate_block_info *pbi;
3077 /* Indicate that all slots except the last holds invalid data. */
3078 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3079 peep2_insn_data[i].insn = NULL_RTX;
3081 /* Indicate that the last slot contains live_after data. */
3082 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3083 peep2_current = MAX_INSNS_PER_PEEP2;
3085 /* Start up propagation. */
3086 COPY_REG_SET (live, bb->global_live_at_end);
3087 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3089 #ifdef HAVE_conditional_execution
3090 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3091 #else
3092 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3093 #endif
3095 for (insn = bb->end; ; insn = prev)
3097 prev = PREV_INSN (insn);
3098 if (INSN_P (insn))
3100 rtx try;
3101 int match_len;
3103 /* Record this insn. */
3104 if (--peep2_current < 0)
3105 peep2_current = MAX_INSNS_PER_PEEP2;
3106 peep2_insn_data[peep2_current].insn = insn;
3107 propagate_one_insn (pbi, insn);
3108 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3110 /* Match the peephole. */
3111 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3112 if (try != NULL)
3114 i = match_len + peep2_current;
3115 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3116 i -= MAX_INSNS_PER_PEEP2 + 1;
3118 /* Replace the old sequence with the new. */
3119 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3120 try = emit_insn_after (try, prev);
3122 /* Adjust the basic block boundaries. */
3123 if (peep2_insn_data[i].insn == bb->end)
3124 bb->end = try;
3125 if (insn == bb->head)
3126 bb->head = NEXT_INSN (prev);
3128 #ifdef HAVE_conditional_execution
3129 /* With conditional execution, we cannot back up the
3130 live information so easily, since the conditional
3131 death data structures are not so self-contained.
3132 So record that we've made a modification to this
3133 block and update life information at the end. */
3134 SET_BIT (blocks, b);
3135 changed = 1;
3137 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3138 peep2_insn_data[i].insn = NULL_RTX;
3139 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3140 #else
3141 /* Back up lifetime information past the end of the
3142 newly created sequence. */
3143 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3144 i = 0;
3145 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3147 /* Update life information for the new sequence. */
3150 if (INSN_P (try))
3152 if (--i < 0)
3153 i = MAX_INSNS_PER_PEEP2;
3154 peep2_insn_data[i].insn = try;
3155 propagate_one_insn (pbi, try);
3156 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3158 try = PREV_INSN (try);
3160 while (try != prev);
3162 /* ??? Should verify that LIVE now matches what we
3163 had before the new sequence. */
3165 peep2_current = i;
3166 #endif
3170 if (insn == bb->head)
3171 break;
3174 free_propagate_block_info (pbi);
3177 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3178 FREE_REG_SET (peep2_insn_data[i].live_before);
3179 FREE_REG_SET (live);
3181 #ifdef HAVE_conditional_execution
3182 count_or_remove_death_notes (blocks, 1);
3183 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3184 sbitmap_free (blocks);
3185 #endif
3187 #endif /* HAVE_peephole2 */