Update my entries in the MAINTAINERS file.
[official-gcc.git] / gcc / recog.c
blobc01f884ebb4fdb9d4c113cdee048f3a949a5e4d9
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
31 #include "hard-reg-set.h"
32 #include "recog.h"
33 #include "regs.h"
34 #include "function.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "toplev.h"
38 #include "basic-block.h"
39 #include "output.h"
40 #include "reload.h"
/* Default the stack push/pop address side-effect codes from the direction
   of stack growth when the target headers do not define them explicitly.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
58 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
59 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
60 static rtx *find_constant_term_loc PARAMS ((rtx *));
61 static int insn_invalid_p PARAMS ((rtx));
63 /* Nonzero means allow operands to be volatile.
64 This should be 0 if you are generating rtl, such as if you are calling
65 the functions in optabs.c and expmed.c (most of the time).
66 This should be 1 if all valid insns need to be recognized,
67 such as in regclass.c and final.c and reload.c.
69 init_recog and init_recog_no_volatile are responsible for setting this. */
71 int volatile_ok;
73 struct recog_data recog_data;
75 /* Contains a vector of operand_alternative structures for every operand.
76 Set up by preprocess_constraints. */
77 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79 /* On return from `constrain_operands', indicate which alternative
80 was satisfied. */
82 int which_alternative;
84 /* Nonzero after end of reload pass.
85 Set to 1 or 0 by toplev.c.
86 Controls the significance of (SUBREG (MEM)). */
88 int reload_completed;
90 /* Initialize data used by the function `recog'.
91 This must be called once in the compilation of a function
92 before any insn recognition may be done in the function. */
94 void
95 init_recog_no_volatile ()
97 volatile_ok = 0;
100 void
101 init_recog ()
103 volatile_ok = 1;
106 /* Try recognizing the instruction INSN,
107 and return the code number that results.
108 Remember the code so that repeated calls do not
109 need to spend the time for actual rerecognition.
111 This function is the normal interface to instruction recognition.
112 The automatically-generated function `recog' is normally called
113 through this one. (The only exception is in combine.c.) */
116 recog_memoized_1 (insn)
117 rtx insn;
119 if (INSN_CODE (insn) < 0)
120 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
121 return INSN_CODE (insn);
124 /* Check that X is an insn-body for an `asm' with operands
125 and that the operands mentioned in it are legitimate. */
128 check_asm_operands (x)
129 rtx x;
131 int noperands;
132 rtx *operands;
133 const char **constraints;
134 int i;
136 /* Post-reload, be more strict with things. */
137 if (reload_completed)
139 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
140 extract_insn (make_insn_raw (x));
141 constrain_operands (1);
142 return which_alternative >= 0;
145 noperands = asm_noperands (x);
146 if (noperands < 0)
147 return 0;
148 if (noperands == 0)
149 return 1;
151 operands = (rtx *) alloca (noperands * sizeof (rtx));
152 constraints = (const char **) alloca (noperands * sizeof (char *));
154 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
156 for (i = 0; i < noperands; i++)
158 const char *c = constraints[i];
159 if (c[0] == '%')
160 c++;
161 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
162 c = constraints[c[0] - '0'];
164 if (! asm_operand_ok (operands[i], c))
165 return 0;
168 return 1;
171 /* Static data for the next two routines. */
173 typedef struct change_t
175 rtx object;
176 int old_code;
177 rtx *loc;
178 rtx old;
179 } change_t;
181 static change_t *changes;
182 static int changes_allocated;
184 static int num_changes = 0;
186 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
187 at which NEW will be placed. If OBJECT is zero, no validation is done,
188 the change is simply made.
190 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
191 will be called with the address and mode as parameters. If OBJECT is
192 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
193 the change in place.
195 IN_GROUP is non-zero if this is part of a group of changes that must be
196 performed as a group. In that case, the changes will be stored. The
197 function `apply_change_group' will validate and apply the changes.
199 If IN_GROUP is zero, this is a single change. Try to recognize the insn
200 or validate the memory reference with the change applied. If the result
201 is not valid for the machine, suppress the change and return zero.
202 Otherwise, perform the change and return 1. */
205 validate_change (object, loc, new, in_group)
206 rtx object;
207 rtx *loc;
208 rtx new;
209 int in_group;
211 rtx old = *loc;
213 if (old == new || rtx_equal_p (old, new))
214 return 1;
216 if (in_group == 0 && num_changes != 0)
217 abort ();
219 *loc = new;
221 /* Save the information describing this change. */
222 if (num_changes >= changes_allocated)
224 if (changes_allocated == 0)
225 /* This value allows for repeated substitutions inside complex
226 indexed addresses, or changes in up to 5 insns. */
227 changes_allocated = MAX_RECOG_OPERANDS * 5;
228 else
229 changes_allocated *= 2;
231 changes =
232 (change_t*) xrealloc (changes,
233 sizeof (change_t) * changes_allocated);
236 changes[num_changes].object = object;
237 changes[num_changes].loc = loc;
238 changes[num_changes].old = old;
240 if (object && GET_CODE (object) != MEM)
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
243 case invalid. */
244 changes[num_changes].old_code = INSN_CODE (object);
245 INSN_CODE (object) = -1;
248 num_changes++;
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
253 if (in_group)
254 return 1;
255 else
256 return apply_change_group ();
259 /* This subroutine of apply_change_group verifies whether the changes to INSN
260 were valid; i.e. whether INSN can still be recognized. */
262 static int
263 insn_invalid_p (insn)
264 rtx insn;
266 int icode = recog_memoized (insn);
267 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
269 if (is_asm && ! check_asm_operands (PATTERN (insn)))
270 return 1;
271 if (! is_asm && icode < 0)
272 return 1;
274 /* After reload, verify that all constraints are satisfied. */
275 if (reload_completed)
277 extract_insn (insn);
279 if (! constrain_operands (1))
280 return 1;
283 return 0;
286 /* Apply a group of changes previously issued with `validate_change'.
287 Return 1 if all changes are valid, zero otherwise. */
290 apply_change_group ()
292 int i;
294 /* The changes have been applied and all INSN_CODEs have been reset to force
295 rerecognition.
297 The changes are valid if we aren't given an object, or if we are
298 given a MEM and it still is a valid address, or if this is in insn
299 and it is recognized. In the latter case, if reload has completed,
300 we also require that the operands meet the constraints for
301 the insn. */
303 for (i = 0; i < num_changes; i++)
305 rtx object = changes[i].object;
307 if (object == 0)
308 continue;
310 if (GET_CODE (object) == MEM)
312 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
313 break;
315 else if (insn_invalid_p (object))
317 rtx pat = PATTERN (object);
319 /* Perhaps we couldn't recognize the insn because there were
320 extra CLOBBERs at the end. If so, try to re-recognize
321 without the last CLOBBER (later iterations will cause each of
322 them to be eliminated, in turn). But don't do this if we
323 have an ASM_OPERAND. */
324 if (GET_CODE (pat) == PARALLEL
325 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
326 && asm_noperands (PATTERN (object)) < 0)
328 rtx newpat;
330 if (XVECLEN (pat, 0) == 2)
331 newpat = XVECEXP (pat, 0, 0);
332 else
334 int j;
336 newpat
337 = gen_rtx_PARALLEL (VOIDmode,
338 rtvec_alloc (XVECLEN (pat, 0) - 1));
339 for (j = 0; j < XVECLEN (newpat, 0); j++)
340 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
343 /* Add a new change to this group to replace the pattern
344 with this new pattern. Then consider this change
345 as having succeeded. The change we added will
346 cause the entire call to fail if things remain invalid.
348 Note that this can lose if a later change than the one
349 we are processing specified &XVECEXP (PATTERN (object), 0, X)
350 but this shouldn't occur. */
352 validate_change (object, &PATTERN (object), newpat, 1);
354 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
355 /* If this insn is a CLOBBER or USE, it is always valid, but is
356 never recognized. */
357 continue;
358 else
359 break;
363 if (i == num_changes)
365 num_changes = 0;
366 return 1;
368 else
370 cancel_changes (0);
371 return 0;
375 /* Return the number of changes so far in the current group. */
378 num_validated_changes ()
380 return num_changes;
383 /* Retract the changes numbered NUM and up. */
385 void
386 cancel_changes (num)
387 int num;
389 int i;
391 /* Back out all the changes. Do this in the opposite order in which
392 they were made. */
393 for (i = num_changes - 1; i >= num; i--)
395 *changes[i].loc = changes[i].old;
396 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
397 INSN_CODE (changes[i].object) = changes[i].old_code;
399 num_changes = num;
402 /* Replace every occurrence of FROM in X with TO. Mark each change with
403 validate_change passing OBJECT. */
405 static void
406 validate_replace_rtx_1 (loc, from, to, object)
407 rtx *loc;
408 rtx from, to, object;
410 register int i, j;
411 register const char *fmt;
412 register rtx x = *loc;
413 enum rtx_code code;
415 if (!x)
416 return;
417 code = GET_CODE (x);
418 /* X matches FROM if it is the same rtx or they are both referring to the
419 same register in the same mode. Avoid calling rtx_equal_p unless the
420 operands look similar. */
422 if (x == from
423 || (GET_CODE (x) == REG && GET_CODE (from) == REG
424 && GET_MODE (x) == GET_MODE (from)
425 && REGNO (x) == REGNO (from))
426 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
427 && rtx_equal_p (x, from)))
429 validate_change (object, loc, to, 1);
430 return;
433 /* For commutative or comparison operations, try replacing each argument
434 separately and seeing if we made any changes. If so, put a constant
435 argument last.*/
436 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
438 int prev_changes = num_changes;
440 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
441 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
442 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
444 validate_change (object, loc,
445 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
446 : swap_condition (code),
447 GET_MODE (x), XEXP (x, 1),
448 XEXP (x, 0)),
450 x = *loc;
451 code = GET_CODE (x);
455 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
456 done the substitution, otherwise we won't. */
458 switch (code)
460 case PLUS:
461 /* If we have a PLUS whose second operand is now a CONST_INT, use
462 plus_constant to try to simplify it. */
463 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
464 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
466 return;
468 case MINUS:
469 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
471 validate_change (object, loc,
472 plus_constant (XEXP (x, 0), - INTVAL (to)),
474 return;
476 break;
478 case ZERO_EXTEND:
479 case SIGN_EXTEND:
480 /* In these cases, the operation to be performed depends on the mode
481 of the operand. If we are replacing the operand with a VOIDmode
482 constant, we lose the information. So try to simplify the operation
483 in that case. */
484 if (GET_MODE (to) == VOIDmode
485 && (rtx_equal_p (XEXP (x, 0), from)
486 || (GET_CODE (XEXP (x, 0)) == SUBREG
487 && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
489 rtx new = NULL_RTX;
491 /* If there is a subreg involved, crop to the portion of the
492 constant that we are interested in. */
493 if (GET_CODE (XEXP (x, 0)) == SUBREG)
494 to = operand_subword (to, SUBREG_WORD (XEXP (x, 0)),
495 0, GET_MODE (from));
497 /* If the above didn't fail, perform the extension from the
498 mode of the operand (and not the mode of FROM). */
499 if (to)
500 new = simplify_unary_operation (code, GET_MODE (x), to,
501 GET_MODE (XEXP (x, 0)));
503 /* If any of the above failed, substitute in something that
504 we know won't be recognized. */
505 if (!new)
506 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
508 validate_change (object, loc, new, 1);
509 return;
511 break;
513 case SUBREG:
514 /* In case we are replacing by constant, attempt to simplify it to non-SUBREG
515 expression. We can't do this later, since the information about inner mode
516 may be lost. */
517 if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
519 if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
520 && GET_MODE_SIZE (GET_MODE (from)) > UNITS_PER_WORD
521 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
523 rtx temp = operand_subword (to, SUBREG_WORD (x),
524 0, GET_MODE (from));
525 if (temp)
527 validate_change (object, loc, temp, 1);
528 return;
531 if (subreg_lowpart_p (x))
533 rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
534 if (new)
536 validate_change (object, loc, new, 1);
537 return;
541 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
542 since we are saying that the high bits don't matter. */
543 if (GET_MODE (to) == VOIDmode
544 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (from)))
546 validate_change (object, loc, to, 1);
547 return;
551 /* Changing mode twice with SUBREG => just change it once,
552 or not at all if changing back to starting mode. */
553 if (GET_CODE (to) == SUBREG
554 && rtx_equal_p (SUBREG_REG (x), from))
556 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
557 && SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
559 validate_change (object, loc, SUBREG_REG (to), 1);
560 return;
563 validate_change (object, loc,
564 gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
565 SUBREG_WORD (x) + SUBREG_WORD (to)), 1);
566 return;
569 /* If we have a SUBREG of a register that we are replacing and we are
570 replacing it with a MEM, make a new MEM and try replacing the
571 SUBREG with it. Don't do this if the MEM has a mode-dependent address
572 or if we would be widening it. */
574 if (GET_CODE (from) == REG
575 && GET_CODE (to) == MEM
576 && rtx_equal_p (SUBREG_REG (x), from)
577 && ! mode_dependent_address_p (XEXP (to, 0))
578 && ! MEM_VOLATILE_P (to)
579 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
581 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
582 enum machine_mode mode = GET_MODE (x);
583 rtx new;
585 if (BYTES_BIG_ENDIAN)
586 offset += (MIN (UNITS_PER_WORD,
587 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
588 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
590 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
591 MEM_COPY_ATTRIBUTES (new, to);
592 validate_change (object, loc, new, 1);
593 return;
595 break;
597 case ZERO_EXTRACT:
598 case SIGN_EXTRACT:
599 /* If we are replacing a register with memory, try to change the memory
600 to be the mode required for memory in extract operations (this isn't
601 likely to be an insertion operation; if it was, nothing bad will
602 happen, we might just fail in some cases). */
604 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
605 && rtx_equal_p (XEXP (x, 0), from)
606 && GET_CODE (XEXP (x, 1)) == CONST_INT
607 && GET_CODE (XEXP (x, 2)) == CONST_INT
608 && ! mode_dependent_address_p (XEXP (to, 0))
609 && ! MEM_VOLATILE_P (to))
611 enum machine_mode wanted_mode = VOIDmode;
612 enum machine_mode is_mode = GET_MODE (to);
613 int pos = INTVAL (XEXP (x, 2));
615 #ifdef HAVE_extzv
616 if (code == ZERO_EXTRACT)
618 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
619 if (wanted_mode == VOIDmode)
620 wanted_mode = word_mode;
622 #endif
623 #ifdef HAVE_extv
624 if (code == SIGN_EXTRACT)
626 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
627 if (wanted_mode == VOIDmode)
628 wanted_mode = word_mode;
630 #endif
632 /* If we have a narrower mode, we can do something. */
633 if (wanted_mode != VOIDmode
634 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
636 int offset = pos / BITS_PER_UNIT;
637 rtx newmem;
639 /* If the bytes and bits are counted differently, we
640 must adjust the offset. */
641 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
642 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
643 - offset);
645 pos %= GET_MODE_BITSIZE (wanted_mode);
647 newmem = gen_rtx_MEM (wanted_mode,
648 plus_constant (XEXP (to, 0), offset));
649 MEM_COPY_ATTRIBUTES (newmem, to);
651 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
652 validate_change (object, &XEXP (x, 0), newmem, 1);
656 break;
658 default:
659 break;
662 /* For commutative or comparison operations we've already performed
663 replacements. Don't try to perform them again. */
664 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
666 fmt = GET_RTX_FORMAT (code);
667 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
669 if (fmt[i] == 'e')
670 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
671 else if (fmt[i] == 'E')
672 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
673 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
678 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
679 with TO. After all changes have been made, validate by seeing
680 if INSN is still valid. */
683 validate_replace_rtx_subexp (from, to, insn, loc)
684 rtx from, to, insn, *loc;
686 validate_replace_rtx_1 (loc, from, to, insn);
687 return apply_change_group ();
690 /* Try replacing every occurrence of FROM in INSN with TO. After all
691 changes have been made, validate by seeing if INSN is still valid. */
694 validate_replace_rtx (from, to, insn)
695 rtx from, to, insn;
697 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
698 return apply_change_group ();
701 /* Try replacing every occurrence of FROM in INSN with TO. After all
702 changes have been made, validate by seeing if INSN is still valid. */
704 void
705 validate_replace_rtx_group (from, to, insn)
706 rtx from, to, insn;
708 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
711 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
712 SET_DESTs. After all changes have been made, validate by seeing if
713 INSN is still valid. */
716 validate_replace_src (from, to, insn)
717 rtx from, to, insn;
719 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
720 || GET_CODE (PATTERN (insn)) != SET)
721 abort ();
723 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
724 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
725 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
726 from, to, insn);
727 return apply_change_group ();
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
          must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
          || GET_CODE (next) == BARRIER)
        return 1;
      if (GET_CODE (next) == NOTE)
        continue;
      if (inequality_comparisons_p (PATTERN (next)))
        return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
        return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
        return 1;
    }
  return 1;
}
#endif
#endif
780 /* This is used by find_single_use to locate an rtx that contains exactly one
781 use of DEST, which is typically either a REG or CC0. It returns a
782 pointer to the innermost rtx expression containing DEST. Appearances of
783 DEST that are being used to totally replace it are not counted. */
785 static rtx *
786 find_single_use_1 (dest, loc)
787 rtx dest;
788 rtx *loc;
790 rtx x = *loc;
791 enum rtx_code code = GET_CODE (x);
792 rtx *result = 0;
793 rtx *this_result;
794 int i;
795 const char *fmt;
797 switch (code)
799 case CONST_INT:
800 case CONST:
801 case LABEL_REF:
802 case SYMBOL_REF:
803 case CONST_DOUBLE:
804 case CLOBBER:
805 return 0;
807 case SET:
808 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
809 of a REG that occupies all of the REG, the insn uses DEST if
810 it is mentioned in the destination or the source. Otherwise, we
811 need just check the source. */
812 if (GET_CODE (SET_DEST (x)) != CC0
813 && GET_CODE (SET_DEST (x)) != PC
814 && GET_CODE (SET_DEST (x)) != REG
815 && ! (GET_CODE (SET_DEST (x)) == SUBREG
816 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
817 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
818 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
819 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
820 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
821 break;
823 return find_single_use_1 (dest, &SET_SRC (x));
825 case MEM:
826 case SUBREG:
827 return find_single_use_1 (dest, &XEXP (x, 0));
829 default:
830 break;
833 /* If it wasn't one of the common cases above, check each expression and
834 vector of this code. Look for a unique usage of DEST. */
836 fmt = GET_RTX_FORMAT (code);
837 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
839 if (fmt[i] == 'e')
841 if (dest == XEXP (x, i)
842 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
843 && REGNO (dest) == REGNO (XEXP (x, i))))
844 this_result = loc;
845 else
846 this_result = find_single_use_1 (dest, &XEXP (x, i));
848 if (result == 0)
849 result = this_result;
850 else if (this_result)
851 /* Duplicate usage. */
852 return 0;
854 else if (fmt[i] == 'E')
856 int j;
858 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
860 if (XVECEXP (x, i, j) == dest
861 || (GET_CODE (dest) == REG
862 && GET_CODE (XVECEXP (x, i, j)) == REG
863 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
864 this_result = loc;
865 else
866 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
868 if (result == 0)
869 result = this_result;
870 else if (this_result)
871 return 0;
876 return result;
879 /* See if DEST, produced in INSN, is used only a single time in the
880 sequel. If so, return a pointer to the innermost rtx expression in which
881 it is used.
883 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
885 This routine will return usually zero either before flow is called (because
886 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
887 note can't be trusted).
889 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
890 care about REG_DEAD notes or LOG_LINKS.
892 Otherwise, we find the single use by finding an insn that has a
893 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
894 only referenced once in that insn, we know that it must be the first
895 and last insn referencing DEST. */
897 rtx *
898 find_single_use (dest, insn, ploc)
899 rtx dest;
900 rtx insn;
901 rtx *ploc;
903 rtx next;
904 rtx *result;
905 rtx link;
907 #ifdef HAVE_cc0
908 if (dest == cc0_rtx)
910 next = NEXT_INSN (insn);
911 if (next == 0
912 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
913 return 0;
915 result = find_single_use_1 (dest, &PATTERN (next));
916 if (result && ploc)
917 *ploc = next;
918 return result;
920 #endif
922 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
923 return 0;
925 for (next = next_nonnote_insn (insn);
926 next != 0 && GET_CODE (next) != CODE_LABEL;
927 next = next_nonnote_insn (next))
928 if (INSN_P (next) && dead_or_set_p (next, dest))
930 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
931 if (XEXP (link, 0) == insn)
932 break;
934 if (link)
936 result = find_single_use_1 (dest, &PATTERN (next));
937 if (ploc)
938 *ploc = next;
939 return result;
943 return 0;
946 /* Return 1 if OP is a valid general operand for machine mode MODE.
947 This is either a register reference, a memory reference,
948 or a constant. In the case of a memory reference, the address
949 is checked for general validity for the target machine.
951 Register and memory references must have mode MODE in order to be valid,
952 but some constants have no machine mode and are valid for any mode.
954 If MODE is VOIDmode, OP is checked for validity for whatever mode
955 it has.
957 The main use of this function is as a predicate in match_operand
958 expressions in the machine description.
960 For an explanation of this function's behavior for registers of
961 class NO_REGS, see the comment for `register_operand'. */
964 general_operand (op, mode)
965 register rtx op;
966 enum machine_mode mode;
968 register enum rtx_code code = GET_CODE (op);
969 int mode_altering_drug = 0;
971 if (mode == VOIDmode)
972 mode = GET_MODE (op);
974 /* Don't accept CONST_INT or anything similar
975 if the caller wants something floating. */
976 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
977 && GET_MODE_CLASS (mode) != MODE_INT
978 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
979 return 0;
981 if (CONSTANT_P (op))
982 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
983 || mode == VOIDmode)
984 #ifdef LEGITIMATE_PIC_OPERAND_P
985 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
986 #endif
987 && LEGITIMATE_CONSTANT_P (op));
989 /* Except for certain constants with VOIDmode, already checked for,
990 OP's mode must match MODE if MODE specifies a mode. */
992 if (GET_MODE (op) != mode)
993 return 0;
995 if (code == SUBREG)
997 #ifdef INSN_SCHEDULING
998 /* On machines that have insn scheduling, we want all memory
999 reference to be explicit, so outlaw paradoxical SUBREGs. */
1000 if (GET_CODE (SUBREG_REG (op)) == MEM
1001 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1002 return 0;
1003 #endif
1005 op = SUBREG_REG (op);
1006 code = GET_CODE (op);
1007 #if 0
1008 /* No longer needed, since (SUBREG (MEM...))
1009 will load the MEM into a reload reg in the MEM's own mode. */
1010 mode_altering_drug = 1;
1011 #endif
1014 if (code == REG)
1015 /* A register whose class is NO_REGS is not a general operand. */
1016 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1017 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1019 if (code == MEM)
1021 register rtx y = XEXP (op, 0);
1023 if (! volatile_ok && MEM_VOLATILE_P (op))
1024 return 0;
1026 if (GET_CODE (y) == ADDRESSOF)
1027 return 1;
1029 /* Use the mem's mode, since it will be reloaded thus. */
1030 mode = GET_MODE (op);
1031 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1034 /* Pretend this is an operand for now; we'll run force_operand
1035 on its replacement in fixup_var_refs_1. */
1036 if (code == ADDRESSOF)
1037 return 1;
1039 return 0;
1041 win:
1042 if (mode_altering_drug)
1043 return ! mode_dependent_address_p (XEXP (op, 0));
1044 return 1;
1047 /* Return 1 if OP is a valid memory address for a memory reference
1048 of mode MODE.
1050 The main use of this function is as a predicate in match_operand
1051 expressions in the machine description. */
1054 address_operand (op, mode)
1055 register rtx op;
1056 enum machine_mode mode;
1058 return memory_address_p (mode, op);
1061 /* Return 1 if OP is a register reference of mode MODE.
1062 If MODE is VOIDmode, accept a register in any mode.
1064 The main use of this function is as a predicate in match_operand
1065 expressions in the machine description.
1067 As a special exception, registers whose class is NO_REGS are
1068 not accepted by `register_operand'. The reason for this change
1069 is to allow the representation of special architecture artifacts
1070 (such as a condition code register) without extending the rtl
1071 definitions. Since registers of class NO_REGS cannot be used
1072 as registers in any case where register classes are examined,
1073 it is most consistent to keep this function from accepting them. */
1076 register_operand (op, mode)
1077 register rtx op;
1078 enum machine_mode mode;
1080 if (GET_MODE (op) != mode && mode != VOIDmode)
1081 return 0;
1083 if (GET_CODE (op) == SUBREG)
1085 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1086 because it is guaranteed to be reloaded into one.
1087 Just make sure the MEM is valid in itself.
1088 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1089 but currently it does result from (SUBREG (REG)...) where the
1090 reg went on the stack.) */
1091 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1092 return general_operand (op, mode);
1094 #ifdef CLASS_CANNOT_CHANGE_MODE
1095 if (GET_CODE (SUBREG_REG (op)) == REG
1096 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1097 && (TEST_HARD_REG_BIT
1098 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1099 REGNO (SUBREG_REG (op))))
1100 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1101 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1102 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1103 return 0;
1104 #endif
1106 op = SUBREG_REG (op);
1109 /* If we have an ADDRESSOF, consider it valid since it will be
1110 converted into something that will not be a MEM. */
1111 if (GET_CODE (op) == ADDRESSOF)
1112 return 1;
1114 /* We don't consider registers whose class is NO_REGS
1115 to be a register operand. */
1116 return (GET_CODE (op) == REG
1117 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1118 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1121 /* Return 1 for a register in Pmode; ignore the tested mode. */
1124 pmode_register_operand (op, mode)
1125 rtx op;
1126 enum machine_mode mode ATTRIBUTE_UNUSED;
1128 return register_operand (op, Pmode);
1131 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1132 or a hard register. */
1135 scratch_operand (op, mode)
1136 register rtx op;
1137 enum machine_mode mode;
1139 if (GET_MODE (op) != mode && mode != VOIDmode)
1140 return 0;
1142 return (GET_CODE (op) == SCRATCH
1143 || (GET_CODE (op) == REG
1144 && REGNO (op) < FIRST_PSEUDO_REGISTER));
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  /* Besides being constant, OP must match the requested mode (VOIDmode
     constants match anything), be a legitimate PIC operand when PIC is
     in force, and pass the target's legitimacy check.  */
  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}
1179 /* Returns 1 if OP is an operand that is a CONST_INT. */
1182 const_int_operand (op, mode)
1183 register rtx op;
1184 enum machine_mode mode ATTRIBUTE_UNUSED;
1186 return GET_CODE (op) == CONST_INT;
1189 /* Returns 1 if OP is an operand that is a constant integer or constant
1190 floating-point number. */
1193 const_double_operand (op, mode)
1194 register rtx op;
1195 enum machine_mode mode;
1197 /* Don't accept CONST_INT or anything similar
1198 if the caller wants something floating. */
1199 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1200 && GET_MODE_CLASS (mode) != MODE_INT
1201 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1202 return 0;
1204 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1205 && (mode == VOIDmode || GET_MODE (op) == mode
1206 || GET_MODE (op) == VOIDmode));
1209 /* Return 1 if OP is a general operand that is not an immediate operand. */
1212 nonimmediate_operand (op, mode)
1213 register rtx op;
1214 enum machine_mode mode;
1216 return (general_operand (op, mode) && ! CONSTANT_P (op));
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1266 /* Return 1 if OP is a valid operand that stands for pushing a
1267 value of mode MODE onto the stack.
1269 The main use of this function is as a predicate in match_operand
1270 expressions in the machine description. */
1273 push_operand (op, mode)
1274 rtx op;
1275 enum machine_mode mode;
1277 if (GET_CODE (op) != MEM)
1278 return 0;
1280 if (mode != VOIDmode && GET_MODE (op) != mode)
1281 return 0;
1283 op = XEXP (op, 0);
1285 if (GET_CODE (op) != STACK_PUSH_CODE)
1286 return 0;
1288 return XEXP (op, 0) == stack_pointer_rtx;
1291 /* Return 1 if OP is a valid operand that stands for popping a
1292 value of mode MODE off the stack.
1294 The main use of this function is as a predicate in match_operand
1295 expressions in the machine description. */
1298 pop_operand (op, mode)
1299 rtx op;
1300 enum machine_mode mode;
1302 if (GET_CODE (op) != MEM)
1303 return 0;
1305 if (mode != VOIDmode && GET_MODE (op) != mode)
1306 return 0;
1308 op = XEXP (op, 0);
1310 if (GET_CODE (op) != STACK_POP_CODE)
1311 return 0;
1313 return XEXP (op, 0) == stack_pointer_rtx;
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     register rtx addr;
{
  /* An ADDRESSOF is always acceptable; it will later be converted
     into something that is not a MEM.  */
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  /* The target macro jumps to `win' when the address is legitimate;
     falling through means it is not.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
1333 /* Return 1 if OP is a valid memory reference with mode MODE,
1334 including a valid address.
1336 The main use of this function is as a predicate in match_operand
1337 expressions in the machine description. */
1340 memory_operand (op, mode)
1341 register rtx op;
1342 enum machine_mode mode;
1344 rtx inner;
1346 if (! reload_completed)
1347 /* Note that no SUBREG is a memory operand before end of reload pass,
1348 because (SUBREG (MEM...)) forces reloading into a register. */
1349 return GET_CODE (op) == MEM && general_operand (op, mode);
1351 if (mode != VOIDmode && GET_MODE (op) != mode)
1352 return 0;
1354 inner = op;
1355 if (GET_CODE (inner) == SUBREG)
1356 inner = SUBREG_REG (inner);
1358 return (GET_CODE (inner) == MEM && general_operand (op, mode));
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
      rtx inner = SUBREG_REG (op);

      /* On big-endian targets the byte offset of the subword is counted
	 from the other end, so correct OFFSET accordingly.  */
      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  /* The common case: a MEM whose address itself is a general operand.  */
  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
1400 /* Return 1 if this is a comparison operator. This allows the use of
1401 MATCH_OPERATOR to recognize all the branch insns. */
1404 comparison_operator (op, mode)
1405 register rtx op;
1406 enum machine_mode mode;
1408 return ((mode == VOIDmode || GET_MODE (op) == mode)
1409 && GET_RTX_CLASS (GET_CODE (op)) == '<');
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      /* Anything other than a trailing CLOBBER disqualifies BODY.  */
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  register int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      /* Operand 0 is the output; inputs start at index 1.  */
      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      /* Inputs follow the outputs in the operand numbering.  */
      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  /* NULL when BODY is not an asm of any recognized shape.  */
  return template;
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  /* Scan the constraint string; any single matching letter accepts OP.  */
  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
	{
	case '=':
	case '+':
	case '*':
	case '%':
	case '?':
	case '!':
	case '#':
	case '&':
	case ',':
	  /* Modifiers and alternative separators carry no operand test.  */
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  result = -1;
	  break;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    return 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    return 1;
	  break;

	case '>':
	  /* Same deliberate over-acceptance as '<' above.  */
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    return 1;
	  break;

	case 'E':
#ifndef REAL_ARITHMETIC
	  /* Match any floating double constant, but only if
	     we can examine the bits of it reliably.  */
	  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
	       || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
	      && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
	    break;
#endif
	  /* FALLTHRU */

	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE)
	    return 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	    return 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	    return 1;
	  break;

	case 's':
	  /* 's' excludes explicit integers, which the fallthrough
	     target 'i' would otherwise accept.  */
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* FALLTHRU */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    return 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    return 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	    return 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	    return 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	    return 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	    return 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	    return 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	    return 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	    return 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	    return 1;
	  break;

	case 'X':
	  return 1;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    return 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
	    {
	      /* NOTE: 'r' jumps into this block from the switch; a register
		 class letter and 'r' share the register test below.  */
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		return 1;
	    }
#ifdef EXTRA_CONSTRAINT
	  if (EXTRA_CONSTRAINT (op, c))
	    return 1;
#endif
	  break;
	}
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (p)
     rtx *p;
{
  register rtx *tem;
  register enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
1880 /* Return 1 if OP is a memory reference
1881 whose address contains no side effects
1882 and remains valid after the addition
1883 of a positive integer less than the
1884 size of the object being referenced.
1886 We assume that the original address is valid and do not check it.
1888 This uses strict_memory_address_p as a subroutine, so
1889 don't use it before reload. */
1892 offsettable_memref_p (op)
1893 rtx op;
1895 return ((GET_CODE (op) == MEM)
1896 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1899 /* Similar, but don't require a strictly valid mem ref:
1900 consider pseudo-regs valid as index or base regs. */
1903 offsettable_nonstrict_memref_p (op)
1904 rtx op;
1906 return ((GET_CODE (op) == MEM)
1907 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     register rtx y;
{
  register enum rtx_code ycode = GET_CODE (y);
  register rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      /* Temporarily patch the constant term in place, test, then restore.  */
      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  /* Autoincrement-class addresses cannot take an added displacement.  */
  if (GET_RTX_CLASS (ycode) == 'a')
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  */

  z = plus_constant_for_output (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (addr)
     rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */
{
  /* The target macro jumps to `win' when the address is mode-dependent.  */
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  /* Anything that is not a MEM cannot have a mode-dependent address.  */
  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  /* The target macro jumps to `lose' for a mode-dependent address.  */
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
  return 0;
}
/* Given an operand OP that is a valid memory reference which
   satisfies offsettable_memref_p, return a new memory reference whose
   address has been adjusted by OFFSET.  OFFSET should be positive and
   less than the size of the object referenced.  */

rtx
adj_offsettable_operand (op, offset)
     rtx op;
     int offset;
{
  register enum rtx_code code = GET_CODE (op);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      register rtx new;

      if (CONSTANT_ADDRESS_P (y))
	{
	  /* Constant address: build a fresh MEM at address + OFFSET.  */
	  new = gen_rtx_MEM (GET_MODE (op),
			     plus_constant_for_output (y, offset));
	  MEM_COPY_ATTRIBUTES (new, op);
	  return new;
	}

      if (GET_CODE (y) == PLUS)
	{
	  /* Sum: copy OP and fold OFFSET into its existing constant
	     term, if it has one.  */
	  rtx z = y;
	  register rtx *const_loc;

	  op = copy_rtx (op);
	  z = XEXP (op, 0);
	  const_loc = find_constant_term_loc (&z);
	  if (const_loc)
	    {
	      *const_loc = plus_constant_for_output (*const_loc, offset);
	      return op;
	    }
	}

      /* No constant term found: wrap the whole address in a new PLUS.  */
      new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
      MEM_COPY_ATTRIBUTES (new, op);
      return new;
    }
  /* Caller violated the contract: OP must be a MEM.  */
  abort ();
}
2071 /* Like extract_insn, but save insn extracted and don't extract again, when
2072 called again for the same insn expecting that recog_data still contain the
2073 valid information. This is used primary by gen_attr infrastructure that
2074 often does extract insn again and again. */
2075 void
2076 extract_insn_cached (insn)
2077 rtx insn;
2079 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2080 return;
2081 extract_insn (insn);
2082 recog_data.insn = insn;
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (insn)
     rtx insn;
{
  extract_insn_cached (insn);
  /* which_alternative == -1 means constraints were not yet matched;
     match them now (strictly after reload) and die if nothing fits.  */
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
2095 /* Do cached constrain_operand and complain about failures. */
2097 constrain_operands_cached (strict)
2098 int strict;
2100 if (which_alternative == -1)
2101 return constrain_operands (strict);
2102 else
2103 return 1;
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (insn)
     rtx insn;
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  /* Reset the global recognition state before refilling it.  */
  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* These patterns have no operands to extract.  */
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  if (noperands > MAX_RECOG_OPERANDS)
	    abort ();

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode);
	  if (noperands > 0)
	    {
	      /* The alternative count is one more than the number of
		 commas in the first operand's constraint string.  */
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }
  /* Classify each operand as input, output, or in-out from the leading
     constraint modifier.  */
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
    abort ();
}
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints ()
{
  int i;

  memset (recog_op_alt, 0, sizeof recog_op_alt);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      /* P advances across the comma-separated alternatives as J counts
	 them, so each iteration parses one alternative's letters.  */
      for (j = 0; j < recog_data.n_alternatives; j++)
	{
	  op_alt[j].class = NO_REGS;
	  op_alt[j].constraint = p;
	  op_alt[j].matches = -1;
	  op_alt[j].matched = -1;

	  /* An empty alternative places no restriction on the operand.  */
	  if (*p == '\0' || *p == ',')
	    {
	      op_alt[j].anything_ok = 1;
	      continue;
	    }

	  for (;;)
	    {
	      char c = *p++;
	      /* '#' comments out the rest of this alternative.  */
	      if (c == '#')
		do
		  c = *p++;
		while (c != ',' && c != '\0');

	      if (c == ',' || c == '\0')
		break;

	      switch (c)
		{
		case '=': case '+': case '*': case '%':
		case 'E': case 'F': case 'G': case 'H':
		case 's': case 'i': case 'n':
		case 'I': case 'J': case 'K': case 'L':
		case 'M': case 'N': case 'O': case 'P':
		  /* These don't say anything we care about.  */
		  break;

		case '?':
		  op_alt[j].reject += 6;
		  break;
		case '!':
		  op_alt[j].reject += 600;
		  break;
		case '&':
		  op_alt[j].earlyclobber = 1;
		  break;

		case '0': case '1': case '2': case '3': case '4':
		case '5': case '6': case '7': case '8': case '9':
		  /* Record the match in both directions: this operand
		     matches operand C-'0', and vice versa.  */
		  op_alt[j].matches = c - '0';
		  recog_op_alt[op_alt[j].matches][j].matched = i;
		  break;

		case 'm':
		  op_alt[j].memory_ok = 1;
		  break;
		case '<':
		  op_alt[j].decmem_ok = 1;
		  break;
		case '>':
		  op_alt[j].incmem_ok = 1;
		  break;
		case 'V':
		  op_alt[j].nonoffmem_ok = 1;
		  break;
		case 'o':
		  op_alt[j].offmem_ok = 1;
		  break;
		case 'X':
		  op_alt[j].anything_ok = 1;
		  break;

		case 'p':
		  op_alt[j].is_address = 1;
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
		  break;

		case 'g': case 'r':
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
		  break;

		default:
		  /* Any other letter names a machine-specific register
		     class; union it into the accumulated class.  */
		  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
		  break;
		}
	    }
	}
    }
}
2308 /* Check the operands of an insn against the insn's operand constraints
2309 and return 1 if they are valid.
2310 The information about the insn's operands, constraints, operand modes
2311 etc. is obtained from the global variables set up by extract_insn.
2313 WHICH_ALTERNATIVE is set to a number which indicates which
2314 alternative of constraints was matched: 0 for the first alternative,
2315 1 for the next, etc.
2317 In addition, when two operands are match
2318 and it happens that the output operand is (reg) while the
2319 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2320 make the output operand look like the input.
2321 This is because the output operand is the one the template will print.
2323 This is used in final, just before printing the assembler code and by
2324 the routines that determine an insn's attribute.
2326 If STRICT is a positive non-zero value, it means that we have been
2327 called after reload has been completed. In that case, we must
2328 do all checks strictly. If it is zero, it means that we have been called
2329 before reload has completed. In that case, we first try to see if we can
2330 find an alternative that matches strictly. If not, we try again, this
2331 time assuming that reload will fix up the insn. This provides a "best
2332 guess" for the alternative and is used to compute attributes of insns prior
2333 to reload. A negative value of STRICT is used for this internal call. */
2335 struct funny_match
2337 int this, other;
2341 constrain_operands (strict)
2342 int strict;
2344 const char *constraints[MAX_RECOG_OPERANDS];
2345 int matching_operands[MAX_RECOG_OPERANDS];
2346 int earlyclobber[MAX_RECOG_OPERANDS];
2347 register int c;
2349 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2350 int funny_match_index;
2352 which_alternative = 0;
2353 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2354 return 1;
2356 for (c = 0; c < recog_data.n_operands; c++)
2358 constraints[c] = recog_data.constraints[c];
2359 matching_operands[c] = -1;
2364 register int opno;
2365 int lose = 0;
2366 funny_match_index = 0;
2368 for (opno = 0; opno < recog_data.n_operands; opno++)
2370 register rtx op = recog_data.operand[opno];
2371 enum machine_mode mode = GET_MODE (op);
2372 register const char *p = constraints[opno];
2373 int offset = 0;
2374 int win = 0;
2375 int val;
2377 earlyclobber[opno] = 0;
2379 /* A unary operator may be accepted by the predicate, but it
2380 is irrelevant for matching constraints. */
2381 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2382 op = XEXP (op, 0);
2384 if (GET_CODE (op) == SUBREG)
2386 if (GET_CODE (SUBREG_REG (op)) == REG
2387 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2388 offset = SUBREG_WORD (op);
2389 op = SUBREG_REG (op);
2392 /* An empty constraint or empty alternative
2393 allows anything which matched the pattern. */
2394 if (*p == 0 || *p == ',')
2395 win = 1;
2397 while (*p && (c = *p++) != ',')
2398 switch (c)
2400 case '?': case '!': case '*': case '%':
2401 case '=': case '+':
2402 break;
2404 case '#':
2405 /* Ignore rest of this alternative as far as
2406 constraint checking is concerned. */
2407 while (*p && *p != ',')
2408 p++;
2409 break;
2411 case '&':
2412 earlyclobber[opno] = 1;
2413 break;
2415 case '0': case '1': case '2': case '3': case '4':
2416 case '5': case '6': case '7': case '8': case '9':
2418 /* This operand must be the same as a previous one.
2419 This kind of constraint is used for instructions such
2420 as add when they take only two operands.
2422 Note that the lower-numbered operand is passed first.
2424 If we are not testing strictly, assume that this constraint
2425 will be satisfied. */
2426 if (strict < 0)
2427 val = 1;
2428 else
2430 rtx op1 = recog_data.operand[c - '0'];
2431 rtx op2 = recog_data.operand[opno];
2433 /* A unary operator may be accepted by the predicate,
2434 but it is irrelevant for matching constraints. */
2435 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2436 op1 = XEXP (op1, 0);
2437 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2438 op2 = XEXP (op2, 0);
2440 val = operands_match_p (op1, op2);
2443 matching_operands[opno] = c - '0';
2444 matching_operands[c - '0'] = opno;
2446 if (val != 0)
2447 win = 1;
2448 /* If output is *x and input is *--x,
2449 arrange later to change the output to *--x as well,
2450 since the output op is the one that will be printed. */
2451 if (val == 2 && strict > 0)
2453 funny_match[funny_match_index].this = opno;
2454 funny_match[funny_match_index++].other = c - '0';
2456 break;
2458 case 'p':
2459 /* p is used for address_operands. When we are called by
2460 gen_reload, no one will have checked that the address is
2461 strictly valid, i.e., that all pseudos requiring hard regs
2462 have gotten them. */
2463 if (strict <= 0
2464 || (strict_memory_address_p (recog_data.operand_mode[opno],
2465 op)))
2466 win = 1;
2467 break;
2469 /* No need to check general_operand again;
2470 it was done in insn-recog.c. */
2471 case 'g':
2472 /* Anything goes unless it is a REG and really has a hard reg
2473 but the hard reg is not in the class GENERAL_REGS. */
2474 if (strict < 0
2475 || GENERAL_REGS == ALL_REGS
2476 || GET_CODE (op) != REG
2477 || (reload_in_progress
2478 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2479 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2480 win = 1;
2481 break;
2483 case 'X':
2484 /* This is used for a MATCH_SCRATCH in the cases when
2485 we don't actually need anything. So anything goes
2486 any time. */
2487 win = 1;
2488 break;
2490 case 'm':
2491 if (GET_CODE (op) == MEM
2492 /* Before reload, accept what reload can turn into mem. */
2493 || (strict < 0 && CONSTANT_P (op))
2494 /* During reload, accept a pseudo */
2495 || (reload_in_progress && GET_CODE (op) == REG
2496 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2497 win = 1;
2498 break;
2500 case '<':
2501 if (GET_CODE (op) == MEM
2502 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2503 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2504 win = 1;
2505 break;
2507 case '>':
2508 if (GET_CODE (op) == MEM
2509 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2510 || GET_CODE (XEXP (op, 0)) == POST_INC))
2511 win = 1;
2512 break;
2514 case 'E':
2515 #ifndef REAL_ARITHMETIC
2516 /* Match any CONST_DOUBLE, but only if
2517 we can examine the bits of it reliably. */
2518 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2519 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2520 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2521 break;
2522 #endif
2523 if (GET_CODE (op) == CONST_DOUBLE)
2524 win = 1;
2525 break;
2527 case 'F':
2528 if (GET_CODE (op) == CONST_DOUBLE)
2529 win = 1;
2530 break;
2532 case 'G':
2533 case 'H':
2534 if (GET_CODE (op) == CONST_DOUBLE
2535 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2536 win = 1;
2537 break;
2539 case 's':
2540 if (GET_CODE (op) == CONST_INT
2541 || (GET_CODE (op) == CONST_DOUBLE
2542 && GET_MODE (op) == VOIDmode))
2543 break;
2544 case 'i':
2545 if (CONSTANT_P (op))
2546 win = 1;
2547 break;
2549 case 'n':
2550 if (GET_CODE (op) == CONST_INT
2551 || (GET_CODE (op) == CONST_DOUBLE
2552 && GET_MODE (op) == VOIDmode))
2553 win = 1;
2554 break;
2556 case 'I':
2557 case 'J':
2558 case 'K':
2559 case 'L':
2560 case 'M':
2561 case 'N':
2562 case 'O':
2563 case 'P':
2564 if (GET_CODE (op) == CONST_INT
2565 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2566 win = 1;
2567 break;
2569 case 'V':
2570 if (GET_CODE (op) == MEM
2571 && ((strict > 0 && ! offsettable_memref_p (op))
2572 || (strict < 0
2573 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2574 || (reload_in_progress
2575 && !(GET_CODE (op) == REG
2576 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2577 win = 1;
2578 break;
2580 case 'o':
2581 if ((strict > 0 && offsettable_memref_p (op))
2582 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2583 /* Before reload, accept what reload can handle. */
2584 || (strict < 0
2585 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2586 /* During reload, accept a pseudo */
2587 || (reload_in_progress && GET_CODE (op) == REG
2588 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2589 win = 1;
2590 break;
2592 default:
2594 enum reg_class class;
2596 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2597 if (class != NO_REGS)
2599 if (strict < 0
2600 || (strict == 0
2601 && GET_CODE (op) == REG
2602 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2603 || (strict == 0 && GET_CODE (op) == SCRATCH)
2604 || (GET_CODE (op) == REG
2605 && reg_fits_class_p (op, class, offset, mode)))
2606 win = 1;
2608 #ifdef EXTRA_CONSTRAINT
2609 else if (EXTRA_CONSTRAINT (op, c))
2610 win = 1;
2611 #endif
2612 break;
2616 constraints[opno] = p;
2617 /* If this operand did not win somehow,
2618 this alternative loses. */
2619 if (! win)
2620 lose = 1;
2622 /* This alternative won; the operands are ok.
2623 Change whichever operands this alternative says to change. */
2624 if (! lose)
2626 int opno, eopno;
2628 /* See if any earlyclobber operand conflicts with some other
2629 operand. */
2631 if (strict > 0)
2632 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2633 /* Ignore earlyclobber operands now in memory,
2634 because we would often report failure when we have
2635 two memory operands, one of which was formerly a REG. */
2636 if (earlyclobber[eopno]
2637 && GET_CODE (recog_data.operand[eopno]) == REG)
2638 for (opno = 0; opno < recog_data.n_operands; opno++)
2639 if ((GET_CODE (recog_data.operand[opno]) == MEM
2640 || recog_data.operand_type[opno] != OP_OUT)
2641 && opno != eopno
2642 /* Ignore things like match_operator operands. */
2643 && *recog_data.constraints[opno] != 0
2644 && ! (matching_operands[opno] == eopno
2645 && operands_match_p (recog_data.operand[opno],
2646 recog_data.operand[eopno]))
2647 && ! safe_from_earlyclobber (recog_data.operand[opno],
2648 recog_data.operand[eopno]))
2649 lose = 1;
2651 if (! lose)
2653 while (--funny_match_index >= 0)
2655 recog_data.operand[funny_match[funny_match_index].other]
2656 = recog_data.operand[funny_match[funny_match_index].this];
2659 return 1;
2663 which_alternative++;
2665 while (which_alternative < recog_data.n_alternatives);
2667 which_alternative = -1;
2668 /* If we are about to reject this, but we are not to test strictly,
2669 try a very loose test. Only return failure if it fails also. */
2670 if (strict == 0)
2671 return constrain_operands (-1);
2672 else
2673 return 0;
2676 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2677 is a hard reg in class CLASS when its regno is offset by OFFSET
2678 and changed to mode MODE.
2679 If REG occupies multiple hard regs, all of them must be in CLASS. */
2682 reg_fits_class_p (operand, class, offset, mode)
2683 rtx operand;
2684 register enum reg_class class;
2685 int offset;
2686 enum machine_mode mode;
2688 register int regno = REGNO (operand);
2689 if (regno < FIRST_PSEUDO_REGISTER
2690 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2691 regno + offset))
2693 register int sr;
2694 regno += offset;
2695 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2696 sr > 0; sr--)
2697 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2698 regno + sr))
2699 break;
2700 return sr == 0;
2703 return 0;
2706 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2708 void
2709 split_all_insns (upd_life)
2710 int upd_life;
2712 sbitmap blocks;
2713 int changed;
2714 int i;
2716 blocks = sbitmap_alloc (n_basic_blocks);
2717 sbitmap_zero (blocks);
2718 changed = 0;
2720 for (i = n_basic_blocks - 1; i >= 0; --i)
2722 basic_block bb = BASIC_BLOCK (i);
2723 rtx insn, next;
2725 for (insn = bb->head; insn ; insn = next)
2727 rtx set;
2729 /* Can't use `next_real_insn' because that might go across
2730 CODE_LABELS and short-out basic blocks. */
2731 next = NEXT_INSN (insn);
2732 if (! INSN_P (insn))
2735 /* Don't split no-op move insns. These should silently
2736 disappear later in final. Splitting such insns would
2737 break the code that handles REG_NO_CONFLICT blocks. */
2739 else if ((set = single_set (insn)) != NULL
2740 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2742 /* Nops get in the way while scheduling, so delete them
2743 now if register allocation has already been done. It
2744 is too risky to try to do this before register
2745 allocation, and there are unlikely to be very many
2746 nops then anyways. */
2747 if (reload_completed)
2749 PUT_CODE (insn, NOTE);
2750 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2751 NOTE_SOURCE_FILE (insn) = 0;
2754 else
2756 /* Split insns here to get max fine-grain parallelism. */
2757 rtx first = PREV_INSN (insn);
2758 rtx last = try_split (PATTERN (insn), insn, 1);
2760 if (last != insn)
2762 SET_BIT (blocks, i);
2763 changed = 1;
2765 /* try_split returns the NOTE that INSN became. */
2766 PUT_CODE (insn, NOTE);
2767 NOTE_SOURCE_FILE (insn) = 0;
2768 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2770 /* ??? Coddle to md files that generate subregs in post-
2771 reload splitters instead of computing the proper
2772 hard register. */
2773 if (reload_completed && first != last)
2775 first = NEXT_INSN (first);
2776 while (1)
2778 if (INSN_P (first))
2779 cleanup_subreg_operands (first);
2780 if (first == last)
2781 break;
2782 first = NEXT_INSN (first);
2786 if (insn == bb->end)
2788 bb->end = last;
2789 break;
2794 if (insn == bb->end)
2795 break;
2798 /* ??? When we're called from just after reload, the CFG is in bad
2799 shape, and we may have fallen off the end. This could be fixed
2800 by having reload not try to delete unreachable code. Otherwise
2801 assert we found the end insn. */
2802 if (insn == NULL && upd_life)
2803 abort ();
2806 if (changed && upd_life)
2808 compute_bb_for_insn (get_max_uid ());
2809 count_or_remove_death_notes (blocks, 1);
2810 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2813 sbitmap_free (blocks);
2816 #ifdef HAVE_peephole2
2817 struct peep2_insn_data
2819 rtx insn;
2820 regset live_before;
2823 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2824 static int peep2_current;
2826 /* A non-insn marker indicating the last insn of the block.
2827 The live_before regset for this element is correct, indicating
2828 global_live_at_end for the block. */
2829 #define PEEP2_EOB pc_rtx
2831 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2832 does not exist. Used by the recognizer to find the next insn to match
2833 in a multi-insn pattern. */
2836 peep2_next_insn (n)
2837 int n;
2839 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2840 abort ();
2842 n += peep2_current;
2843 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2844 n -= MAX_INSNS_PER_PEEP2 + 1;
2846 if (peep2_insn_data[n].insn == PEEP2_EOB)
2847 return NULL_RTX;
2848 return peep2_insn_data[n].insn;
2851 /* Return true if REGNO is dead before the Nth non-note insn
2852 after `current'. */
2855 peep2_regno_dead_p (ofs, regno)
2856 int ofs;
2857 int regno;
2859 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2860 abort ();
2862 ofs += peep2_current;
2863 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2864 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2866 if (peep2_insn_data[ofs].insn == NULL_RTX)
2867 abort ();
2869 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2872 /* Similarly for a REG. */
2875 peep2_reg_dead_p (ofs, reg)
2876 int ofs;
2877 rtx reg;
2879 int regno, n;
2881 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2882 abort ();
2884 ofs += peep2_current;
2885 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2886 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2888 if (peep2_insn_data[ofs].insn == NULL_RTX)
2889 abort ();
2891 regno = REGNO (reg);
2892 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2893 while (--n >= 0)
2894 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2895 return 0;
2896 return 1;
2899 /* Try to find a hard register of mode MODE, matching the register class in
2900 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2901 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2902 in which case the only condition is that the register must be available
2903 before CURRENT_INSN.
2904 Registers that already have bits set in REG_SET will not be considered.
2906 If an appropriate register is available, it will be returned and the
2907 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2908 returned. */
2911 peep2_find_free_register (from, to, class_str, mode, reg_set)
2912 int from, to;
2913 const char *class_str;
2914 enum machine_mode mode;
2915 HARD_REG_SET *reg_set;
2917 static int search_ofs;
2918 enum reg_class class;
2919 HARD_REG_SET live;
2920 int i;
2922 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2923 abort ();
2925 from += peep2_current;
2926 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2927 from -= MAX_INSNS_PER_PEEP2 + 1;
2928 to += peep2_current;
2929 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2930 to -= MAX_INSNS_PER_PEEP2 + 1;
2932 if (peep2_insn_data[from].insn == NULL_RTX)
2933 abort ();
2934 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2936 while (from != to)
2938 HARD_REG_SET this_live;
2940 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2941 from = 0;
2942 if (peep2_insn_data[from].insn == NULL_RTX)
2943 abort ();
2944 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2945 IOR_HARD_REG_SET (live, this_live);
2948 class = (class_str[0] == 'r' ? GENERAL_REGS
2949 : REG_CLASS_FROM_LETTER (class_str[0]));
2951 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2953 int raw_regno, regno, success, j;
2955 /* Distribute the free registers as much as possible. */
2956 raw_regno = search_ofs + i;
2957 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2958 raw_regno -= FIRST_PSEUDO_REGISTER;
2959 #ifdef REG_ALLOC_ORDER
2960 regno = reg_alloc_order[raw_regno];
2961 #else
2962 regno = raw_regno;
2963 #endif
2965 /* Don't allocate fixed registers. */
2966 if (fixed_regs[regno])
2967 continue;
2968 /* Make sure the register is of the right class. */
2969 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2970 continue;
2971 /* And can support the mode we need. */
2972 if (! HARD_REGNO_MODE_OK (regno, mode))
2973 continue;
2974 /* And that we don't create an extra save/restore. */
2975 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2976 continue;
2977 /* And we don't clobber traceback for noreturn functions. */
2978 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2979 && (! reload_completed || frame_pointer_needed))
2980 continue;
2982 success = 1;
2983 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2985 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2986 || TEST_HARD_REG_BIT (live, regno + j))
2988 success = 0;
2989 break;
2992 if (success)
2994 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2995 SET_HARD_REG_BIT (*reg_set, regno + j);
2997 /* Start the next search with the next register. */
2998 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2999 raw_regno = 0;
3000 search_ofs = raw_regno;
3002 return gen_rtx_REG (mode, regno);
3006 search_ofs = 0;
3007 return NULL_RTX;
3010 /* Perform the peephole2 optimization pass. */
3012 void
3013 peephole2_optimize (dump_file)
3014 FILE *dump_file ATTRIBUTE_UNUSED;
3016 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3017 rtx insn, prev;
3018 regset live;
3019 int i, b;
3020 #ifdef HAVE_conditional_execution
3021 sbitmap blocks;
3022 int changed;
3023 #endif
3025 /* Initialize the regsets we're going to use. */
3026 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3027 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3028 live = INITIALIZE_REG_SET (rs_heads[i]);
3030 #ifdef HAVE_conditional_execution
3031 blocks = sbitmap_alloc (n_basic_blocks);
3032 sbitmap_zero (blocks);
3033 changed = 0;
3034 #else
3035 count_or_remove_death_notes (NULL, 1);
3036 #endif
3038 for (b = n_basic_blocks - 1; b >= 0; --b)
3040 basic_block bb = BASIC_BLOCK (b);
3041 struct propagate_block_info *pbi;
3043 /* Indicate that all slots except the last holds invalid data. */
3044 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3045 peep2_insn_data[i].insn = NULL_RTX;
3047 /* Indicate that the last slot contains live_after data. */
3048 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3049 peep2_current = MAX_INSNS_PER_PEEP2;
3051 /* Start up propagation. */
3052 COPY_REG_SET (live, bb->global_live_at_end);
3053 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3055 #ifdef HAVE_conditional_execution
3056 pbi = init_propagate_block_info (bb, live, NULL, 0);
3057 #else
3058 pbi = init_propagate_block_info (bb, live, NULL, PROP_DEATH_NOTES);
3059 #endif
3061 for (insn = bb->end; ; insn = prev)
3063 prev = PREV_INSN (insn);
3064 if (INSN_P (insn))
3066 rtx try;
3067 int match_len;
3069 /* Record this insn. */
3070 if (--peep2_current < 0)
3071 peep2_current = MAX_INSNS_PER_PEEP2;
3072 peep2_insn_data[peep2_current].insn = insn;
3073 propagate_one_insn (pbi, insn);
3074 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3076 /* Match the peephole. */
3077 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3078 if (try != NULL)
3080 i = match_len + peep2_current;
3081 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3082 i -= MAX_INSNS_PER_PEEP2 + 1;
3084 /* Replace the old sequence with the new. */
3085 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3086 try = emit_insn_after (try, prev);
3088 /* Adjust the basic block boundaries. */
3089 if (peep2_insn_data[i].insn == bb->end)
3090 bb->end = try;
3091 if (insn == bb->head)
3092 bb->head = NEXT_INSN (prev);
3094 #ifdef HAVE_conditional_execution
3095 /* With conditional execution, we cannot back up the
3096 live information so easily, since the conditional
3097 death data structures are not so self-contained.
3098 So record that we've made a modification to this
3099 block and update life information at the end. */
3100 SET_BIT (blocks, b);
3101 changed = 1;
3103 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3104 peep2_insn_data[i].insn = NULL_RTX;
3105 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3106 #else
3107 /* Back up lifetime information past the end of the
3108 newly created sequence. */
3109 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3110 i = 0;
3111 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3113 /* Update life information for the new sequence. */
3116 if (INSN_P (try))
3118 if (--i < 0)
3119 i = MAX_INSNS_PER_PEEP2;
3120 peep2_insn_data[i].insn = try;
3121 propagate_one_insn (pbi, try);
3122 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3124 try = PREV_INSN (try);
3126 while (try != prev);
3128 /* ??? Should verify that LIVE now matches what we
3129 had before the new sequence. */
3131 peep2_current = i;
3132 #endif
3136 if (insn == bb->head)
3137 break;
3140 free_propagate_block_info (pbi);
3143 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3144 FREE_REG_SET (peep2_insn_data[i].live_before);
3145 FREE_REG_SET (live);
3147 #ifdef HAVE_conditional_execution
3148 count_or_remove_death_notes (blocks, 1);
3149 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3150 sbitmap_free (blocks);
3151 #endif
3153 #endif /* HAVE_peephole2 */