/* Extraction residue (not C source): a ChangeLog fragment and the git
   web-view header for this file, preserved here as a comment.
   * Makefile.in: Rebuilt.
   [official-gcc.git] / gcc / recog.c
   blob 00b45c64ce56469a5cb6e9e27592a6fc3c548250  */
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "hard-reg-set.h"
30 #include "recog.h"
31 #include "regs.h"
32 #include "function.h"
33 #include "flags.h"
34 #include "real.h"
35 #include "toplev.h"
36 #include "basic-block.h"
37 #include "output.h"
38 #include "reload.h"
/* Provide defaults for the rtx codes used to push and pop the stack when
   the target does not define them.  On a downward-growing stack a push
   pre-decrements the stack pointer and a pop post-increments it; on an
   upward-growing stack the opposite codes apply.  */
40 #ifndef STACK_PUSH_CODE
41 #ifdef STACK_GROWS_DOWNWARD
42 #define STACK_PUSH_CODE PRE_DEC
43 #else
44 #define STACK_PUSH_CODE PRE_INC
45 #endif
46 #endif
48 #ifndef STACK_POP_CODE
49 #ifdef STACK_GROWS_DOWNWARD
50 #define STACK_POP_CODE POST_INC
51 #else
52 #define STACK_POP_CODE POST_DEC
53 #endif
54 #endif
/* Forward declarations for the static helpers defined later in this file.  */
56 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
57 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
58 static rtx *find_constant_term_loc PARAMS ((rtx *));
59 static void validate_replace_src_1 PARAMS ((rtx *, void *));
61 /* Nonzero means allow operands to be volatile.
62 This should be 0 if you are generating rtl, such as if you are calling
63 the functions in optabs.c and expmed.c (most of the time).
64 This should be 1 if all valid insns need to be recognized,
65 such as in regclass.c and final.c and reload.c.
67 init_recog and init_recog_no_volatile are responsible for setting this. */
69 int volatile_ok;
/* Operand data for the most recently examined insn.  NOTE(review): presumably
   filled in by extract_insn elsewhere -- confirm against recog.h.  */
71 struct recog_data recog_data;
73 /* Contains a vector of operand_alternative structures for every operand.
74 Set up by preprocess_constraints. */
75 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
77 /* On return from `constrain_operands', indicate which alternative
78 was satisfied. */
80 int which_alternative;
82 /* Nonzero after end of reload pass.
83 Set to 1 or 0 by toplev.c.
84 Controls the significance of (SUBREG (MEM)). */
86 int reload_completed;
88 /* Initialize data used by the function `recog'.
89 This must be called once in the compilation of a function
90 before any insn recognition may be done in the function. */
92 void
93 init_recog_no_volatile ()
95 volatile_ok = 0;
98 void
99 init_recog ()
101 volatile_ok = 1;
104 /* Try recognizing the instruction INSN,
105 and return the code number that results.
106 Remember the code so that repeated calls do not
107 need to spend the time for actual rerecognition.
109 This function is the normal interface to instruction recognition.
110 The automatically-generated function `recog' is normally called
111 through this one. (The only exception is in combine.c.) */
114 recog_memoized_1 (insn)
115 rtx insn;
117 if (INSN_CODE (insn) < 0)
118 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
119 return INSN_CODE (insn);
122 /* Check that X is an insn-body for an `asm' with operands
123 and that the operands mentioned in it are legitimate. */
126 check_asm_operands (x)
127 rtx x;
129 int noperands;
130 rtx *operands;
131 const char **constraints;
132 int i;
134 /* Post-reload, be more strict with things. */
135 if (reload_completed)
137 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
138 extract_insn (make_insn_raw (x));
139 constrain_operands (1);
140 return which_alternative >= 0;
143 noperands = asm_noperands (x);
144 if (noperands < 0)
145 return 0;
146 if (noperands == 0)
147 return 1;
149 operands = (rtx *) alloca (noperands * sizeof (rtx));
150 constraints = (const char **) alloca (noperands * sizeof (char *));
152 decode_asm_operands (x, operands, NULL, constraints, NULL);
154 for (i = 0; i < noperands; i++)
156 const char *c = constraints[i];
157 if (c[0] == '%')
158 c++;
159 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
160 c = constraints[c[0] - '0'];
162 if (! asm_operand_ok (operands[i], c))
163 return 0;
166 return 1;
169 /* Static data for the next two routines. */
171 typedef struct change_t
173 rtx object;
174 int old_code;
175 rtx *loc;
176 rtx old;
177 } change_t;
179 static change_t *changes;
180 static int changes_allocated;
182 static int num_changes = 0;
184 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
185 at which NEW will be placed. If OBJECT is zero, no validation is done,
186 the change is simply made.
188 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
189 will be called with the address and mode as parameters. If OBJECT is
190 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
191 the change in place.
193 IN_GROUP is non-zero if this is part of a group of changes that must be
194 performed as a group. In that case, the changes will be stored. The
195 function `apply_change_group' will validate and apply the changes.
197 If IN_GROUP is zero, this is a single change. Try to recognize the insn
198 or validate the memory reference with the change applied. If the result
199 is not valid for the machine, suppress the change and return zero.
200 Otherwise, perform the change and return 1. */
203 validate_change (object, loc, new, in_group)
204 rtx object;
205 rtx *loc;
206 rtx new;
207 int in_group;
209 rtx old = *loc;
211 if (old == new || rtx_equal_p (old, new))
212 return 1;
214 if (in_group == 0 && num_changes != 0)
215 abort ();
217 *loc = new;
219 /* Save the information describing this change. */
220 if (num_changes >= changes_allocated)
222 if (changes_allocated == 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated = MAX_RECOG_OPERANDS * 5;
226 else
227 changes_allocated *= 2;
229 changes =
230 (change_t*) xrealloc (changes,
231 sizeof (change_t) * changes_allocated);
234 changes[num_changes].object = object;
235 changes[num_changes].loc = loc;
236 changes[num_changes].old = old;
238 if (object && GET_CODE (object) != MEM)
240 /* Set INSN_CODE to force rerecognition of insn. Save old code in
241 case invalid. */
242 changes[num_changes].old_code = INSN_CODE (object);
243 INSN_CODE (object) = -1;
246 num_changes++;
248 /* If we are making a group of changes, return 1. Otherwise, validate the
249 change group we made. */
251 if (in_group)
252 return 1;
253 else
254 return apply_change_group ();
257 /* This subroutine of apply_change_group verifies whether the changes to INSN
258 were valid; i.e. whether INSN can still be recognized. */
261 insn_invalid_p (insn)
262 rtx insn;
264 rtx pat = PATTERN (insn);
265 int num_clobbers = 0;
266 /* If we are before reload and the pattern is a SET, see if we can add
267 clobbers. */
268 int icode = recog (pat, insn,
269 (GET_CODE (pat) == SET
270 && ! reload_completed && ! reload_in_progress)
271 ? &num_clobbers : 0);
272 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
275 /* If this is an asm and the operand aren't legal, then fail. Likewise if
276 this is not an asm and the insn wasn't recognized. */
277 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
278 || (!is_asm && icode < 0))
279 return 1;
281 /* If we have to add CLOBBERs, fail if we have to add ones that reference
282 hard registers since our callers can't know if they are live or not.
283 Otherwise, add them. */
284 if (num_clobbers > 0)
286 rtx newpat;
288 if (added_clobbers_hard_reg_p (icode))
289 return 1;
291 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
292 XVECEXP (newpat, 0, 0) = pat;
293 add_clobbers (newpat, icode);
294 PATTERN (insn) = pat = newpat;
297 /* After reload, verify that all constraints are satisfied. */
298 if (reload_completed)
300 extract_insn (insn);
302 if (! constrain_operands (1))
303 return 1;
306 INSN_CODE (insn) = icode;
307 return 0;
310 /* Apply a group of changes previously issued with `validate_change'.
311 Return 1 if all changes are valid, zero otherwise. */
314 apply_change_group ()
316 int i;
317 rtx last_validated = NULL_RTX;
319 /* The changes have been applied and all INSN_CODEs have been reset to force
320 rerecognition.
322 The changes are valid if we aren't given an object, or if we are
323 given a MEM and it still is a valid address, or if this is in insn
324 and it is recognized. In the latter case, if reload has completed,
325 we also require that the operands meet the constraints for
326 the insn. */
328 for (i = 0; i < num_changes; i++)
330 rtx object = changes[i].object;
332 /* if there is no object to test or if it is the same as the one we
333 already tested, ignore it. */
334 if (object == 0 || object == last_validated)
335 continue;
337 if (GET_CODE (object) == MEM)
339 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
340 break;
342 else if (insn_invalid_p (object))
344 rtx pat = PATTERN (object);
346 /* Perhaps we couldn't recognize the insn because there were
347 extra CLOBBERs at the end. If so, try to re-recognize
348 without the last CLOBBER (later iterations will cause each of
349 them to be eliminated, in turn). But don't do this if we
350 have an ASM_OPERAND. */
351 if (GET_CODE (pat) == PARALLEL
352 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
353 && asm_noperands (PATTERN (object)) < 0)
355 rtx newpat;
357 if (XVECLEN (pat, 0) == 2)
358 newpat = XVECEXP (pat, 0, 0);
359 else
361 int j;
363 newpat
364 = gen_rtx_PARALLEL (VOIDmode,
365 rtvec_alloc (XVECLEN (pat, 0) - 1));
366 for (j = 0; j < XVECLEN (newpat, 0); j++)
367 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
370 /* Add a new change to this group to replace the pattern
371 with this new pattern. Then consider this change
372 as having succeeded. The change we added will
373 cause the entire call to fail if things remain invalid.
375 Note that this can lose if a later change than the one
376 we are processing specified &XVECEXP (PATTERN (object), 0, X)
377 but this shouldn't occur. */
379 validate_change (object, &PATTERN (object), newpat, 1);
380 continue;
382 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
383 /* If this insn is a CLOBBER or USE, it is always valid, but is
384 never recognized. */
385 continue;
386 else
387 break;
389 last_validated = object;
392 if (i == num_changes)
394 num_changes = 0;
395 return 1;
397 else
399 cancel_changes (0);
400 return 0;
404 /* Return the number of changes so far in the current group. */
407 num_validated_changes ()
409 return num_changes;
412 /* Retract the changes numbered NUM and up. */
414 void
415 cancel_changes (num)
416 int num;
418 int i;
420 /* Back out all the changes. Do this in the opposite order in which
421 they were made. */
422 for (i = num_changes - 1; i >= num; i--)
424 *changes[i].loc = changes[i].old;
425 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
426 INSN_CODE (changes[i].object) = changes[i].old_code;
428 num_changes = num;
431 /* Replace every occurrence of FROM in X with TO. Mark each change with
432 validate_change passing OBJECT. */
434 static void
435 validate_replace_rtx_1 (loc, from, to, object)
436 rtx *loc;
437 rtx from, to, object;
439 register int i, j;
440 register const char *fmt;
441 register rtx x = *loc;
442 enum rtx_code code;
444 if (!x)
445 return;
446 code = GET_CODE (x);
447 /* X matches FROM if it is the same rtx or they are both referring to the
448 same register in the same mode. Avoid calling rtx_equal_p unless the
449 operands look similar. */
451 if (x == from
452 || (GET_CODE (x) == REG && GET_CODE (from) == REG
453 && GET_MODE (x) == GET_MODE (from)
454 && REGNO (x) == REGNO (from))
455 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
456 && rtx_equal_p (x, from)))
458 validate_change (object, loc, to, 1);
459 return;
462 /* For commutative or comparison operations, try replacing each argument
463 separately and seeing if we made any changes. If so, put a constant
464 argument last.*/
465 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
467 int prev_changes = num_changes;
469 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
470 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
471 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
473 validate_change (object, loc,
474 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
475 : swap_condition (code),
476 GET_MODE (x), XEXP (x, 1),
477 XEXP (x, 0)),
479 x = *loc;
480 code = GET_CODE (x);
484 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
485 done the substitution, otherwise we won't. */
487 switch (code)
489 case PLUS:
490 /* If we have a PLUS whose second operand is now a CONST_INT, use
491 plus_constant to try to simplify it. */
492 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
493 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
495 return;
497 case MINUS:
498 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
500 validate_change (object, loc,
501 plus_constant (XEXP (x, 0), - INTVAL (to)),
503 return;
505 break;
507 case ZERO_EXTEND:
508 case SIGN_EXTEND:
509 /* In these cases, the operation to be performed depends on the mode
510 of the operand. If we are replacing the operand with a VOIDmode
511 constant, we lose the information. So try to simplify the operation
512 in that case. */
513 if (GET_MODE (to) == VOIDmode
514 && (rtx_equal_p (XEXP (x, 0), from)
515 || (GET_CODE (XEXP (x, 0)) == SUBREG
516 && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
518 rtx new = NULL_RTX;
520 /* If there is a subreg involved, crop to the portion of the
521 constant that we are interested in. */
522 if (GET_CODE (XEXP (x, 0)) == SUBREG)
524 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
525 to = operand_subword (to,
526 (SUBREG_BYTE (XEXP (x, 0))
527 / UNITS_PER_WORD),
528 0, GET_MODE (from));
529 else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
530 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
531 <= HOST_BITS_PER_WIDE_INT))
533 int i = SUBREG_BYTE (XEXP (x, 0)) * BITS_PER_UNIT;
534 HOST_WIDE_INT valh;
535 unsigned HOST_WIDE_INT vall;
537 if (GET_CODE (to) == CONST_INT)
539 vall = INTVAL (to);
540 valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
542 else
544 vall = CONST_DOUBLE_LOW (to);
545 valh = CONST_DOUBLE_HIGH (to);
548 if (WORDS_BIG_ENDIAN)
549 i = (GET_MODE_BITSIZE (GET_MODE (from))
550 - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
551 if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
552 vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
553 else if (i >= HOST_BITS_PER_WIDE_INT)
554 vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
555 to = GEN_INT (trunc_int_for_mode (vall,
556 GET_MODE (XEXP (x, 0))));
558 else
559 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
562 /* If the above didn't fail, perform the extension from the
563 mode of the operand (and not the mode of FROM). */
564 if (to)
565 new = simplify_unary_operation (code, GET_MODE (x), to,
566 GET_MODE (XEXP (x, 0)));
568 /* If any of the above failed, substitute in something that
569 we know won't be recognized. */
570 if (!new)
571 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
573 validate_change (object, loc, new, 1);
574 return;
576 break;
578 case SUBREG:
579 /* In case we are replacing by constant, attempt to simplify it to
580 non-SUBREG expression. We can't do this later, since the information
581 about inner mode may be lost. */
582 if (rtx_equal_p (SUBREG_REG (x), from))
584 rtx temp;
585 temp = simplify_subreg (GET_MODE (x), to,
586 GET_MODE (to) != VOIDmode
587 ? GET_MODE (to) : GET_MODE (SUBREG_REG (x)),
588 SUBREG_BYTE (x));
589 if (temp)
591 validate_change (object, loc, temp, 1);
592 return;
594 /* Avoid creating of invalid SUBREGS. */
595 if (GET_MODE (from) == VOIDmode)
597 /* Substitute in something that we know won't be
598 recognized. */
599 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
600 validate_change (object, loc, to, 1);
601 return;
604 break;
606 case ZERO_EXTRACT:
607 case SIGN_EXTRACT:
608 /* If we are replacing a register with memory, try to change the memory
609 to be the mode required for memory in extract operations (this isn't
610 likely to be an insertion operation; if it was, nothing bad will
611 happen, we might just fail in some cases). */
613 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
614 && rtx_equal_p (XEXP (x, 0), from)
615 && GET_CODE (XEXP (x, 1)) == CONST_INT
616 && GET_CODE (XEXP (x, 2)) == CONST_INT
617 && ! mode_dependent_address_p (XEXP (to, 0))
618 && ! MEM_VOLATILE_P (to))
620 enum machine_mode wanted_mode = VOIDmode;
621 enum machine_mode is_mode = GET_MODE (to);
622 int pos = INTVAL (XEXP (x, 2));
624 #ifdef HAVE_extzv
625 if (code == ZERO_EXTRACT)
627 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
628 if (wanted_mode == VOIDmode)
629 wanted_mode = word_mode;
631 #endif
632 #ifdef HAVE_extv
633 if (code == SIGN_EXTRACT)
635 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
636 if (wanted_mode == VOIDmode)
637 wanted_mode = word_mode;
639 #endif
641 /* If we have a narrower mode, we can do something. */
642 if (wanted_mode != VOIDmode
643 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
645 int offset = pos / BITS_PER_UNIT;
646 rtx newmem;
648 /* If the bytes and bits are counted differently, we
649 must adjust the offset. */
650 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
651 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
652 - offset);
654 pos %= GET_MODE_BITSIZE (wanted_mode);
656 newmem = gen_rtx_MEM (wanted_mode,
657 plus_constant (XEXP (to, 0), offset));
658 MEM_COPY_ATTRIBUTES (newmem, to);
660 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
661 validate_change (object, &XEXP (x, 0), newmem, 1);
665 break;
667 default:
668 break;
671 /* For commutative or comparison operations we've already performed
672 replacements. Don't try to perform them again. */
673 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
675 fmt = GET_RTX_FORMAT (code);
676 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
678 if (fmt[i] == 'e')
679 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
680 else if (fmt[i] == 'E')
681 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
682 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
687 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
688 with TO. After all changes have been made, validate by seeing
689 if INSN is still valid. */
692 validate_replace_rtx_subexp (from, to, insn, loc)
693 rtx from, to, insn, *loc;
695 validate_replace_rtx_1 (loc, from, to, insn);
696 return apply_change_group ();
699 /* Try replacing every occurrence of FROM in INSN with TO. After all
700 changes have been made, validate by seeing if INSN is still valid. */
703 validate_replace_rtx (from, to, insn)
704 rtx from, to, insn;
706 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
707 return apply_change_group ();
710 /* Try replacing every occurrence of FROM in INSN with TO. */
712 void
713 validate_replace_rtx_group (from, to, insn)
714 rtx from, to, insn;
716 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
719 /* Function called by note_uses to replace used subexpressions. */
720 struct validate_replace_src_data
722 rtx from; /* Old RTX */
723 rtx to; /* New RTX */
724 rtx insn; /* Insn in which substitution is occurring. */
727 static void
728 validate_replace_src_1 (x, data)
729 rtx *x;
730 void *data;
732 struct validate_replace_src_data *d
733 = (struct validate_replace_src_data *) data;
735 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
738 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
739 SET_DESTs. After all changes have been made, validate by seeing if
740 INSN is still valid. */
743 validate_replace_src (from, to, insn)
744 rtx from, to, insn;
746 struct validate_replace_src_data d;
748 d.from = from;
749 d.to = to;
750 d.insn = insn;
751 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
752 return apply_change_group ();
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }

  return 1;
}
#endif
#endif
805 /* This is used by find_single_use to locate an rtx that contains exactly one
806 use of DEST, which is typically either a REG or CC0. It returns a
807 pointer to the innermost rtx expression containing DEST. Appearances of
808 DEST that are being used to totally replace it are not counted. */
810 static rtx *
811 find_single_use_1 (dest, loc)
812 rtx dest;
813 rtx *loc;
815 rtx x = *loc;
816 enum rtx_code code = GET_CODE (x);
817 rtx *result = 0;
818 rtx *this_result;
819 int i;
820 const char *fmt;
822 switch (code)
824 case CONST_INT:
825 case CONST:
826 case LABEL_REF:
827 case SYMBOL_REF:
828 case CONST_DOUBLE:
829 case CLOBBER:
830 return 0;
832 case SET:
833 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
834 of a REG that occupies all of the REG, the insn uses DEST if
835 it is mentioned in the destination or the source. Otherwise, we
836 need just check the source. */
837 if (GET_CODE (SET_DEST (x)) != CC0
838 && GET_CODE (SET_DEST (x)) != PC
839 && GET_CODE (SET_DEST (x)) != REG
840 && ! (GET_CODE (SET_DEST (x)) == SUBREG
841 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
842 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
843 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
844 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
845 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
846 break;
848 return find_single_use_1 (dest, &SET_SRC (x));
850 case MEM:
851 case SUBREG:
852 return find_single_use_1 (dest, &XEXP (x, 0));
854 default:
855 break;
858 /* If it wasn't one of the common cases above, check each expression and
859 vector of this code. Look for a unique usage of DEST. */
861 fmt = GET_RTX_FORMAT (code);
862 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
864 if (fmt[i] == 'e')
866 if (dest == XEXP (x, i)
867 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
868 && REGNO (dest) == REGNO (XEXP (x, i))))
869 this_result = loc;
870 else
871 this_result = find_single_use_1 (dest, &XEXP (x, i));
873 if (result == 0)
874 result = this_result;
875 else if (this_result)
876 /* Duplicate usage. */
877 return 0;
879 else if (fmt[i] == 'E')
881 int j;
883 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
885 if (XVECEXP (x, i, j) == dest
886 || (GET_CODE (dest) == REG
887 && GET_CODE (XVECEXP (x, i, j)) == REG
888 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
889 this_result = loc;
890 else
891 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
893 if (result == 0)
894 result = this_result;
895 else if (this_result)
896 return 0;
901 return result;
904 /* See if DEST, produced in INSN, is used only a single time in the
905 sequel. If so, return a pointer to the innermost rtx expression in which
906 it is used.
908 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
910 This routine will return usually zero either before flow is called (because
911 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
912 note can't be trusted).
914 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
915 care about REG_DEAD notes or LOG_LINKS.
917 Otherwise, we find the single use by finding an insn that has a
918 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
919 only referenced once in that insn, we know that it must be the first
920 and last insn referencing DEST. */
922 rtx *
923 find_single_use (dest, insn, ploc)
924 rtx dest;
925 rtx insn;
926 rtx *ploc;
928 rtx next;
929 rtx *result;
930 rtx link;
932 #ifdef HAVE_cc0
933 if (dest == cc0_rtx)
935 next = NEXT_INSN (insn);
936 if (next == 0
937 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
938 return 0;
940 result = find_single_use_1 (dest, &PATTERN (next));
941 if (result && ploc)
942 *ploc = next;
943 return result;
945 #endif
947 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
948 return 0;
950 for (next = next_nonnote_insn (insn);
951 next != 0 && GET_CODE (next) != CODE_LABEL;
952 next = next_nonnote_insn (next))
953 if (INSN_P (next) && dead_or_set_p (next, dest))
955 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
956 if (XEXP (link, 0) == insn)
957 break;
959 if (link)
961 result = find_single_use_1 (dest, &PATTERN (next));
962 if (ploc)
963 *ploc = next;
964 return result;
968 return 0;
971 /* Return 1 if OP is a valid general operand for machine mode MODE.
972 This is either a register reference, a memory reference,
973 or a constant. In the case of a memory reference, the address
974 is checked for general validity for the target machine.
976 Register and memory references must have mode MODE in order to be valid,
977 but some constants have no machine mode and are valid for any mode.
979 If MODE is VOIDmode, OP is checked for validity for whatever mode
980 it has.
982 The main use of this function is as a predicate in match_operand
983 expressions in the machine description.
985 For an explanation of this function's behavior for registers of
986 class NO_REGS, see the comment for `register_operand'. */
989 general_operand (op, mode)
990 register rtx op;
991 enum machine_mode mode;
993 register enum rtx_code code = GET_CODE (op);
995 if (mode == VOIDmode)
996 mode = GET_MODE (op);
998 /* Don't accept CONST_INT or anything similar
999 if the caller wants something floating. */
1000 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1001 && GET_MODE_CLASS (mode) != MODE_INT
1002 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1003 return 0;
1005 if (GET_CODE (op) == CONST_INT
1006 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1007 return 0;
1009 if (CONSTANT_P (op))
1010 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1011 || mode == VOIDmode)
1012 #ifdef LEGITIMATE_PIC_OPERAND_P
1013 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1014 #endif
1015 && LEGITIMATE_CONSTANT_P (op));
1017 /* Except for certain constants with VOIDmode, already checked for,
1018 OP's mode must match MODE if MODE specifies a mode. */
1020 if (GET_MODE (op) != mode)
1021 return 0;
1023 if (code == SUBREG)
1025 #ifdef INSN_SCHEDULING
1026 /* On machines that have insn scheduling, we want all memory
1027 reference to be explicit, so outlaw paradoxical SUBREGs. */
1028 if (GET_CODE (SUBREG_REG (op)) == MEM
1029 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1030 return 0;
1031 #endif
1033 op = SUBREG_REG (op);
1034 code = GET_CODE (op);
1037 if (code == REG)
1038 /* A register whose class is NO_REGS is not a general operand. */
1039 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1040 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1042 if (code == MEM)
1044 register rtx y = XEXP (op, 0);
1046 if (! volatile_ok && MEM_VOLATILE_P (op))
1047 return 0;
1049 if (GET_CODE (y) == ADDRESSOF)
1050 return 1;
1052 /* Use the mem's mode, since it will be reloaded thus. */
1053 mode = GET_MODE (op);
1054 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1057 /* Pretend this is an operand for now; we'll run force_operand
1058 on its replacement in fixup_var_refs_1. */
1059 if (code == ADDRESSOF)
1060 return 1;
1062 return 0;
1064 win:
1065 return 1;
1068 /* Return 1 if OP is a valid memory address for a memory reference
1069 of mode MODE.
1071 The main use of this function is as a predicate in match_operand
1072 expressions in the machine description. */
1075 address_operand (op, mode)
1076 register rtx op;
1077 enum machine_mode mode;
1079 return memory_address_p (mode, op);
1082 /* Return 1 if OP is a register reference of mode MODE.
1083 If MODE is VOIDmode, accept a register in any mode.
1085 The main use of this function is as a predicate in match_operand
1086 expressions in the machine description.
1088 As a special exception, registers whose class is NO_REGS are
1089 not accepted by `register_operand'. The reason for this change
1090 is to allow the representation of special architecture artifacts
1091 (such as a condition code register) without extending the rtl
1092 definitions. Since registers of class NO_REGS cannot be used
1093 as registers in any case where register classes are examined,
1094 it is most consistent to keep this function from accepting them. */
1097 register_operand (op, mode)
1098 register rtx op;
1099 enum machine_mode mode;
1101 if (GET_MODE (op) != mode && mode != VOIDmode)
1102 return 0;
1104 if (GET_CODE (op) == SUBREG)
1106 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1107 because it is guaranteed to be reloaded into one.
1108 Just make sure the MEM is valid in itself.
1109 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1110 but currently it does result from (SUBREG (REG)...) where the
1111 reg went on the stack.) */
1112 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1113 return general_operand (op, mode);
1115 #ifdef CLASS_CANNOT_CHANGE_MODE
1116 if (GET_CODE (SUBREG_REG (op)) == REG
1117 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1118 && (TEST_HARD_REG_BIT
1119 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1120 REGNO (SUBREG_REG (op))))
1121 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1122 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1123 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1124 return 0;
1125 #endif
1127 op = SUBREG_REG (op);
1130 /* If we have an ADDRESSOF, consider it valid since it will be
1131 converted into something that will not be a MEM. */
1132 if (GET_CODE (op) == ADDRESSOF)
1133 return 1;
1135 /* We don't consider registers whose class is NO_REGS
1136 to be a register operand. */
1137 return (GET_CODE (op) == REG
1138 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1139 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1142 /* Return 1 for a register in Pmode; ignore the tested mode. */
1145 pmode_register_operand (op, mode)
1146 rtx op;
1147 enum machine_mode mode ATTRIBUTE_UNUSED;
1149 return register_operand (op, Pmode);
1152 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1153 or a hard register. */
1156 scratch_operand (op, mode)
1157 register rtx op;
1158 enum machine_mode mode;
1160 if (GET_MODE (op) != mode && mode != VOIDmode)
1161 return 0;
1163 return (GET_CODE (op) == SCRATCH
1164 || (GET_CODE (op) == REG
1165 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1168 /* Return 1 if OP is a valid immediate operand for mode MODE.
1170 The main use of this function is as a predicate in match_operand
1171 expressions in the machine description. */
1174 immediate_operand (op, mode)
1175 register rtx op;
1176 enum machine_mode mode;
1178 /* Don't accept CONST_INT or anything similar
1179 if the caller wants something floating. */
1180 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1181 && GET_MODE_CLASS (mode) != MODE_INT
1182 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1183 return 0;
1185 if (GET_CODE (op) == CONST_INT
1186 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1187 return 0;
1189 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1190 result in 0/1. It seems a safe assumption that this is
1191 in range for everyone. */
1192 if (GET_CODE (op) == CONSTANT_P_RTX)
1193 return 1;
1195 return (CONSTANT_P (op)
1196 && (GET_MODE (op) == mode || mode == VOIDmode
1197 || GET_MODE (op) == VOIDmode)
1198 #ifdef LEGITIMATE_PIC_OPERAND_P
1199 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1200 #endif
1201 && LEGITIMATE_CONSTANT_P (op));
1204 /* Returns 1 if OP is an operand that is a CONST_INT. */
1207 const_int_operand (op, mode)
1208 register rtx op;
1209 enum machine_mode mode ATTRIBUTE_UNUSED;
1211 return GET_CODE (op) == CONST_INT;
1214 /* Returns 1 if OP is an operand that is a constant integer or constant
1215 floating-point number. */
1218 const_double_operand (op, mode)
1219 register rtx op;
1220 enum machine_mode mode;
1222 /* Don't accept CONST_INT or anything similar
1223 if the caller wants something floating. */
1224 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1225 && GET_MODE_CLASS (mode) != MODE_INT
1226 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1227 return 0;
1229 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1230 && (mode == VOIDmode || GET_MODE (op) == mode
1231 || GET_MODE (op) == VOIDmode));
1234 /* Return 1 if OP is a general operand that is not an immediate operand. */
1237 nonimmediate_operand (op, mode)
1238 register rtx op;
1239 enum machine_mode mode;
1241 return (general_operand (op, mode) && ! CONSTANT_P (op));
1244 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1247 nonmemory_operand (op, mode)
1248 register rtx op;
1249 enum machine_mode mode;
1251 if (CONSTANT_P (op))
1253 /* Don't accept CONST_INT or anything similar
1254 if the caller wants something floating. */
1255 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1256 && GET_MODE_CLASS (mode) != MODE_INT
1257 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1258 return 0;
1260 if (GET_CODE (op) == CONST_INT
1261 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1262 return 0;
1264 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1265 || mode == VOIDmode)
1266 #ifdef LEGITIMATE_PIC_OPERAND_P
1267 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1268 #endif
1269 && LEGITIMATE_CONSTANT_P (op));
1272 if (GET_MODE (op) != mode && mode != VOIDmode)
1273 return 0;
1275 if (GET_CODE (op) == SUBREG)
1277 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1278 because it is guaranteed to be reloaded into one.
1279 Just make sure the MEM is valid in itself.
1280 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1281 but currently it does result from (SUBREG (REG)...) where the
1282 reg went on the stack.) */
1283 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1284 return general_operand (op, mode);
1285 op = SUBREG_REG (op);
1288 /* We don't consider registers whose class is NO_REGS
1289 to be a register operand. */
1290 return (GET_CODE (op) == REG
1291 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1292 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1295 /* Return 1 if OP is a valid operand that stands for pushing a
1296 value of mode MODE onto the stack.
1298 The main use of this function is as a predicate in match_operand
1299 expressions in the machine description. */
1302 push_operand (op, mode)
1303 rtx op;
1304 enum machine_mode mode;
1306 unsigned int rounded_size = GET_MODE_SIZE (mode);
1308 #ifdef PUSH_ROUNDING
1309 rounded_size = PUSH_ROUNDING (rounded_size);
1310 #endif
1312 if (GET_CODE (op) != MEM)
1313 return 0;
1315 if (mode != VOIDmode && GET_MODE (op) != mode)
1316 return 0;
1318 op = XEXP (op, 0);
1320 if (rounded_size == GET_MODE_SIZE (mode))
1322 if (GET_CODE (op) != STACK_PUSH_CODE)
1323 return 0;
1325 else
1327 if (GET_CODE (op) != PRE_MODIFY
1328 || GET_CODE (XEXP (op, 1)) != PLUS
1329 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1330 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1331 #ifdef STACK_GROWS_DOWNWARD
1332 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1333 #else
1334 || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
1335 #endif
1337 return 0;
1340 return XEXP (op, 0) == stack_pointer_rtx;
1343 /* Return 1 if OP is a valid operand that stands for popping a
1344 value of mode MODE off the stack.
1346 The main use of this function is as a predicate in match_operand
1347 expressions in the machine description. */
1350 pop_operand (op, mode)
1351 rtx op;
1352 enum machine_mode mode;
1354 if (GET_CODE (op) != MEM)
1355 return 0;
1357 if (mode != VOIDmode && GET_MODE (op) != mode)
1358 return 0;
1360 op = XEXP (op, 0);
1362 if (GET_CODE (op) != STACK_POP_CODE)
1363 return 0;
1365 return XEXP (op, 0) == stack_pointer_rtx;
1368 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1371 memory_address_p (mode, addr)
1372 enum machine_mode mode ATTRIBUTE_UNUSED;
1373 register rtx addr;
1375 if (GET_CODE (addr) == ADDRESSOF)
1376 return 1;
1378 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1379 return 0;
1381 win:
1382 return 1;
1385 /* Return 1 if OP is a valid memory reference with mode MODE,
1386 including a valid address.
1388 The main use of this function is as a predicate in match_operand
1389 expressions in the machine description. */
1392 memory_operand (op, mode)
1393 register rtx op;
1394 enum machine_mode mode;
1396 rtx inner;
1398 if (! reload_completed)
1399 /* Note that no SUBREG is a memory operand before end of reload pass,
1400 because (SUBREG (MEM...)) forces reloading into a register. */
1401 return GET_CODE (op) == MEM && general_operand (op, mode);
1403 if (mode != VOIDmode && GET_MODE (op) != mode)
1404 return 0;
1406 inner = op;
1407 if (GET_CODE (inner) == SUBREG)
1408 inner = SUBREG_REG (inner);
1410 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1413 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1414 that is, a memory reference whose address is a general_operand. */
1417 indirect_operand (op, mode)
1418 register rtx op;
1419 enum machine_mode mode;
1421 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1422 if (! reload_completed
1423 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1425 register int offset = SUBREG_BYTE (op);
1426 rtx inner = SUBREG_REG (op);
1428 if (mode != VOIDmode && GET_MODE (op) != mode)
1429 return 0;
1431 /* The only way that we can have a general_operand as the resulting
1432 address is if OFFSET is zero and the address already is an operand
1433 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1434 operand. */
1436 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1437 || (GET_CODE (XEXP (inner, 0)) == PLUS
1438 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1439 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1440 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1443 return (GET_CODE (op) == MEM
1444 && memory_operand (op, mode)
1445 && general_operand (XEXP (op, 0), Pmode));
1448 /* Return 1 if this is a comparison operator. This allows the use of
1449 MATCH_OPERATOR to recognize all the branch insns. */
1452 comparison_operator (op, mode)
1453 register rtx op;
1454 enum machine_mode mode;
1456 return ((mode == VOIDmode || GET_MODE (op) == mode)
1457 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1460 /* If BODY is an insn body that uses ASM_OPERANDS,
1461 return the number of operands (both input and output) in the insn.
1462 Otherwise return -1. */
1465 asm_noperands (body)
1466 rtx body;
1468 switch (GET_CODE (body))
1470 case ASM_OPERANDS:
1471 /* No output operands: return number of input operands. */
1472 return ASM_OPERANDS_INPUT_LENGTH (body);
1473 case SET:
1474 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1475 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1476 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1477 else
1478 return -1;
1479 case PARALLEL:
1480 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1481 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1483 /* Multiple output operands, or 1 output plus some clobbers:
1484 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1485 int i;
1486 int n_sets;
1488 /* Count backwards through CLOBBERs to determine number of SETs. */
1489 for (i = XVECLEN (body, 0); i > 0; i--)
1491 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1492 break;
1493 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1494 return -1;
1497 /* N_SETS is now number of output operands. */
1498 n_sets = i;
1500 /* Verify that all the SETs we have
1501 came from a single original asm_operands insn
1502 (so that invalid combinations are blocked). */
1503 for (i = 0; i < n_sets; i++)
1505 rtx elt = XVECEXP (body, 0, i);
1506 if (GET_CODE (elt) != SET)
1507 return -1;
1508 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1509 return -1;
1510 /* If these ASM_OPERANDS rtx's came from different original insns
1511 then they aren't allowed together. */
1512 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1513 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1514 return -1;
1516 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1517 + n_sets);
1519 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1521 /* 0 outputs, but some clobbers:
1522 body is [(asm_operands ...) (clobber (reg ...))...]. */
1523 int i;
1525 /* Make sure all the other parallel things really are clobbers. */
1526 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1527 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1528 return -1;
1530 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1532 else
1533 return -1;
1534 default:
1535 return -1;
1539 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1540 copy its operands (both input and output) into the vector OPERANDS,
1541 the locations of the operands within the insn into the vector OPERAND_LOCS,
1542 and the constraints for the operands into CONSTRAINTS.
1543 Write the modes of the operands into MODES.
1544 Return the assembler-template.
1546 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1547 we don't store that info. */
1549 const char *
1550 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1551 rtx body;
1552 rtx *operands;
1553 rtx **operand_locs;
1554 const char **constraints;
1555 enum machine_mode *modes;
1557 register int i;
1558 int noperands;
1559 const char *template = 0;
1561 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1563 rtx asmop = SET_SRC (body);
1564 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1566 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1568 for (i = 1; i < noperands; i++)
1570 if (operand_locs)
1571 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1572 if (operands)
1573 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1574 if (constraints)
1575 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1576 if (modes)
1577 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1580 /* The output is in the SET.
1581 Its constraint is in the ASM_OPERANDS itself. */
1582 if (operands)
1583 operands[0] = SET_DEST (body);
1584 if (operand_locs)
1585 operand_locs[0] = &SET_DEST (body);
1586 if (constraints)
1587 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1588 if (modes)
1589 modes[0] = GET_MODE (SET_DEST (body));
1590 template = ASM_OPERANDS_TEMPLATE (asmop);
1592 else if (GET_CODE (body) == ASM_OPERANDS)
1594 rtx asmop = body;
1595 /* No output operands: BODY is (asm_operands ....). */
1597 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1599 /* The input operands are found in the 1st element vector. */
1600 /* Constraints for inputs are in the 2nd element vector. */
1601 for (i = 0; i < noperands; i++)
1603 if (operand_locs)
1604 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1605 if (operands)
1606 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1607 if (constraints)
1608 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1609 if (modes)
1610 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1612 template = ASM_OPERANDS_TEMPLATE (asmop);
1614 else if (GET_CODE (body) == PARALLEL
1615 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1617 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1618 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1619 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1620 int nout = 0; /* Does not include CLOBBERs. */
1622 /* At least one output, plus some CLOBBERs. */
1624 /* The outputs are in the SETs.
1625 Their constraints are in the ASM_OPERANDS itself. */
1626 for (i = 0; i < nparallel; i++)
1628 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1629 break; /* Past last SET */
1631 if (operands)
1632 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1633 if (operand_locs)
1634 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1635 if (constraints)
1636 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1637 if (modes)
1638 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1639 nout++;
1642 for (i = 0; i < nin; i++)
1644 if (operand_locs)
1645 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1646 if (operands)
1647 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1648 if (constraints)
1649 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1650 if (modes)
1651 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1654 template = ASM_OPERANDS_TEMPLATE (asmop);
1656 else if (GET_CODE (body) == PARALLEL
1657 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1659 /* No outputs, but some CLOBBERs. */
1661 rtx asmop = XVECEXP (body, 0, 0);
1662 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1664 for (i = 0; i < nin; i++)
1666 if (operand_locs)
1667 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1668 if (operands)
1669 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1670 if (constraints)
1671 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1672 if (modes)
1673 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1676 template = ASM_OPERANDS_TEMPLATE (asmop);
1679 return template;
1682 /* Check if an asm_operand matches it's constraints.
1683 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1686 asm_operand_ok (op, constraint)
1687 rtx op;
1688 const char *constraint;
1690 int result = 0;
1692 /* Use constrain_operands after reload. */
1693 if (reload_completed)
1694 abort ();
1696 while (*constraint)
1698 char c = *constraint++;
1699 switch (c)
1701 case '=':
1702 case '+':
1703 case '*':
1704 case '%':
1705 case '?':
1706 case '!':
1707 case '#':
1708 case '&':
1709 case ',':
1710 break;
1712 case '0': case '1': case '2': case '3': case '4':
1713 case '5': case '6': case '7': case '8': case '9':
1714 /* For best results, our caller should have given us the
1715 proper matching constraint, but we can't actually fail
1716 the check if they didn't. Indicate that results are
1717 inconclusive. */
1718 result = -1;
1719 break;
1721 case 'p':
1722 if (address_operand (op, VOIDmode))
1723 return 1;
1724 break;
1726 case 'm':
1727 case 'V': /* non-offsettable */
1728 if (memory_operand (op, VOIDmode))
1729 return 1;
1730 break;
1732 case 'o': /* offsettable */
1733 if (offsettable_nonstrict_memref_p (op))
1734 return 1;
1735 break;
1737 case '<':
1738 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1739 excepting those that expand_call created. Further, on some
1740 machines which do not have generalized auto inc/dec, an inc/dec
1741 is not a memory_operand.
1743 Match any memory and hope things are resolved after reload. */
1745 if (GET_CODE (op) == MEM
1746 && (1
1747 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1748 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1749 return 1;
1750 break;
1752 case '>':
1753 if (GET_CODE (op) == MEM
1754 && (1
1755 || GET_CODE (XEXP (op, 0)) == PRE_INC
1756 || GET_CODE (XEXP (op, 0)) == POST_INC))
1757 return 1;
1758 break;
1760 case 'E':
1761 #ifndef REAL_ARITHMETIC
1762 /* Match any floating double constant, but only if
1763 we can examine the bits of it reliably. */
1764 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1765 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1766 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1767 break;
1768 #endif
1769 /* FALLTHRU */
1771 case 'F':
1772 if (GET_CODE (op) == CONST_DOUBLE)
1773 return 1;
1774 break;
1776 case 'G':
1777 if (GET_CODE (op) == CONST_DOUBLE
1778 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1779 return 1;
1780 break;
1781 case 'H':
1782 if (GET_CODE (op) == CONST_DOUBLE
1783 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1784 return 1;
1785 break;
1787 case 's':
1788 if (GET_CODE (op) == CONST_INT
1789 || (GET_CODE (op) == CONST_DOUBLE
1790 && GET_MODE (op) == VOIDmode))
1791 break;
1792 /* FALLTHRU */
1794 case 'i':
1795 if (CONSTANT_P (op)
1796 #ifdef LEGITIMATE_PIC_OPERAND_P
1797 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1798 #endif
1800 return 1;
1801 break;
1803 case 'n':
1804 if (GET_CODE (op) == CONST_INT
1805 || (GET_CODE (op) == CONST_DOUBLE
1806 && GET_MODE (op) == VOIDmode))
1807 return 1;
1808 break;
1810 case 'I':
1811 if (GET_CODE (op) == CONST_INT
1812 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1813 return 1;
1814 break;
1815 case 'J':
1816 if (GET_CODE (op) == CONST_INT
1817 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1818 return 1;
1819 break;
1820 case 'K':
1821 if (GET_CODE (op) == CONST_INT
1822 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1823 return 1;
1824 break;
1825 case 'L':
1826 if (GET_CODE (op) == CONST_INT
1827 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1828 return 1;
1829 break;
1830 case 'M':
1831 if (GET_CODE (op) == CONST_INT
1832 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1833 return 1;
1834 break;
1835 case 'N':
1836 if (GET_CODE (op) == CONST_INT
1837 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1838 return 1;
1839 break;
1840 case 'O':
1841 if (GET_CODE (op) == CONST_INT
1842 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1843 return 1;
1844 break;
1845 case 'P':
1846 if (GET_CODE (op) == CONST_INT
1847 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1848 return 1;
1849 break;
1851 case 'X':
1852 return 1;
1854 case 'g':
1855 if (general_operand (op, VOIDmode))
1856 return 1;
1857 break;
1859 default:
1860 /* For all other letters, we first check for a register class,
1861 otherwise it is an EXTRA_CONSTRAINT. */
1862 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1864 case 'r':
1865 if (GET_MODE (op) == BLKmode)
1866 break;
1867 if (register_operand (op, VOIDmode))
1868 return 1;
1870 #ifdef EXTRA_CONSTRAINT
1871 if (EXTRA_CONSTRAINT (op, c))
1872 return 1;
1873 #endif
1874 break;
1878 return result;
1881 /* Given an rtx *P, if it is a sum containing an integer constant term,
1882 return the location (type rtx *) of the pointer to that constant term.
1883 Otherwise, return a null pointer. */
1885 static rtx *
1886 find_constant_term_loc (p)
1887 rtx *p;
1889 register rtx *tem;
1890 register enum rtx_code code = GET_CODE (*p);
1892 /* If *P IS such a constant term, P is its location. */
1894 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1895 || code == CONST)
1896 return p;
1898 /* Otherwise, if not a sum, it has no constant term. */
1900 if (GET_CODE (*p) != PLUS)
1901 return 0;
1903 /* If one of the summands is constant, return its location. */
1905 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1906 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1907 return p;
1909 /* Otherwise, check each summand for containing a constant term. */
1911 if (XEXP (*p, 0) != 0)
1913 tem = find_constant_term_loc (&XEXP (*p, 0));
1914 if (tem != 0)
1915 return tem;
1918 if (XEXP (*p, 1) != 0)
1920 tem = find_constant_term_loc (&XEXP (*p, 1));
1921 if (tem != 0)
1922 return tem;
1925 return 0;
1928 /* Return 1 if OP is a memory reference
1929 whose address contains no side effects
1930 and remains valid after the addition
1931 of a positive integer less than the
1932 size of the object being referenced.
1934 We assume that the original address is valid and do not check it.
1936 This uses strict_memory_address_p as a subroutine, so
1937 don't use it before reload. */
1940 offsettable_memref_p (op)
1941 rtx op;
1943 return ((GET_CODE (op) == MEM)
1944 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1947 /* Similar, but don't require a strictly valid mem ref:
1948 consider pseudo-regs valid as index or base regs. */
1951 offsettable_nonstrict_memref_p (op)
1952 rtx op;
1954 return ((GET_CODE (op) == MEM)
1955 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1958 /* Return 1 if Y is a memory address which contains no side effects
1959 and would remain valid after the addition of a positive integer
1960 less than the size of that mode.
1962 We assume that the original address is valid and do not check it.
1963 We do check that it is valid for narrower modes.
1965 If STRICTP is nonzero, we require a strictly valid address,
1966 for the sake of use in reload.c. */
1969 offsettable_address_p (strictp, mode, y)
1970 int strictp;
1971 enum machine_mode mode;
1972 register rtx y;
1974 register enum rtx_code ycode = GET_CODE (y);
1975 register rtx z;
1976 rtx y1 = y;
1977 rtx *y2;
1978 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1979 (strictp ? strict_memory_address_p : memory_address_p);
1980 unsigned int mode_sz = GET_MODE_SIZE (mode);
1982 if (CONSTANT_ADDRESS_P (y))
1983 return 1;
1985 /* Adjusting an offsettable address involves changing to a narrower mode.
1986 Make sure that's OK. */
1988 if (mode_dependent_address_p (y))
1989 return 0;
1991 /* ??? How much offset does an offsettable BLKmode reference need?
1992 Clearly that depends on the situation in which it's being used.
1993 However, the current situation in which we test 0xffffffff is
1994 less than ideal. Caveat user. */
1995 if (mode_sz == 0)
1996 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1998 /* If the expression contains a constant term,
1999 see if it remains valid when max possible offset is added. */
2001 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2003 int good;
2005 y1 = *y2;
2006 *y2 = plus_constant (*y2, mode_sz - 1);
2007 /* Use QImode because an odd displacement may be automatically invalid
2008 for any wider mode. But it should be valid for a single byte. */
2009 good = (*addressp) (QImode, y);
2011 /* In any case, restore old contents of memory. */
2012 *y2 = y1;
2013 return good;
2016 if (GET_RTX_CLASS (ycode) == 'a')
2017 return 0;
2019 /* The offset added here is chosen as the maximum offset that
2020 any instruction could need to add when operating on something
2021 of the specified mode. We assume that if Y and Y+c are
2022 valid addresses then so is Y+d for all 0<d<c. */
2024 z = plus_constant_for_output (y, mode_sz - 1);
2026 /* Use QImode because an odd displacement may be automatically invalid
2027 for any wider mode. But it should be valid for a single byte. */
2028 return (*addressp) (QImode, z);
2031 /* Return 1 if ADDR is an address-expression whose effect depends
2032 on the mode of the memory reference it is used in.
2034 Autoincrement addressing is a typical example of mode-dependence
2035 because the amount of the increment depends on the mode. */
2038 mode_dependent_address_p (addr)
2039 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2041 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2042 return 0;
2043 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2044 win: ATTRIBUTE_UNUSED_LABEL
2045 return 1;
2048 /* Return 1 if OP is a general operand
2049 other than a memory ref with a mode dependent address. */
2052 mode_independent_operand (op, mode)
2053 enum machine_mode mode;
2054 rtx op;
2056 rtx addr;
2058 if (! general_operand (op, mode))
2059 return 0;
2061 if (GET_CODE (op) != MEM)
2062 return 1;
2064 addr = XEXP (op, 0);
2065 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2066 return 1;
2067 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2068 lose: ATTRIBUTE_UNUSED_LABEL
2069 return 0;
2072 /* Given an operand OP that is a valid memory reference which
2073 satisfies offsettable_memref_p, return a new memory reference whose
2074 address has been adjusted by OFFSET. OFFSET should be positive and
2075 less than the size of the object referenced. */
2078 adj_offsettable_operand (op, offset)
2079 rtx op;
2080 int offset;
2082 register enum rtx_code code = GET_CODE (op);
2084 if (code == MEM)
2086 register rtx y = XEXP (op, 0);
2087 register rtx new;
2089 if (CONSTANT_ADDRESS_P (y))
2091 new = gen_rtx_MEM (GET_MODE (op),
2092 plus_constant_for_output (y, offset));
2093 MEM_COPY_ATTRIBUTES (new, op);
2094 return new;
2097 if (GET_CODE (y) == PLUS)
2099 rtx z = y;
2100 register rtx *const_loc;
2102 op = copy_rtx (op);
2103 z = XEXP (op, 0);
2104 const_loc = find_constant_term_loc (&z);
2105 if (const_loc)
2107 *const_loc = plus_constant_for_output (*const_loc, offset);
2108 return op;
2112 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
2113 MEM_COPY_ATTRIBUTES (new, op);
2114 return new;
2116 abort ();
2119 /* Like extract_insn, but save insn extracted and don't extract again, when
2120 called again for the same insn expecting that recog_data still contain the
2121 valid information. This is used primary by gen_attr infrastructure that
2122 often does extract insn again and again. */
2123 void
2124 extract_insn_cached (insn)
2125 rtx insn;
2127 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2128 return;
2129 extract_insn (insn);
2130 recog_data.insn = insn;
2132 /* Do cached extract_insn, constrain_operand and complain about failures.
2133 Used by insn_attrtab. */
2134 void
2135 extract_constrain_insn_cached (insn)
2136 rtx insn;
2138 extract_insn_cached (insn);
2139 if (which_alternative == -1
2140 && !constrain_operands (reload_completed))
2141 fatal_insn_not_found (insn);
2143 /* Do cached constrain_operand and complain about failures. */
2145 constrain_operands_cached (strict)
2146 int strict;
2148 if (which_alternative == -1)
2149 return constrain_operands (strict);
2150 else
2151 return 1;
2154 /* Analyze INSN and fill in recog_data. */
2156 void
2157 extract_insn (insn)
2158 rtx insn;
2160 int i;
2161 int icode;
2162 int noperands;
2163 rtx body = PATTERN (insn);
2165 recog_data.insn = NULL;
2166 recog_data.n_operands = 0;
2167 recog_data.n_alternatives = 0;
2168 recog_data.n_dups = 0;
2169 which_alternative = -1;
2171 switch (GET_CODE (body))
2173 case USE:
2174 case CLOBBER:
2175 case ASM_INPUT:
2176 case ADDR_VEC:
2177 case ADDR_DIFF_VEC:
2178 return;
2180 case SET:
2181 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2182 goto asm_insn;
2183 else
2184 goto normal_insn;
2185 case PARALLEL:
2186 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2187 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2188 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2189 goto asm_insn;
2190 else
2191 goto normal_insn;
2192 case ASM_OPERANDS:
2193 asm_insn:
2194 recog_data.n_operands = noperands = asm_noperands (body);
2195 if (noperands >= 0)
2197 /* This insn is an `asm' with operands. */
2199 /* expand_asm_operands makes sure there aren't too many operands. */
2200 if (noperands > MAX_RECOG_OPERANDS)
2201 abort ();
2203 /* Now get the operand values and constraints out of the insn. */
2204 decode_asm_operands (body, recog_data.operand,
2205 recog_data.operand_loc,
2206 recog_data.constraints,
2207 recog_data.operand_mode);
2208 if (noperands > 0)
2210 const char *p = recog_data.constraints[0];
2211 recog_data.n_alternatives = 1;
2212 while (*p)
2213 recog_data.n_alternatives += (*p++ == ',');
2215 break;
2217 fatal_insn_not_found (insn);
2219 default:
2220 normal_insn:
2221 /* Ordinary insn: recognize it, get the operands via insn_extract
2222 and get the constraints. */
2224 icode = recog_memoized (insn);
2225 if (icode < 0)
2226 fatal_insn_not_found (insn);
2228 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2229 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2230 recog_data.n_dups = insn_data[icode].n_dups;
2232 insn_extract (insn);
2234 for (i = 0; i < noperands; i++)
2236 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2237 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2238 /* VOIDmode match_operands gets mode from their real operand. */
2239 if (recog_data.operand_mode[i] == VOIDmode)
2240 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2243 for (i = 0; i < noperands; i++)
2244 recog_data.operand_type[i]
2245 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2246 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2247 : OP_IN);
2249 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2250 abort ();
2253 /* After calling extract_insn, you can use this function to extract some
2254 information from the constraint strings into a more usable form.
2255 The collected data is stored in recog_op_alt. */
2256 void
2257 preprocess_constraints ()
2259 int i;
2261 memset (recog_op_alt, 0, sizeof recog_op_alt);
2262 for (i = 0; i < recog_data.n_operands; i++)
2264 int j;
2265 struct operand_alternative *op_alt;
2266 const char *p = recog_data.constraints[i];
2268 op_alt = recog_op_alt[i];
2270 for (j = 0; j < recog_data.n_alternatives; j++)
2272 op_alt[j].class = NO_REGS;
2273 op_alt[j].constraint = p;
2274 op_alt[j].matches = -1;
2275 op_alt[j].matched = -1;
2277 if (*p == '\0' || *p == ',')
2279 op_alt[j].anything_ok = 1;
2280 continue;
2283 for (;;)
2285 char c = *p++;
2286 if (c == '#')
2288 c = *p++;
2289 while (c != ',' && c != '\0');
2290 if (c == ',' || c == '\0')
2291 break;
2293 switch (c)
2295 case '=': case '+': case '*': case '%':
2296 case 'E': case 'F': case 'G': case 'H':
2297 case 's': case 'i': case 'n':
2298 case 'I': case 'J': case 'K': case 'L':
2299 case 'M': case 'N': case 'O': case 'P':
2300 /* These don't say anything we care about. */
2301 break;
2303 case '?':
2304 op_alt[j].reject += 6;
2305 break;
2306 case '!':
2307 op_alt[j].reject += 600;
2308 break;
2309 case '&':
2310 op_alt[j].earlyclobber = 1;
2311 break;
2313 case '0': case '1': case '2': case '3': case '4':
2314 case '5': case '6': case '7': case '8': case '9':
2315 op_alt[j].matches = c - '0';
2316 recog_op_alt[op_alt[j].matches][j].matched = i;
2317 break;
2319 case 'm':
2320 op_alt[j].memory_ok = 1;
2321 break;
2322 case '<':
2323 op_alt[j].decmem_ok = 1;
2324 break;
2325 case '>':
2326 op_alt[j].incmem_ok = 1;
2327 break;
2328 case 'V':
2329 op_alt[j].nonoffmem_ok = 1;
2330 break;
2331 case 'o':
2332 op_alt[j].offmem_ok = 1;
2333 break;
2334 case 'X':
2335 op_alt[j].anything_ok = 1;
2336 break;
2338 case 'p':
2339 op_alt[j].is_address = 1;
2340 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2341 break;
2343 case 'g': case 'r':
2344 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2345 break;
2347 default:
2348 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2349 break;
2356 /* Check the operands of an insn against the insn's operand constraints
2357 and return 1 if they are valid.
2358 The information about the insn's operands, constraints, operand modes
2359 etc. is obtained from the global variables set up by extract_insn.
2361 WHICH_ALTERNATIVE is set to a number which indicates which
2362 alternative of constraints was matched: 0 for the first alternative,
2363 1 for the next, etc.
2365 In addition, when two operands are required to match
2366 and it happens that the output operand is (reg) while the
2367 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2368 make the output operand look like the input.
2369 This is because the output operand is the one the template will print.
2371 This is used in final, just before printing the assembler code and by
2372 the routines that determine an insn's attribute.
2374 If STRICT is a positive non-zero value, it means that we have been
2375 called after reload has been completed. In that case, we must
2376 do all checks strictly. If it is zero, it means that we have been called
2377 before reload has completed. In that case, we first try to see if we can
2378 find an alternative that matches strictly. If not, we try again, this
2379 time assuming that reload will fix up the insn. This provides a "best
2380 guess" for the alternative and is used to compute attributes of insns prior
2381 to reload. A negative value of STRICT is used for this internal call. */
2383 struct funny_match
2385 int this, other;
2389 constrain_operands (strict)
2390 int strict;
2392 const char *constraints[MAX_RECOG_OPERANDS];
2393 int matching_operands[MAX_RECOG_OPERANDS];
2394 int earlyclobber[MAX_RECOG_OPERANDS];
2395 register int c;
2397 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2398 int funny_match_index;
2400 which_alternative = 0;
2401 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2402 return 1;
2404 for (c = 0; c < recog_data.n_operands; c++)
2406 constraints[c] = recog_data.constraints[c];
2407 matching_operands[c] = -1;
2412 register int opno;
2413 int lose = 0;
2414 funny_match_index = 0;
2416 for (opno = 0; opno < recog_data.n_operands; opno++)
2418 register rtx op = recog_data.operand[opno];
2419 enum machine_mode mode = GET_MODE (op);
2420 register const char *p = constraints[opno];
2421 int offset = 0;
2422 int win = 0;
2423 int val;
2425 earlyclobber[opno] = 0;
2427 /* A unary operator may be accepted by the predicate, but it
2428 is irrelevant for matching constraints. */
2429 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2430 op = XEXP (op, 0);
2432 if (GET_CODE (op) == SUBREG)
2434 if (GET_CODE (SUBREG_REG (op)) == REG
2435 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2436 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2437 GET_MODE (SUBREG_REG (op)),
2438 SUBREG_BYTE (op),
2439 GET_MODE (op));
2440 op = SUBREG_REG (op);
2443 /* An empty constraint or empty alternative
2444 allows anything which matched the pattern. */
2445 if (*p == 0 || *p == ',')
2446 win = 1;
2448 while (*p && (c = *p++) != ',')
2449 switch (c)
2451 case '?': case '!': case '*': case '%':
2452 case '=': case '+':
2453 break;
2455 case '#':
2456 /* Ignore rest of this alternative as far as
2457 constraint checking is concerned. */
2458 while (*p && *p != ',')
2459 p++;
2460 break;
2462 case '&':
2463 earlyclobber[opno] = 1;
2464 break;
2466 case '0': case '1': case '2': case '3': case '4':
2467 case '5': case '6': case '7': case '8': case '9':
2469 /* This operand must be the same as a previous one.
2470 This kind of constraint is used for instructions such
2471 as add when they take only two operands.
2473 Note that the lower-numbered operand is passed first.
2475 If we are not testing strictly, assume that this constraint
2476 will be satisfied. */
2477 if (strict < 0)
2478 val = 1;
2479 else
2481 rtx op1 = recog_data.operand[c - '0'];
2482 rtx op2 = recog_data.operand[opno];
2484 /* A unary operator may be accepted by the predicate,
2485 but it is irrelevant for matching constraints. */
2486 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2487 op1 = XEXP (op1, 0);
2488 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2489 op2 = XEXP (op2, 0);
2491 val = operands_match_p (op1, op2);
2494 matching_operands[opno] = c - '0';
2495 matching_operands[c - '0'] = opno;
2497 if (val != 0)
2498 win = 1;
2499 /* If output is *x and input is *--x,
2500 arrange later to change the output to *--x as well,
2501 since the output op is the one that will be printed. */
2502 if (val == 2 && strict > 0)
2504 funny_match[funny_match_index].this = opno;
2505 funny_match[funny_match_index++].other = c - '0';
2507 break;
2509 case 'p':
2510 /* p is used for address_operands. When we are called by
2511 gen_reload, no one will have checked that the address is
2512 strictly valid, i.e., that all pseudos requiring hard regs
2513 have gotten them. */
2514 if (strict <= 0
2515 || (strict_memory_address_p (recog_data.operand_mode[opno],
2516 op)))
2517 win = 1;
2518 break;
2520 /* No need to check general_operand again;
2521 it was done in insn-recog.c. */
2522 case 'g':
2523 /* Anything goes unless it is a REG and really has a hard reg
2524 but the hard reg is not in the class GENERAL_REGS. */
2525 if (strict < 0
2526 || GENERAL_REGS == ALL_REGS
2527 || GET_CODE (op) != REG
2528 || (reload_in_progress
2529 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2530 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2531 win = 1;
2532 break;
2534 case 'X':
2535 /* This is used for a MATCH_SCRATCH in the cases when
2536 we don't actually need anything. So anything goes
2537 any time. */
2538 win = 1;
2539 break;
2541 case 'm':
2542 if (GET_CODE (op) == MEM
2543 /* Before reload, accept what reload can turn into mem. */
2544 || (strict < 0 && CONSTANT_P (op))
2545 /* During reload, accept a pseudo */
2546 || (reload_in_progress && GET_CODE (op) == REG
2547 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2548 win = 1;
2549 break;
2551 case '<':
2552 if (GET_CODE (op) == MEM
2553 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2554 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2555 win = 1;
2556 break;
2558 case '>':
2559 if (GET_CODE (op) == MEM
2560 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2561 || GET_CODE (XEXP (op, 0)) == POST_INC))
2562 win = 1;
2563 break;
2565 case 'E':
2566 #ifndef REAL_ARITHMETIC
2567 /* Match any CONST_DOUBLE, but only if
2568 we can examine the bits of it reliably. */
2569 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2570 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2571 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2572 break;
2573 #endif
2574 if (GET_CODE (op) == CONST_DOUBLE)
2575 win = 1;
2576 break;
2578 case 'F':
2579 if (GET_CODE (op) == CONST_DOUBLE)
2580 win = 1;
2581 break;
2583 case 'G':
2584 case 'H':
2585 if (GET_CODE (op) == CONST_DOUBLE
2586 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2587 win = 1;
2588 break;
2590 case 's':
2591 if (GET_CODE (op) == CONST_INT
2592 || (GET_CODE (op) == CONST_DOUBLE
2593 && GET_MODE (op) == VOIDmode))
2594 break;
2595 case 'i':
2596 if (CONSTANT_P (op))
2597 win = 1;
2598 break;
2600 case 'n':
2601 if (GET_CODE (op) == CONST_INT
2602 || (GET_CODE (op) == CONST_DOUBLE
2603 && GET_MODE (op) == VOIDmode))
2604 win = 1;
2605 break;
2607 case 'I':
2608 case 'J':
2609 case 'K':
2610 case 'L':
2611 case 'M':
2612 case 'N':
2613 case 'O':
2614 case 'P':
2615 if (GET_CODE (op) == CONST_INT
2616 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2617 win = 1;
2618 break;
2620 case 'V':
2621 if (GET_CODE (op) == MEM
2622 && ((strict > 0 && ! offsettable_memref_p (op))
2623 || (strict < 0
2624 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2625 || (reload_in_progress
2626 && !(GET_CODE (op) == REG
2627 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2628 win = 1;
2629 break;
2631 case 'o':
2632 if ((strict > 0 && offsettable_memref_p (op))
2633 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2634 /* Before reload, accept what reload can handle. */
2635 || (strict < 0
2636 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2637 /* During reload, accept a pseudo */
2638 || (reload_in_progress && GET_CODE (op) == REG
2639 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2640 win = 1;
2641 break;
2643 default:
2645 enum reg_class class;
2647 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2648 if (class != NO_REGS)
2650 if (strict < 0
2651 || (strict == 0
2652 && GET_CODE (op) == REG
2653 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2654 || (strict == 0 && GET_CODE (op) == SCRATCH)
2655 || (GET_CODE (op) == REG
2656 && reg_fits_class_p (op, class, offset, mode)))
2657 win = 1;
2659 #ifdef EXTRA_CONSTRAINT
2660 else if (EXTRA_CONSTRAINT (op, c))
2661 win = 1;
2662 #endif
2663 break;
2667 constraints[opno] = p;
2668 /* If this operand did not win somehow,
2669 this alternative loses. */
2670 if (! win)
2671 lose = 1;
2673 /* This alternative won; the operands are ok.
2674 Change whichever operands this alternative says to change. */
2675 if (! lose)
2677 int opno, eopno;
2679 /* See if any earlyclobber operand conflicts with some other
2680 operand. */
2682 if (strict > 0)
2683 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2684 /* Ignore earlyclobber operands now in memory,
2685 because we would often report failure when we have
2686 two memory operands, one of which was formerly a REG. */
2687 if (earlyclobber[eopno]
2688 && GET_CODE (recog_data.operand[eopno]) == REG)
2689 for (opno = 0; opno < recog_data.n_operands; opno++)
2690 if ((GET_CODE (recog_data.operand[opno]) == MEM
2691 || recog_data.operand_type[opno] != OP_OUT)
2692 && opno != eopno
2693 /* Ignore things like match_operator operands. */
2694 && *recog_data.constraints[opno] != 0
2695 && ! (matching_operands[opno] == eopno
2696 && operands_match_p (recog_data.operand[opno],
2697 recog_data.operand[eopno]))
2698 && ! safe_from_earlyclobber (recog_data.operand[opno],
2699 recog_data.operand[eopno]))
2700 lose = 1;
2702 if (! lose)
2704 while (--funny_match_index >= 0)
2706 recog_data.operand[funny_match[funny_match_index].other]
2707 = recog_data.operand[funny_match[funny_match_index].this];
2710 return 1;
2714 which_alternative++;
2716 while (which_alternative < recog_data.n_alternatives);
2718 which_alternative = -1;
2719 /* If we are about to reject this, but we are not to test strictly,
2720 try a very loose test. Only return failure if it fails also. */
2721 if (strict == 0)
2722 return constrain_operands (-1);
2723 else
2724 return 0;
2727 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2728 is a hard reg in class CLASS when its regno is offset by OFFSET
2729 and changed to mode MODE.
2730 If REG occupies multiple hard regs, all of them must be in CLASS. */
2733 reg_fits_class_p (operand, class, offset, mode)
2734 rtx operand;
2735 register enum reg_class class;
2736 int offset;
2737 enum machine_mode mode;
2739 register int regno = REGNO (operand);
2740 if (regno < FIRST_PSEUDO_REGISTER
2741 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2742 regno + offset))
2744 register int sr;
2745 regno += offset;
2746 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2747 sr > 0; sr--)
2748 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2749 regno + sr))
2750 break;
2751 return sr == 0;
2754 return 0;
2757 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2759 void
2760 split_all_insns (upd_life)
2761 int upd_life;
2763 sbitmap blocks;
2764 int changed;
2765 int i;
2767 blocks = sbitmap_alloc (n_basic_blocks);
2768 sbitmap_zero (blocks);
2769 changed = 0;
2771 for (i = n_basic_blocks - 1; i >= 0; --i)
2773 basic_block bb = BASIC_BLOCK (i);
2774 rtx insn, next;
2776 for (insn = bb->head; insn ; insn = next)
2778 rtx set;
2780 /* Can't use `next_real_insn' because that might go across
2781 CODE_LABELS and short-out basic blocks. */
2782 next = NEXT_INSN (insn);
2783 if (! INSN_P (insn))
2786 /* Don't split no-op move insns. These should silently
2787 disappear later in final. Splitting such insns would
2788 break the code that handles REG_NO_CONFLICT blocks. */
2790 else if ((set = single_set (insn)) != NULL
2791 && set_noop_p (set))
2793 /* Nops get in the way while scheduling, so delete them
2794 now if register allocation has already been done. It
2795 is too risky to try to do this before register
2796 allocation, and there are unlikely to be very many
2797 nops then anyways. */
2798 if (reload_completed)
2800 PUT_CODE (insn, NOTE);
2801 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2802 NOTE_SOURCE_FILE (insn) = 0;
2805 else
2807 /* Split insns here to get max fine-grain parallelism. */
2808 rtx first = PREV_INSN (insn);
2809 rtx last = try_split (PATTERN (insn), insn, 1);
2811 if (last != insn)
2813 SET_BIT (blocks, i);
2814 changed = 1;
2816 /* try_split returns the NOTE that INSN became. */
2817 PUT_CODE (insn, NOTE);
2818 NOTE_SOURCE_FILE (insn) = 0;
2819 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2821 /* ??? Coddle to md files that generate subregs in post-
2822 reload splitters instead of computing the proper
2823 hard register. */
2824 if (reload_completed && first != last)
2826 first = NEXT_INSN (first);
2827 while (1)
2829 if (INSN_P (first))
2830 cleanup_subreg_operands (first);
2831 if (first == last)
2832 break;
2833 first = NEXT_INSN (first);
2837 if (insn == bb->end)
2839 bb->end = last;
2840 break;
2845 if (insn == bb->end)
2846 break;
2849 /* ??? When we're called from just after reload, the CFG is in bad
2850 shape, and we may have fallen off the end. This could be fixed
2851 by having reload not try to delete unreachable code. Otherwise
2852 assert we found the end insn. */
2853 if (insn == NULL && upd_life)
2854 abort ();
2857 if (changed && upd_life)
2859 compute_bb_for_insn (get_max_uid ());
2860 count_or_remove_death_notes (blocks, 1);
2861 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2864 sbitmap_free (blocks);
2867 #ifdef HAVE_peephole2
2868 struct peep2_insn_data
2870 rtx insn;
2871 regset live_before;
2874 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2875 static int peep2_current;
2877 /* A non-insn marker indicating the last insn of the block.
2878 The live_before regset for this element is correct, indicating
2879 global_live_at_end for the block. */
2880 #define PEEP2_EOB pc_rtx
2882 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2883 does not exist. Used by the recognizer to find the next insn to match
2884 in a multi-insn pattern. */
2887 peep2_next_insn (n)
2888 int n;
2890 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2891 abort ();
2893 n += peep2_current;
2894 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2895 n -= MAX_INSNS_PER_PEEP2 + 1;
2897 if (peep2_insn_data[n].insn == PEEP2_EOB)
2898 return NULL_RTX;
2899 return peep2_insn_data[n].insn;
2902 /* Return true if REGNO is dead before the Nth non-note insn
2903 after `current'. */
2906 peep2_regno_dead_p (ofs, regno)
2907 int ofs;
2908 int regno;
2910 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2911 abort ();
2913 ofs += peep2_current;
2914 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2915 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2917 if (peep2_insn_data[ofs].insn == NULL_RTX)
2918 abort ();
2920 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2923 /* Similarly for a REG. */
2926 peep2_reg_dead_p (ofs, reg)
2927 int ofs;
2928 rtx reg;
2930 int regno, n;
2932 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2933 abort ();
2935 ofs += peep2_current;
2936 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2937 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2939 if (peep2_insn_data[ofs].insn == NULL_RTX)
2940 abort ();
2942 regno = REGNO (reg);
2943 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2944 while (--n >= 0)
2945 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2946 return 0;
2947 return 1;
2950 /* Try to find a hard register of mode MODE, matching the register class in
2951 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2952 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2953 in which case the only condition is that the register must be available
2954 before CURRENT_INSN.
2955 Registers that already have bits set in REG_SET will not be considered.
2957 If an appropriate register is available, it will be returned and the
2958 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2959 returned. */
2962 peep2_find_free_register (from, to, class_str, mode, reg_set)
2963 int from, to;
2964 const char *class_str;
2965 enum machine_mode mode;
2966 HARD_REG_SET *reg_set;
2968 static int search_ofs;
2969 enum reg_class class;
2970 HARD_REG_SET live;
2971 int i;
2973 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2974 abort ();
2976 from += peep2_current;
2977 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2978 from -= MAX_INSNS_PER_PEEP2 + 1;
2979 to += peep2_current;
2980 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2981 to -= MAX_INSNS_PER_PEEP2 + 1;
2983 if (peep2_insn_data[from].insn == NULL_RTX)
2984 abort ();
2985 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2987 while (from != to)
2989 HARD_REG_SET this_live;
2991 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2992 from = 0;
2993 if (peep2_insn_data[from].insn == NULL_RTX)
2994 abort ();
2995 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2996 IOR_HARD_REG_SET (live, this_live);
2999 class = (class_str[0] == 'r' ? GENERAL_REGS
3000 : REG_CLASS_FROM_LETTER (class_str[0]));
3002 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3004 int raw_regno, regno, success, j;
3006 /* Distribute the free registers as much as possible. */
3007 raw_regno = search_ofs + i;
3008 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3009 raw_regno -= FIRST_PSEUDO_REGISTER;
3010 #ifdef REG_ALLOC_ORDER
3011 regno = reg_alloc_order[raw_regno];
3012 #else
3013 regno = raw_regno;
3014 #endif
3016 /* Don't allocate fixed registers. */
3017 if (fixed_regs[regno])
3018 continue;
3019 /* Make sure the register is of the right class. */
3020 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3021 continue;
3022 /* And can support the mode we need. */
3023 if (! HARD_REGNO_MODE_OK (regno, mode))
3024 continue;
3025 /* And that we don't create an extra save/restore. */
3026 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3027 continue;
3028 /* And we don't clobber traceback for noreturn functions. */
3029 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3030 && (! reload_completed || frame_pointer_needed))
3031 continue;
3033 success = 1;
3034 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3036 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3037 || TEST_HARD_REG_BIT (live, regno + j))
3039 success = 0;
3040 break;
3043 if (success)
3045 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3046 SET_HARD_REG_BIT (*reg_set, regno + j);
3048 /* Start the next search with the next register. */
3049 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3050 raw_regno = 0;
3051 search_ofs = raw_regno;
3053 return gen_rtx_REG (mode, regno);
3057 search_ofs = 0;
3058 return NULL_RTX;
3061 /* Perform the peephole2 optimization pass. */
3063 void
3064 peephole2_optimize (dump_file)
3065 FILE *dump_file ATTRIBUTE_UNUSED;
3067 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3068 rtx insn, prev;
3069 regset live;
3070 int i, b;
3071 #ifdef HAVE_conditional_execution
3072 sbitmap blocks;
3073 int changed;
3074 #endif
3076 /* Initialize the regsets we're going to use. */
3077 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3078 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3079 live = INITIALIZE_REG_SET (rs_heads[i]);
3081 #ifdef HAVE_conditional_execution
3082 blocks = sbitmap_alloc (n_basic_blocks);
3083 sbitmap_zero (blocks);
3084 changed = 0;
3085 #else
3086 count_or_remove_death_notes (NULL, 1);
3087 #endif
3089 for (b = n_basic_blocks - 1; b >= 0; --b)
3091 basic_block bb = BASIC_BLOCK (b);
3092 struct propagate_block_info *pbi;
3094 /* Indicate that all slots except the last holds invalid data. */
3095 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3096 peep2_insn_data[i].insn = NULL_RTX;
3098 /* Indicate that the last slot contains live_after data. */
3099 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3100 peep2_current = MAX_INSNS_PER_PEEP2;
3102 /* Start up propagation. */
3103 COPY_REG_SET (live, bb->global_live_at_end);
3104 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3106 #ifdef HAVE_conditional_execution
3107 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3108 #else
3109 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3110 #endif
3112 for (insn = bb->end; ; insn = prev)
3114 prev = PREV_INSN (insn);
3115 if (INSN_P (insn))
3117 rtx try;
3118 int match_len;
3120 /* Record this insn. */
3121 if (--peep2_current < 0)
3122 peep2_current = MAX_INSNS_PER_PEEP2;
3123 peep2_insn_data[peep2_current].insn = insn;
3124 propagate_one_insn (pbi, insn);
3125 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3127 /* Match the peephole. */
3128 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3129 if (try != NULL)
3131 i = match_len + peep2_current;
3132 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3133 i -= MAX_INSNS_PER_PEEP2 + 1;
3135 /* Replace the old sequence with the new. */
3136 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3137 try = emit_insn_after (try, prev);
3139 /* Adjust the basic block boundaries. */
3140 if (peep2_insn_data[i].insn == bb->end)
3141 bb->end = try;
3142 if (insn == bb->head)
3143 bb->head = NEXT_INSN (prev);
3145 #ifdef HAVE_conditional_execution
3146 /* With conditional execution, we cannot back up the
3147 live information so easily, since the conditional
3148 death data structures are not so self-contained.
3149 So record that we've made a modification to this
3150 block and update life information at the end. */
3151 SET_BIT (blocks, b);
3152 changed = 1;
3154 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3155 peep2_insn_data[i].insn = NULL_RTX;
3156 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3157 #else
3158 /* Back up lifetime information past the end of the
3159 newly created sequence. */
3160 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3161 i = 0;
3162 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3164 /* Update life information for the new sequence. */
3167 if (INSN_P (try))
3169 if (--i < 0)
3170 i = MAX_INSNS_PER_PEEP2;
3171 peep2_insn_data[i].insn = try;
3172 propagate_one_insn (pbi, try);
3173 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3175 try = PREV_INSN (try);
3177 while (try != prev);
3179 /* ??? Should verify that LIVE now matches what we
3180 had before the new sequence. */
3182 peep2_current = i;
3183 #endif
3187 if (insn == bb->head)
3188 break;
3191 free_propagate_block_info (pbi);
3194 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3195 FREE_REG_SET (peep2_insn_data[i].live_before);
3196 FREE_REG_SET (live);
3198 #ifdef HAVE_conditional_execution
3199 count_or_remove_death_notes (blocks, 1);
3200 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3201 sbitmap_free (blocks);
3202 #endif
3204 #endif /* HAVE_peephole2 */