* configure.in (LIMITS_INC_SRCDIR): New variable.
[official-gcc.git] / gcc / recog.c
blobf847b4f8142f5097b64d67571ee405a9b4cbd21d
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "hard-reg-set.h"
30 #include "recog.h"
31 #include "regs.h"
32 #include "function.h"
33 #include "flags.h"
34 #include "real.h"
35 #include "toplev.h"
36 #include "basic-block.h"
37 #include "output.h"
38 #include "reload.h"
/* Provide default push/pop addressing codes derived from the direction
   of stack growth, for targets that do not define them explicitly.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
56 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
57 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
58 static rtx *find_constant_term_loc PARAMS ((rtx *));
59 static int insn_invalid_p PARAMS ((rtx));
61 /* Nonzero means allow operands to be volatile.
62 This should be 0 if you are generating rtl, such as if you are calling
63 the functions in optabs.c and expmed.c (most of the time).
64 This should be 1 if all valid insns need to be recognized,
65 such as in regclass.c and final.c and reload.c.
67 init_recog and init_recog_no_volatile are responsible for setting this. */
69 int volatile_ok;
71 struct recog_data recog_data;
73 /* Contains a vector of operand_alternative structures for every operand.
74 Set up by preprocess_constraints. */
75 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
77 /* On return from `constrain_operands', indicate which alternative
78 was satisfied. */
80 int which_alternative;
82 /* Nonzero after end of reload pass.
83 Set to 1 or 0 by toplev.c.
84 Controls the significance of (SUBREG (MEM)). */
86 int reload_completed;
88 /* Initialize data used by the function `recog'.
89 This must be called once in the compilation of a function
90 before any insn recognition may be done in the function. */
92 void
93 init_recog_no_volatile ()
95 volatile_ok = 0;
98 void
99 init_recog ()
101 volatile_ok = 1;
104 /* Try recognizing the instruction INSN,
105 and return the code number that results.
106 Remember the code so that repeated calls do not
107 need to spend the time for actual rerecognition.
109 This function is the normal interface to instruction recognition.
110 The automatically-generated function `recog' is normally called
111 through this one. (The only exception is in combine.c.) */
114 recog_memoized_1 (insn)
115 rtx insn;
117 if (INSN_CODE (insn) < 0)
118 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
119 return INSN_CODE (insn);
122 /* Check that X is an insn-body for an `asm' with operands
123 and that the operands mentioned in it are legitimate. */
126 check_asm_operands (x)
127 rtx x;
129 int noperands;
130 rtx *operands;
131 const char **constraints;
132 int i;
134 /* Post-reload, be more strict with things. */
135 if (reload_completed)
137 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
138 extract_insn (make_insn_raw (x));
139 constrain_operands (1);
140 return which_alternative >= 0;
143 noperands = asm_noperands (x);
144 if (noperands < 0)
145 return 0;
146 if (noperands == 0)
147 return 1;
149 operands = (rtx *) alloca (noperands * sizeof (rtx));
150 constraints = (const char **) alloca (noperands * sizeof (char *));
152 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
154 for (i = 0; i < noperands; i++)
156 const char *c = constraints[i];
157 if (c[0] == '%')
158 c++;
159 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
160 c = constraints[c[0] - '0'];
162 if (! asm_operand_ok (operands[i], c))
163 return 0;
166 return 1;
169 /* Static data for the next two routines. */
171 typedef struct change_t
173 rtx object;
174 int old_code;
175 rtx *loc;
176 rtx old;
177 } change_t;
179 static change_t *changes;
180 static int changes_allocated;
182 static int num_changes = 0;
184 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
185 at which NEW will be placed. If OBJECT is zero, no validation is done,
186 the change is simply made.
188 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
189 will be called with the address and mode as parameters. If OBJECT is
190 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
191 the change in place.
193 IN_GROUP is non-zero if this is part of a group of changes that must be
194 performed as a group. In that case, the changes will be stored. The
195 function `apply_change_group' will validate and apply the changes.
197 If IN_GROUP is zero, this is a single change. Try to recognize the insn
198 or validate the memory reference with the change applied. If the result
199 is not valid for the machine, suppress the change and return zero.
200 Otherwise, perform the change and return 1. */
203 validate_change (object, loc, new, in_group)
204 rtx object;
205 rtx *loc;
206 rtx new;
207 int in_group;
209 rtx old = *loc;
211 if (old == new || rtx_equal_p (old, new))
212 return 1;
214 if (in_group == 0 && num_changes != 0)
215 abort ();
217 *loc = new;
219 /* Save the information describing this change. */
220 if (num_changes >= changes_allocated)
222 if (changes_allocated == 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated = MAX_RECOG_OPERANDS * 5;
226 else
227 changes_allocated *= 2;
229 changes =
230 (change_t*) xrealloc (changes,
231 sizeof (change_t) * changes_allocated);
234 changes[num_changes].object = object;
235 changes[num_changes].loc = loc;
236 changes[num_changes].old = old;
238 if (object && GET_CODE (object) != MEM)
240 /* Set INSN_CODE to force rerecognition of insn. Save old code in
241 case invalid. */
242 changes[num_changes].old_code = INSN_CODE (object);
243 INSN_CODE (object) = -1;
246 num_changes++;
248 /* If we are making a group of changes, return 1. Otherwise, validate the
249 change group we made. */
251 if (in_group)
252 return 1;
253 else
254 return apply_change_group ();
257 /* This subroutine of apply_change_group verifies whether the changes to INSN
258 were valid; i.e. whether INSN can still be recognized. */
260 static int
261 insn_invalid_p (insn)
262 rtx insn;
264 int icode = recog_memoized (insn);
265 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
267 if (is_asm && ! check_asm_operands (PATTERN (insn)))
268 return 1;
269 if (! is_asm && icode < 0)
270 return 1;
272 /* After reload, verify that all constraints are satisfied. */
273 if (reload_completed)
275 extract_insn (insn);
277 if (! constrain_operands (1))
278 return 1;
281 return 0;
284 /* Apply a group of changes previously issued with `validate_change'.
285 Return 1 if all changes are valid, zero otherwise. */
288 apply_change_group ()
290 int i;
292 /* The changes have been applied and all INSN_CODEs have been reset to force
293 rerecognition.
295 The changes are valid if we aren't given an object, or if we are
296 given a MEM and it still is a valid address, or if this is in insn
297 and it is recognized. In the latter case, if reload has completed,
298 we also require that the operands meet the constraints for
299 the insn. */
301 for (i = 0; i < num_changes; i++)
303 rtx object = changes[i].object;
305 if (object == 0)
306 continue;
308 if (GET_CODE (object) == MEM)
310 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
311 break;
313 else if (insn_invalid_p (object))
315 rtx pat = PATTERN (object);
317 /* Perhaps we couldn't recognize the insn because there were
318 extra CLOBBERs at the end. If so, try to re-recognize
319 without the last CLOBBER (later iterations will cause each of
320 them to be eliminated, in turn). But don't do this if we
321 have an ASM_OPERAND. */
322 if (GET_CODE (pat) == PARALLEL
323 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
324 && asm_noperands (PATTERN (object)) < 0)
326 rtx newpat;
328 if (XVECLEN (pat, 0) == 2)
329 newpat = XVECEXP (pat, 0, 0);
330 else
332 int j;
334 newpat
335 = gen_rtx_PARALLEL (VOIDmode,
336 rtvec_alloc (XVECLEN (pat, 0) - 1));
337 for (j = 0; j < XVECLEN (newpat, 0); j++)
338 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
341 /* Add a new change to this group to replace the pattern
342 with this new pattern. Then consider this change
343 as having succeeded. The change we added will
344 cause the entire call to fail if things remain invalid.
346 Note that this can lose if a later change than the one
347 we are processing specified &XVECEXP (PATTERN (object), 0, X)
348 but this shouldn't occur. */
350 validate_change (object, &PATTERN (object), newpat, 1);
352 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
353 /* If this insn is a CLOBBER or USE, it is always valid, but is
354 never recognized. */
355 continue;
356 else
357 break;
361 if (i == num_changes)
363 num_changes = 0;
364 return 1;
366 else
368 cancel_changes (0);
369 return 0;
373 /* Return the number of changes so far in the current group. */
376 num_validated_changes ()
378 return num_changes;
381 /* Retract the changes numbered NUM and up. */
383 void
384 cancel_changes (num)
385 int num;
387 int i;
389 /* Back out all the changes. Do this in the opposite order in which
390 they were made. */
391 for (i = num_changes - 1; i >= num; i--)
393 *changes[i].loc = changes[i].old;
394 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
395 INSN_CODE (changes[i].object) = changes[i].old_code;
397 num_changes = num;
400 /* Replace every occurrence of FROM in X with TO. Mark each change with
401 validate_change passing OBJECT. */
403 static void
404 validate_replace_rtx_1 (loc, from, to, object)
405 rtx *loc;
406 rtx from, to, object;
408 register int i, j;
409 register const char *fmt;
410 register rtx x = *loc;
411 enum rtx_code code;
413 if (!x)
414 return;
415 code = GET_CODE (x);
416 /* X matches FROM if it is the same rtx or they are both referring to the
417 same register in the same mode. Avoid calling rtx_equal_p unless the
418 operands look similar. */
420 if (x == from
421 || (GET_CODE (x) == REG && GET_CODE (from) == REG
422 && GET_MODE (x) == GET_MODE (from)
423 && REGNO (x) == REGNO (from))
424 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
425 && rtx_equal_p (x, from)))
427 validate_change (object, loc, to, 1);
428 return;
431 /* For commutative or comparison operations, try replacing each argument
432 separately and seeing if we made any changes. If so, put a constant
433 argument last.*/
434 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
436 int prev_changes = num_changes;
438 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
439 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
440 /* If nothing changed, we can exit now. In fact, continuing on
441 into the switch statement below can be wrong, eg. turning
442 (plus (symbol_ref) (const_int)) into
443 (const (plus (symbol_ref) (const_int))). This might not seem
444 so bad, but the first rtx is already enclosed in `const', so
445 we get a string of (const (const (const...))). */
446 if (prev_changes == num_changes)
447 return;
448 if (CONSTANT_P (XEXP (x, 0)))
450 validate_change (object, loc,
451 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
452 : swap_condition (code),
453 GET_MODE (x), XEXP (x, 1),
454 XEXP (x, 0)),
456 x = *loc;
457 code = GET_CODE (x);
461 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
462 done the substitution, otherwise we won't. */
464 switch (code)
466 case PLUS:
467 /* If we have a PLUS whose second operand is now a CONST_INT, use
468 plus_constant to try to simplify it. */
469 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
470 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
472 return;
474 case MINUS:
475 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
477 validate_change (object, loc,
478 plus_constant (XEXP (x, 0), - INTVAL (to)),
480 return;
482 break;
484 case ZERO_EXTEND:
485 case SIGN_EXTEND:
486 /* In these cases, the operation to be performed depends on the mode
487 of the operand. If we are replacing the operand with a VOIDmode
488 constant, we lose the information. So try to simplify the operation
489 in that case. */
490 if (GET_MODE (to) == VOIDmode
491 && (rtx_equal_p (XEXP (x, 0), from)
492 || (GET_CODE (XEXP (x, 0)) == SUBREG
493 && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
495 rtx new = NULL_RTX;
497 /* If there is a subreg involved, crop to the portion of the
498 constant that we are interested in. */
499 if (GET_CODE (XEXP (x, 0)) == SUBREG)
501 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
502 to = operand_subword (to, SUBREG_WORD (XEXP (x, 0)),
503 0, GET_MODE (from));
504 else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
505 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
506 <= HOST_BITS_PER_WIDE_INT))
508 int i = SUBREG_WORD (XEXP (x, 0)) * BITS_PER_WORD;
509 HOST_WIDE_INT valh;
510 unsigned HOST_WIDE_INT vall;
512 if (GET_CODE (to) == CONST_INT)
514 vall = INTVAL (to);
515 valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
517 else
519 vall = CONST_DOUBLE_LOW (to);
520 valh = CONST_DOUBLE_HIGH (to);
523 if (WORDS_BIG_ENDIAN)
524 i = (GET_MODE_BITSIZE (GET_MODE (from))
525 - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
526 if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
527 vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
528 else if (i >= HOST_BITS_PER_WIDE_INT)
529 vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
530 to = GEN_INT (trunc_int_for_mode (vall,
531 GET_MODE (XEXP (x, 0))));
533 else
534 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
537 /* If the above didn't fail, perform the extension from the
538 mode of the operand (and not the mode of FROM). */
539 if (to)
540 new = simplify_unary_operation (code, GET_MODE (x), to,
541 GET_MODE (XEXP (x, 0)));
543 /* If any of the above failed, substitute in something that
544 we know won't be recognized. */
545 if (!new)
546 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
548 validate_change (object, loc, new, 1);
549 return;
551 break;
553 case SUBREG:
554 /* In case we are replacing by constant, attempt to simplify it to non-SUBREG
555 expression. We can't do this later, since the information about inner mode
556 may be lost. */
557 if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
559 if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
560 && GET_MODE_SIZE (GET_MODE (from)) > UNITS_PER_WORD
561 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
563 rtx temp = operand_subword (to, SUBREG_WORD (x),
564 0, GET_MODE (from));
565 if (temp)
567 validate_change (object, loc, temp, 1);
568 return;
571 if (subreg_lowpart_p (x))
573 rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
574 if (new)
576 validate_change (object, loc, new, 1);
577 return;
581 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
582 since we are saying that the high bits don't matter. */
583 if (GET_MODE (to) == VOIDmode
584 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (from)))
586 validate_change (object, loc, to, 1);
587 return;
591 /* Changing mode twice with SUBREG => just change it once,
592 or not at all if changing back to starting mode. */
593 if (GET_CODE (to) == SUBREG
594 && rtx_equal_p (SUBREG_REG (x), from))
596 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
597 && SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
599 validate_change (object, loc, SUBREG_REG (to), 1);
600 return;
603 validate_change (object, loc,
604 gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
605 SUBREG_WORD (x) + SUBREG_WORD (to)), 1);
606 return;
609 /* If we have a SUBREG of a register that we are replacing and we are
610 replacing it with a MEM, make a new MEM and try replacing the
611 SUBREG with it. Don't do this if the MEM has a mode-dependent address
612 or if we would be widening it. */
614 if (GET_CODE (from) == REG
615 && GET_CODE (to) == MEM
616 && rtx_equal_p (SUBREG_REG (x), from)
617 && ! mode_dependent_address_p (XEXP (to, 0))
618 && ! MEM_VOLATILE_P (to)
619 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
621 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
622 enum machine_mode mode = GET_MODE (x);
623 rtx new;
625 if (BYTES_BIG_ENDIAN)
626 offset += (MIN (UNITS_PER_WORD,
627 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
628 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
630 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
631 MEM_COPY_ATTRIBUTES (new, to);
632 validate_change (object, loc, new, 1);
633 return;
635 break;
637 case ZERO_EXTRACT:
638 case SIGN_EXTRACT:
639 /* If we are replacing a register with memory, try to change the memory
640 to be the mode required for memory in extract operations (this isn't
641 likely to be an insertion operation; if it was, nothing bad will
642 happen, we might just fail in some cases). */
644 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
645 && rtx_equal_p (XEXP (x, 0), from)
646 && GET_CODE (XEXP (x, 1)) == CONST_INT
647 && GET_CODE (XEXP (x, 2)) == CONST_INT
648 && ! mode_dependent_address_p (XEXP (to, 0))
649 && ! MEM_VOLATILE_P (to))
651 enum machine_mode wanted_mode = VOIDmode;
652 enum machine_mode is_mode = GET_MODE (to);
653 int pos = INTVAL (XEXP (x, 2));
655 #ifdef HAVE_extzv
656 if (code == ZERO_EXTRACT)
658 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
659 if (wanted_mode == VOIDmode)
660 wanted_mode = word_mode;
662 #endif
663 #ifdef HAVE_extv
664 if (code == SIGN_EXTRACT)
666 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
667 if (wanted_mode == VOIDmode)
668 wanted_mode = word_mode;
670 #endif
672 /* If we have a narrower mode, we can do something. */
673 if (wanted_mode != VOIDmode
674 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
676 int offset = pos / BITS_PER_UNIT;
677 rtx newmem;
679 /* If the bytes and bits are counted differently, we
680 must adjust the offset. */
681 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
682 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
683 - offset);
685 pos %= GET_MODE_BITSIZE (wanted_mode);
687 newmem = gen_rtx_MEM (wanted_mode,
688 plus_constant (XEXP (to, 0), offset));
689 MEM_COPY_ATTRIBUTES (newmem, to);
691 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
692 validate_change (object, &XEXP (x, 0), newmem, 1);
696 break;
698 default:
699 break;
702 /* For commutative or comparison operations we've already performed
703 replacements. Don't try to perform them again. */
704 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
706 fmt = GET_RTX_FORMAT (code);
707 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
709 if (fmt[i] == 'e')
710 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
711 else if (fmt[i] == 'E')
712 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
713 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
718 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
719 with TO. After all changes have been made, validate by seeing
720 if INSN is still valid. */
723 validate_replace_rtx_subexp (from, to, insn, loc)
724 rtx from, to, insn, *loc;
726 validate_replace_rtx_1 (loc, from, to, insn);
727 return apply_change_group ();
730 /* Try replacing every occurrence of FROM in INSN with TO. After all
731 changes have been made, validate by seeing if INSN is still valid. */
734 validate_replace_rtx (from, to, insn)
735 rtx from, to, insn;
737 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
738 return apply_change_group ();
741 /* Try replacing every occurrence of FROM in INSN with TO. After all
742 changes have been made, validate by seeing if INSN is still valid. */
744 void
745 validate_replace_rtx_group (from, to, insn)
746 rtx from, to, insn;
748 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
751 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
752 SET_DESTs. After all changes have been made, validate by seeing if
753 INSN is still valid. */
756 validate_replace_src (from, to, insn)
757 rtx from, to, insn;
759 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
760 || GET_CODE (PATTERN (insn)) != SET)
761 abort ();
763 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
764 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
765 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
766 from, to, insn);
767 return apply_change_group ();
770 #ifdef HAVE_cc0
771 /* Return 1 if the insn using CC0 set by INSN does not contain
772 any ordered tests applied to the condition codes.
773 EQ and NE tests do not count. */
776 next_insn_tests_no_inequality (insn)
777 rtx insn;
779 register rtx next = next_cc0_user (insn);
781 /* If there is no next insn, we have to take the conservative choice. */
782 if (next == 0)
783 return 0;
785 return ((GET_CODE (next) == JUMP_INSN
786 || GET_CODE (next) == INSN
787 || GET_CODE (next) == CALL_INSN)
788 && ! inequality_comparisons_p (PATTERN (next)));
791 #if 0 /* This is useless since the insn that sets the cc's
792 must be followed immediately by the use of them. */
793 /* Return 1 if the CC value set up by INSN is not used. */
796 next_insns_test_no_inequality (insn)
797 rtx insn;
799 register rtx next = NEXT_INSN (insn);
801 for (; next != 0; next = NEXT_INSN (next))
803 if (GET_CODE (next) == CODE_LABEL
804 || GET_CODE (next) == BARRIER)
805 return 1;
806 if (GET_CODE (next) == NOTE)
807 continue;
808 if (inequality_comparisons_p (PATTERN (next)))
809 return 0;
810 if (sets_cc0_p (PATTERN (next)) == 1)
811 return 1;
812 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
813 return 1;
815 return 1;
817 #endif
818 #endif
820 /* This is used by find_single_use to locate an rtx that contains exactly one
821 use of DEST, which is typically either a REG or CC0. It returns a
822 pointer to the innermost rtx expression containing DEST. Appearances of
823 DEST that are being used to totally replace it are not counted. */
825 static rtx *
826 find_single_use_1 (dest, loc)
827 rtx dest;
828 rtx *loc;
830 rtx x = *loc;
831 enum rtx_code code = GET_CODE (x);
832 rtx *result = 0;
833 rtx *this_result;
834 int i;
835 const char *fmt;
837 switch (code)
839 case CONST_INT:
840 case CONST:
841 case LABEL_REF:
842 case SYMBOL_REF:
843 case CONST_DOUBLE:
844 case CLOBBER:
845 return 0;
847 case SET:
848 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
849 of a REG that occupies all of the REG, the insn uses DEST if
850 it is mentioned in the destination or the source. Otherwise, we
851 need just check the source. */
852 if (GET_CODE (SET_DEST (x)) != CC0
853 && GET_CODE (SET_DEST (x)) != PC
854 && GET_CODE (SET_DEST (x)) != REG
855 && ! (GET_CODE (SET_DEST (x)) == SUBREG
856 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
857 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
858 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
859 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
860 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
861 break;
863 return find_single_use_1 (dest, &SET_SRC (x));
865 case MEM:
866 case SUBREG:
867 return find_single_use_1 (dest, &XEXP (x, 0));
869 default:
870 break;
873 /* If it wasn't one of the common cases above, check each expression and
874 vector of this code. Look for a unique usage of DEST. */
876 fmt = GET_RTX_FORMAT (code);
877 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
879 if (fmt[i] == 'e')
881 if (dest == XEXP (x, i)
882 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
883 && REGNO (dest) == REGNO (XEXP (x, i))))
884 this_result = loc;
885 else
886 this_result = find_single_use_1 (dest, &XEXP (x, i));
888 if (result == 0)
889 result = this_result;
890 else if (this_result)
891 /* Duplicate usage. */
892 return 0;
894 else if (fmt[i] == 'E')
896 int j;
898 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
900 if (XVECEXP (x, i, j) == dest
901 || (GET_CODE (dest) == REG
902 && GET_CODE (XVECEXP (x, i, j)) == REG
903 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
904 this_result = loc;
905 else
906 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
908 if (result == 0)
909 result = this_result;
910 else if (this_result)
911 return 0;
916 return result;
919 /* See if DEST, produced in INSN, is used only a single time in the
920 sequel. If so, return a pointer to the innermost rtx expression in which
921 it is used.
923 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
925 This routine will return usually zero either before flow is called (because
926 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
927 note can't be trusted).
929 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
930 care about REG_DEAD notes or LOG_LINKS.
932 Otherwise, we find the single use by finding an insn that has a
933 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
934 only referenced once in that insn, we know that it must be the first
935 and last insn referencing DEST. */
937 rtx *
938 find_single_use (dest, insn, ploc)
939 rtx dest;
940 rtx insn;
941 rtx *ploc;
943 rtx next;
944 rtx *result;
945 rtx link;
947 #ifdef HAVE_cc0
948 if (dest == cc0_rtx)
950 next = NEXT_INSN (insn);
951 if (next == 0
952 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
953 return 0;
955 result = find_single_use_1 (dest, &PATTERN (next));
956 if (result && ploc)
957 *ploc = next;
958 return result;
960 #endif
962 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
963 return 0;
965 for (next = next_nonnote_insn (insn);
966 next != 0 && GET_CODE (next) != CODE_LABEL;
967 next = next_nonnote_insn (next))
968 if (INSN_P (next) && dead_or_set_p (next, dest))
970 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
971 if (XEXP (link, 0) == insn)
972 break;
974 if (link)
976 result = find_single_use_1 (dest, &PATTERN (next));
977 if (ploc)
978 *ploc = next;
979 return result;
983 return 0;
986 /* Return 1 if OP is a valid general operand for machine mode MODE.
987 This is either a register reference, a memory reference,
988 or a constant. In the case of a memory reference, the address
989 is checked for general validity for the target machine.
991 Register and memory references must have mode MODE in order to be valid,
992 but some constants have no machine mode and are valid for any mode.
994 If MODE is VOIDmode, OP is checked for validity for whatever mode
995 it has.
997 The main use of this function is as a predicate in match_operand
998 expressions in the machine description.
1000 For an explanation of this function's behavior for registers of
1001 class NO_REGS, see the comment for `register_operand'. */
1004 general_operand (op, mode)
1005 register rtx op;
1006 enum machine_mode mode;
1008 register enum rtx_code code = GET_CODE (op);
1009 int mode_altering_drug = 0;
1011 if (mode == VOIDmode)
1012 mode = GET_MODE (op);
1014 /* Don't accept CONST_INT or anything similar
1015 if the caller wants something floating. */
1016 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1017 && GET_MODE_CLASS (mode) != MODE_INT
1018 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1019 return 0;
1021 if (CONSTANT_P (op))
1022 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1023 || mode == VOIDmode)
1024 #ifdef LEGITIMATE_PIC_OPERAND_P
1025 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1026 #endif
1027 && LEGITIMATE_CONSTANT_P (op));
1029 /* Except for certain constants with VOIDmode, already checked for,
1030 OP's mode must match MODE if MODE specifies a mode. */
1032 if (GET_MODE (op) != mode)
1033 return 0;
1035 if (code == SUBREG)
1037 #ifdef INSN_SCHEDULING
1038 /* On machines that have insn scheduling, we want all memory
1039 reference to be explicit, so outlaw paradoxical SUBREGs. */
1040 if (GET_CODE (SUBREG_REG (op)) == MEM
1041 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1042 return 0;
1043 #endif
1045 op = SUBREG_REG (op);
1046 code = GET_CODE (op);
1047 #if 0
1048 /* No longer needed, since (SUBREG (MEM...))
1049 will load the MEM into a reload reg in the MEM's own mode. */
1050 mode_altering_drug = 1;
1051 #endif
1054 if (code == REG)
1055 /* A register whose class is NO_REGS is not a general operand. */
1056 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1057 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1059 if (code == MEM)
1061 register rtx y = XEXP (op, 0);
1063 if (! volatile_ok && MEM_VOLATILE_P (op))
1064 return 0;
1066 if (GET_CODE (y) == ADDRESSOF)
1067 return 1;
1069 /* Use the mem's mode, since it will be reloaded thus. */
1070 mode = GET_MODE (op);
1071 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1074 /* Pretend this is an operand for now; we'll run force_operand
1075 on its replacement in fixup_var_refs_1. */
1076 if (code == ADDRESSOF)
1077 return 1;
1079 return 0;
1081 win:
1082 if (mode_altering_drug)
1083 return ! mode_dependent_address_p (XEXP (op, 0));
1084 return 1;
1087 /* Return 1 if OP is a valid memory address for a memory reference
1088 of mode MODE.
1090 The main use of this function is as a predicate in match_operand
1091 expressions in the machine description. */
1094 address_operand (op, mode)
1095 register rtx op;
1096 enum machine_mode mode;
1098 return memory_address_p (mode, op);
1101 /* Return 1 if OP is a register reference of mode MODE.
1102 If MODE is VOIDmode, accept a register in any mode.
1104 The main use of this function is as a predicate in match_operand
1105 expressions in the machine description.
1107 As a special exception, registers whose class is NO_REGS are
1108 not accepted by `register_operand'. The reason for this change
1109 is to allow the representation of special architecture artifacts
1110 (such as a condition code register) without extending the rtl
1111 definitions. Since registers of class NO_REGS cannot be used
1112 as registers in any case where register classes are examined,
1113 it is most consistent to keep this function from accepting them. */
1116 register_operand (op, mode)
1117 register rtx op;
1118 enum machine_mode mode;
/* Mode must match exactly unless the caller passed VOIDmode. */
1120 if (GET_MODE (op) != mode && mode != VOIDmode)
1121 return 0;
1123 if (GET_CODE (op) == SUBREG)
1125 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1126 because it is guaranteed to be reloaded into one.
1127 Just make sure the MEM is valid in itself.
1128 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1129 but currently it does result from (SUBREG (REG)...) where the
1130 reg went on the stack.) */
1131 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1132 return general_operand (op, mode);
/* Reject a SUBREG of a hard register whose class cannot tolerate a
   mode change to MODE (target-specific; complex modes are exempt
   since the subreg then selects a whole part). */
1134 #ifdef CLASS_CANNOT_CHANGE_MODE
1135 if (GET_CODE (SUBREG_REG (op)) == REG
1136 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1137 && (TEST_HARD_REG_BIT
1138 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1139 REGNO (SUBREG_REG (op))))
1140 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1141 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1142 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1143 return 0;
1144 #endif
/* Strip the SUBREG and examine the inner expression below. */
1146 op = SUBREG_REG (op);
1149 /* If we have an ADDRESSOF, consider it valid since it will be
1150 converted into something that will not be a MEM. */
1151 if (GET_CODE (op) == ADDRESSOF)
1152 return 1;
1154 /* We don't consider registers whose class is NO_REGS
1155 to be a register operand. */
1156 return (GET_CODE (op) == REG
1157 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1158 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1161 /* Return 1 for a register in Pmode; ignore the tested mode. */
/* Used for address-sized registers: delegates to register_operand
   with the target's pointer mode regardless of MODE. */
1164 pmode_register_operand (op, mode)
1165 rtx op;
1166 enum machine_mode mode ATTRIBUTE_UNUSED;
1168 return register_operand (op, Pmode);
1171 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1172 or a hard register. */
1175 scratch_operand (op, mode)
1176 register rtx op;
1177 enum machine_mode mode;
1179 if (GET_MODE (op) != mode && mode != VOIDmode)
1180 return 0;
/* Pseudo registers are NOT accepted here -- only a SCRATCH rtx or a
   hard register (regno below FIRST_PSEUDO_REGISTER). */
1182 return (GET_CODE (op) == SCRATCH
1183 || (GET_CODE (op) == REG
1184 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1187 /* Return 1 if OP is a valid immediate operand for mode MODE.
1189 The main use of this function is as a predicate in match_operand
1190 expressions in the machine description. */
1193 immediate_operand (op, mode)
1194 register rtx op;
1195 enum machine_mode mode;
1197 /* Don't accept CONST_INT or anything similar
1198 if the caller wants something floating. */
1199 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1200 && GET_MODE_CLASS (mode) != MODE_INT
1201 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1202 return 0;
1204 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1205 result in 0/1. It seems a safe assumption that this is
1206 in range for everyone. */
1207 if (GET_CODE (op) == CONSTANT_P_RTX)
1208 return 1;
/* Otherwise require a genuine constant that matches MODE (or is
   modeless), is legitimate under PIC if relevant, and passes the
   target's LEGITIMATE_CONSTANT_P check. */
1210 return (CONSTANT_P (op)
1211 && (GET_MODE (op) == mode || mode == VOIDmode
1212 || GET_MODE (op) == VOIDmode)
1213 #ifdef LEGITIMATE_PIC_OPERAND_P
1214 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1215 #endif
1216 && LEGITIMATE_CONSTANT_P (op));
1219 /* Returns 1 if OP is an operand that is a CONST_INT. */
/* MODE is ignored: CONST_INT rtxes carry no mode of their own. */
1222 const_int_operand (op, mode)
1223 register rtx op;
1224 enum machine_mode mode ATTRIBUTE_UNUSED;
1226 return GET_CODE (op) == CONST_INT;
1229 /* Returns 1 if OP is an operand that is a constant integer or constant
1230 floating-point number. */
1233 const_double_operand (op, mode)
1234 register rtx op;
1235 enum machine_mode mode;
1237 /* Don't accept CONST_INT or anything similar
1238 if the caller wants something floating. */
1239 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1240 && GET_MODE_CLASS (mode) != MODE_INT
1241 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1242 return 0;
/* Accept CONST_DOUBLE or CONST_INT whose mode agrees with MODE
   (VOIDmode on either side counts as a wildcard). */
1244 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1245 && (mode == VOIDmode || GET_MODE (op) == mode
1246 || GET_MODE (op) == VOIDmode));
1249 /* Return 1 if OP is a general operand that is not an immediate operand. */
/* I.e. a register or memory operand: general_operand minus constants. */
1252 nonimmediate_operand (op, mode)
1253 register rtx op;
1254 enum machine_mode mode;
1256 return (general_operand (op, mode) && ! CONSTANT_P (op));
1259 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1262 nonmemory_operand (op, mode)
1263 register rtx op;
1264 enum machine_mode mode;
/* Constant case: mirror the checks done by immediate_operand. */
1266 if (CONSTANT_P (op))
1268 /* Don't accept CONST_INT or anything similar
1269 if the caller wants something floating. */
1270 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1271 && GET_MODE_CLASS (mode) != MODE_INT
1272 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1273 return 0;
1275 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1276 || mode == VOIDmode)
1277 #ifdef LEGITIMATE_PIC_OPERAND_P
1278 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1279 #endif
1280 && LEGITIMATE_CONSTANT_P (op));
/* Register case: mirror the checks done by register_operand. */
1283 if (GET_MODE (op) != mode && mode != VOIDmode)
1284 return 0;
1286 if (GET_CODE (op) == SUBREG)
1288 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1289 because it is guaranteed to be reloaded into one.
1290 Just make sure the MEM is valid in itself.
1291 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1292 but currently it does result from (SUBREG (REG)...) where the
1293 reg went on the stack.) */
1294 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1295 return general_operand (op, mode);
1296 op = SUBREG_REG (op);
1299 /* We don't consider registers whose class is NO_REGS
1300 to be a register operand. */
1301 return (GET_CODE (op) == REG
1302 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1303 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1306 /* Return 1 if OP is a valid operand that stands for pushing a
1307 value of mode MODE onto the stack.
1309 The main use of this function is as a predicate in match_operand
1310 expressions in the machine description. */
/* Accepts only (mem:MODE (STACK_PUSH_CODE (reg sp))), where
   STACK_PUSH_CODE is PRE_DEC or PRE_INC per STACK_GROWS_DOWNWARD. */
1313 push_operand (op, mode)
1314 rtx op;
1315 enum machine_mode mode;
1317 if (GET_CODE (op) != MEM)
1318 return 0;
1320 if (mode != VOIDmode && GET_MODE (op) != mode)
1321 return 0;
/* Descend into the MEM's address. */
1323 op = XEXP (op, 0);
1325 if (GET_CODE (op) != STACK_PUSH_CODE)
1326 return 0;
1328 return XEXP (op, 0) == stack_pointer_rtx;
1331 /* Return 1 if OP is a valid operand that stands for popping a
1332 value of mode MODE off the stack.
1334 The main use of this function is as a predicate in match_operand
1335 expressions in the machine description. */
/* Mirror of push_operand: accepts only
   (mem:MODE (STACK_POP_CODE (reg sp))). */
1338 pop_operand (op, mode)
1339 rtx op;
1340 enum machine_mode mode;
1342 if (GET_CODE (op) != MEM)
1343 return 0;
1345 if (mode != VOIDmode && GET_MODE (op) != mode)
1346 return 0;
/* Descend into the MEM's address. */
1348 op = XEXP (op, 0);
1350 if (GET_CODE (op) != STACK_POP_CODE)
1351 return 0;
1353 return XEXP (op, 0) == stack_pointer_rtx;
1356 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1359 memory_address_p (mode, addr)
1360 enum machine_mode mode ATTRIBUTE_UNUSED;
1361 register rtx addr;
/* ADDRESSOF is always valid: it will be replaced before final code. */
1363 if (GET_CODE (addr) == ADDRESSOF)
1364 return 1;
/* The target macro jumps to `win' when the address is legitimate;
   falling through means it is not. */
1366 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1367 return 0;
1369 win:
1370 return 1;
1373 /* Return 1 if OP is a valid memory reference with mode MODE,
1374 including a valid address.
1376 The main use of this function is as a predicate in match_operand
1377 expressions in the machine description. */
1380 memory_operand (op, mode)
1381 register rtx op;
1382 enum machine_mode mode;
1384 rtx inner;
1386 if (! reload_completed)
1387 /* Note that no SUBREG is a memory operand before end of reload pass,
1388 because (SUBREG (MEM...)) forces reloading into a register. */
1389 return GET_CODE (op) == MEM && general_operand (op, mode);
1391 if (mode != VOIDmode && GET_MODE (op) != mode)
1392 return 0;
/* After reload, a (SUBREG (MEM ...)) counts as memory: look through
   the SUBREG to find the MEM, but validate OP itself. */
1394 inner = op;
1395 if (GET_CODE (inner) == SUBREG)
1396 inner = SUBREG_REG (inner);
1398 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1401 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1402 that is, a memory reference whose address is a general_operand. */
1405 indirect_operand (op, mode)
1406 register rtx op;
1407 enum machine_mode mode;
1409 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1410 if (! reload_completed
1411 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
/* Compute the byte offset the SUBREG implies into the underlying MEM;
   big-endian targets adjust because the subword sits at the high end. */
1413 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1414 rtx inner = SUBREG_REG (op);
1416 if (BYTES_BIG_ENDIAN)
1417 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1418 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1420 if (mode != VOIDmode && GET_MODE (op) != mode)
1421 return 0;
1423 /* The only way that we can have a general_operand as the resulting
1424 address is if OFFSET is zero and the address already is an operand
1425 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1426 operand. */
1428 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1429 || (GET_CODE (XEXP (inner, 0)) == PLUS
1430 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1431 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1432 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
/* Ordinary case: a MEM whose address itself is a general_operand. */
1435 return (GET_CODE (op) == MEM
1436 && memory_operand (op, mode)
1437 && general_operand (XEXP (op, 0), Pmode));
1440 /* Return 1 if this is a comparison operator. This allows the use of
1441 MATCH_OPERATOR to recognize all the branch insns. */
/* RTX class '<' covers all comparison codes (EQ, NE, LT, GE, ...). */
1444 comparison_operator (op, mode)
1445 register rtx op;
1446 enum machine_mode mode;
1448 return ((mode == VOIDmode || GET_MODE (op) == mode)
1449 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1452 /* If BODY is an insn body that uses ASM_OPERANDS,
1453 return the number of operands (both input and output) in the insn.
1454 Otherwise return -1. */
1457 asm_noperands (body)
1458 rtx body;
/* The four recognized shapes are:
   (asm_operands ...)                               -- inputs only
   (set DEST (asm_operands ...))                    -- one output
   (parallel [(set ...)... (clobber ...)...])       -- multiple outputs
   (parallel [(asm_operands ...) (clobber ...)...]) -- inputs + clobbers */
1460 switch (GET_CODE (body))
1462 case ASM_OPERANDS:
1463 /* No output operands: return number of input operands. */
1464 return ASM_OPERANDS_INPUT_LENGTH (body);
1465 case SET:
1466 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1467 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1468 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1469 else
1470 return -1;
1471 case PARALLEL:
1472 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1473 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1475 /* Multiple output operands, or 1 output plus some clobbers:
1476 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1477 int i;
1478 int n_sets;
1480 /* Count backwards through CLOBBERs to determine number of SETs. */
1481 for (i = XVECLEN (body, 0); i > 0; i--)
1483 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1484 break;
1485 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1486 return -1;
1489 /* N_SETS is now number of output operands. */
1490 n_sets = i;
1492 /* Verify that all the SETs we have
1493 came from a single original asm_operands insn
1494 (so that invalid combinations are blocked). */
1495 for (i = 0; i < n_sets; i++)
1497 rtx elt = XVECEXP (body, 0, i);
1498 if (GET_CODE (elt) != SET)
1499 return -1;
1500 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1501 return -1;
1502 /* If these ASM_OPERANDS rtx's came from different original insns
1503 then they aren't allowed together. */
1504 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1505 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1506 return -1;
/* Total operands = inputs (shared by all the SETs) + outputs. */
1508 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1509 + n_sets);
1511 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1513 /* 0 outputs, but some clobbers:
1514 body is [(asm_operands ...) (clobber (reg ...))...]. */
1515 int i;
1517 /* Make sure all the other parallel things really are clobbers. */
1518 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1519 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1520 return -1;
1522 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1524 else
1525 return -1;
1526 default:
1527 return -1;
1531 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1532 copy its operands (both input and output) into the vector OPERANDS,
1533 the locations of the operands within the insn into the vector OPERAND_LOCS,
1534 and the constraints for the operands into CONSTRAINTS.
1535 Write the modes of the operands into MODES.
1536 Return the assembler-template.
1538 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1539 we don't store that info. */
/* Handles the same four body shapes as asm_noperands, above.
   Output operands always come first in the vectors, then inputs. */
1541 const char *
1542 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1543 rtx body;
1544 rtx *operands;
1545 rtx **operand_locs;
1546 const char **constraints;
1547 enum machine_mode *modes;
1549 register int i;
1550 int noperands;
1551 const char *template = 0;
1553 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1555 rtx asmop = SET_SRC (body);
1556 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1558 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
/* Inputs occupy slots 1..noperands-1; slot 0 is the output. */
1560 for (i = 1; i < noperands; i++)
1562 if (operand_locs)
1563 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1564 if (operands)
1565 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1566 if (constraints)
1567 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1568 if (modes)
1569 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1572 /* The output is in the SET.
1573 Its constraint is in the ASM_OPERANDS itself. */
1574 if (operands)
1575 operands[0] = SET_DEST (body);
1576 if (operand_locs)
1577 operand_locs[0] = &SET_DEST (body);
1578 if (constraints)
1579 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1580 if (modes)
1581 modes[0] = GET_MODE (SET_DEST (body));
1582 template = ASM_OPERANDS_TEMPLATE (asmop);
1584 else if (GET_CODE (body) == ASM_OPERANDS)
1586 rtx asmop = body;
1587 /* No output operands: BODY is (asm_operands ....). */
1589 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1591 /* The input operands are found in the 1st element vector. */
1592 /* Constraints for inputs are in the 2nd element vector. */
1593 for (i = 0; i < noperands; i++)
1595 if (operand_locs)
1596 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1597 if (operands)
1598 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1599 if (constraints)
1600 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1601 if (modes)
1602 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1604 template = ASM_OPERANDS_TEMPLATE (asmop);
1606 else if (GET_CODE (body) == PARALLEL
1607 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1609 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1610 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1611 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1612 int nout = 0; /* Does not include CLOBBERs. */
1614 /* At least one output, plus some CLOBBERs. */
1616 /* The outputs are in the SETs.
1617 Their constraints are in the ASM_OPERANDS itself. */
1618 for (i = 0; i < nparallel; i++)
1620 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1621 break; /* Past last SET */
1623 if (operands)
1624 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1625 if (operand_locs)
1626 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1627 if (constraints)
1628 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1629 if (modes)
1630 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1631 nout++;
/* Inputs follow the outputs in the vectors. */
1634 for (i = 0; i < nin; i++)
1636 if (operand_locs)
1637 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1638 if (operands)
1639 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1640 if (constraints)
1641 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1642 if (modes)
1643 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1646 template = ASM_OPERANDS_TEMPLATE (asmop);
1648 else if (GET_CODE (body) == PARALLEL
1649 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1651 /* No outputs, but some CLOBBERs. */
1653 rtx asmop = XVECEXP (body, 0, 0);
1654 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1656 for (i = 0; i < nin; i++)
1658 if (operand_locs)
1659 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1660 if (operands)
1661 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1662 if (constraints)
1663 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1664 if (modes)
1665 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1668 template = ASM_OPERANDS_TEMPLATE (asmop);
/* Remains 0 if BODY had no recognized asm shape. */
1671 return template;
1674 /* Check if an asm_operand matches it's constraints.
1675 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
/* Iterates over every letter of CONSTRAINT (alternatives are separated
   by ',' but treated uniformly here); returns 1 as soon as any letter
   accepts OP.  Must only be used before reload (see abort below). */
1678 asm_operand_ok (op, constraint)
1679 rtx op;
1680 const char *constraint;
1682 int result = 0;
1684 /* Use constrain_operands after reload. */
1685 if (reload_completed)
1686 abort ();
1688 while (*constraint)
1690 char c = *constraint++;
1691 switch (c)
/* Modifier characters carry no operand requirement of their own. */
1693 case '=':
1694 case '+':
1695 case '*':
1696 case '%':
1697 case '?':
1698 case '!':
1699 case '#':
1700 case '&':
1701 case ',':
1702 break;
1704 case '0': case '1': case '2': case '3': case '4':
1705 case '5': case '6': case '7': case '8': case '9':
1706 /* For best results, our caller should have given us the
1707 proper matching constraint, but we can't actually fail
1708 the check if they didn't. Indicate that results are
1709 inconclusive. */
1710 result = -1;
1711 break;
1713 case 'p':
1714 if (address_operand (op, VOIDmode))
1715 return 1;
1716 break;
1718 case 'm':
1719 case 'V': /* non-offsettable */
1720 if (memory_operand (op, VOIDmode))
1721 return 1;
1722 break;
1724 case 'o': /* offsettable */
1725 if (offsettable_nonstrict_memref_p (op))
1726 return 1;
1727 break;
1729 case '<':
1730 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1731 excepting those that expand_call created. Further, on some
1732 machines which do not have generalized auto inc/dec, an inc/dec
1733 is not a memory_operand.
1735 Match any memory and hope things are resolved after reload. */
/* The `(1 || ...)' below deliberately short-circuits the PRE_DEC/
   POST_DEC test, per the comment above: any MEM is accepted. */
1737 if (GET_CODE (op) == MEM
1738 && (1
1739 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1740 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1741 return 1;
1742 break;
1744 case '>':
1745 if (GET_CODE (op) == MEM
1746 && (1
1747 || GET_CODE (XEXP (op, 0)) == PRE_INC
1748 || GET_CODE (XEXP (op, 0)) == POST_INC))
1749 return 1;
1750 break;
1752 case 'E':
1753 #ifndef REAL_ARITHMETIC
1754 /* Match any floating double constant, but only if
1755 we can examine the bits of it reliably. */
1756 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1757 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1758 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1759 break;
1760 #endif
1761 /* FALLTHRU */
1763 case 'F':
1764 if (GET_CODE (op) == CONST_DOUBLE)
1765 return 1;
1766 break;
1768 case 'G':
1769 if (GET_CODE (op) == CONST_DOUBLE
1770 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1771 return 1;
1772 break;
1773 case 'H':
1774 if (GET_CODE (op) == CONST_DOUBLE
1775 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1776 return 1;
1777 break;
1779 case 's':
/* 's' excludes known-integer constants; those fall through to 'i'
   only when they are NOT CONST_INT / VOIDmode CONST_DOUBLE. */
1780 if (GET_CODE (op) == CONST_INT
1781 || (GET_CODE (op) == CONST_DOUBLE
1782 && GET_MODE (op) == VOIDmode))
1783 break;
1784 /* FALLTHRU */
1786 case 'i':
1787 if (CONSTANT_P (op)
1788 #ifdef LEGITIMATE_PIC_OPERAND_P
1789 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1790 #endif
1792 return 1;
1793 break;
1795 case 'n':
1796 if (GET_CODE (op) == CONST_INT
1797 || (GET_CODE (op) == CONST_DOUBLE
1798 && GET_MODE (op) == VOIDmode))
1799 return 1;
1800 break;
/* 'I'..'P': target-defined ranges of CONST_INT values. */
1802 case 'I':
1803 if (GET_CODE (op) == CONST_INT
1804 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1805 return 1;
1806 break;
1807 case 'J':
1808 if (GET_CODE (op) == CONST_INT
1809 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1810 return 1;
1811 break;
1812 case 'K':
1813 if (GET_CODE (op) == CONST_INT
1814 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1815 return 1;
1816 break;
1817 case 'L':
1818 if (GET_CODE (op) == CONST_INT
1819 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1820 return 1;
1821 break;
1822 case 'M':
1823 if (GET_CODE (op) == CONST_INT
1824 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1825 return 1;
1826 break;
1827 case 'N':
1828 if (GET_CODE (op) == CONST_INT
1829 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1830 return 1;
1831 break;
1832 case 'O':
1833 if (GET_CODE (op) == CONST_INT
1834 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1835 return 1;
1836 break;
1837 case 'P':
1838 if (GET_CODE (op) == CONST_INT
1839 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1840 return 1;
1841 break;
1843 case 'X':
1844 return 1;
1846 case 'g':
1847 if (general_operand (op, VOIDmode))
1848 return 1;
1849 break;
1851 default:
1852 /* For all other letters, we first check for a register class,
1853 otherwise it is an EXTRA_CONSTRAINT. */
1854 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1856 case 'r':
1857 if (GET_MODE (op) == BLKmode)
1858 break;
1859 if (register_operand (op, VOIDmode))
1860 return 1;
1862 #ifdef EXTRA_CONSTRAINT
1863 if (EXTRA_CONSTRAINT (op, c))
1864 return 1;
1865 #endif
1866 break;
/* No letter accepted OP: 0, or -1 if a match digit was seen. */
1870 return result;
1873 /* Given an rtx *P, if it is a sum containing an integer constant term,
1874 return the location (type rtx *) of the pointer to that constant term.
1875 Otherwise, return a null pointer. */
1877 static rtx *
1878 find_constant_term_loc (p)
1879 rtx *p;
1881 register rtx *tem;
1882 register enum rtx_code code = GET_CODE (*p);
1884 /* If *P IS such a constant term, P is its location. */
1886 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1887 || code == CONST)
1888 return p;
1890 /* Otherwise, if not a sum, it has no constant term. */
1892 if (GET_CODE (*p) != PLUS)
1893 return 0;
1895 /* If one of the summands is constant, return its location. */
/* NOTE(review): this tests that BOTH summands are constant and then
   returns P (the whole sum), rather than the location of one summand
   -- presumably so a fully-constant sum is treated as one term. */
1897 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1898 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1899 return p;
1901 /* Otherwise, check each summand for containing a constant term. */
1903 if (XEXP (*p, 0) != 0)
1905 tem = find_constant_term_loc (&XEXP (*p, 0));
1906 if (tem != 0)
1907 return tem;
1910 if (XEXP (*p, 1) != 0)
1912 tem = find_constant_term_loc (&XEXP (*p, 1));
1913 if (tem != 0)
1914 return tem;
1917 return 0;
1920 /* Return 1 if OP is a memory reference
1921 whose address contains no side effects
1922 and remains valid after the addition
1923 of a positive integer less than the
1924 size of the object being referenced.
1926 We assume that the original address is valid and do not check it.
1928 This uses strict_memory_address_p as a subroutine, so
1929 don't use it before reload. */
/* Strict variant: second argument 1 selects strict address checking. */
1932 offsettable_memref_p (op)
1933 rtx op;
1935 return ((GET_CODE (op) == MEM)
1936 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1939 /* Similar, but don't require a strictly valid mem ref:
1940 consider pseudo-regs valid as index or base regs. */
/* Non-strict variant of offsettable_memref_p, usable before reload. */
1943 offsettable_nonstrict_memref_p (op)
1944 rtx op;
1946 return ((GET_CODE (op) == MEM)
1947 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1950 /* Return 1 if Y is a memory address which contains no side effects
1951 and would remain valid after the addition of a positive integer
1952 less than the size of that mode.
1954 We assume that the original address is valid and do not check it.
1955 We do check that it is valid for narrower modes.
1957 If STRICTP is nonzero, we require a strictly valid address,
1958 for the sake of use in reload.c. */
1961 offsettable_address_p (strictp, mode, y)
1962 int strictp;
1963 enum machine_mode mode;
1964 register rtx y;
1966 register enum rtx_code ycode = GET_CODE (y);
1967 register rtx z;
1968 rtx y1 = y;
1969 rtx *y2;
/* Choose the strict or non-strict address validator up front. */
1970 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1971 (strictp ? strict_memory_address_p : memory_address_p);
1972 unsigned int mode_sz = GET_MODE_SIZE (mode);
1974 if (CONSTANT_ADDRESS_P (y))
1975 return 1;
1977 /* Adjusting an offsettable address involves changing to a narrower mode.
1978 Make sure that's OK. */
1980 if (mode_dependent_address_p (y))
1981 return 0;
1983 /* ??? How much offset does an offsettable BLKmode reference need?
1984 Clearly that depends on the situation in which it's being used.
1985 However, the current situation in which we test 0xffffffff is
1986 less than ideal. Caveat user. */
1987 if (mode_sz == 0)
1988 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1990 /* If the expression contains a constant term,
1991 see if it remains valid when max possible offset is added. */
1993 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1995 int good;
/* Temporarily bump the constant term in place, test, then restore. */
1997 y1 = *y2;
1998 *y2 = plus_constant (*y2, mode_sz - 1);
1999 /* Use QImode because an odd displacement may be automatically invalid
2000 for any wider mode. But it should be valid for a single byte. */
2001 good = (*addressp) (QImode, y);
2003 /* In any case, restore old contents of memory. */
2004 *y2 = y1;
2005 return good;
/* Addresses with side effects (class 'a': autoincrement etc.) are
   never offsettable. */
2008 if (GET_RTX_CLASS (ycode) == 'a')
2009 return 0;
2011 /* The offset added here is chosen as the maximum offset that
2012 any instruction could need to add when operating on something
2013 of the specified mode. We assume that if Y and Y+c are
2014 valid addresses then so is Y+d for all 0<d<c. */
2016 z = plus_constant_for_output (y, mode_sz - 1);
2018 /* Use QImode because an odd displacement may be automatically invalid
2019 for any wider mode. But it should be valid for a single byte. */
2020 return (*addressp) (QImode, z);
2023 /* Return 1 if ADDR is an address-expression whose effect depends
2024 on the mode of the memory reference it is used in.
2026 Autoincrement addressing is a typical example of mode-dependence
2027 because the amount of the increment depends on the mode. */
/* The target macro jumps to `win' for mode-dependent addresses. */
2030 mode_dependent_address_p (addr)
2031 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2033 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2034 return 0;
2035 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2036 win: ATTRIBUTE_UNUSED_LABEL
2037 return 1;
2040 /* Return 1 if OP is a general operand
2041 other than a memory ref with a mode dependent address. */
/* Note: K&R definition -- parameter order is (op, mode) as given by
   the identifier list, not by the declaration order below. */
2044 mode_independent_operand (op, mode)
2045 enum machine_mode mode;
2046 rtx op;
2048 rtx addr;
2050 if (! general_operand (op, mode))
2051 return 0;
/* Non-memory general operands are trivially mode-independent. */
2053 if (GET_CODE (op) != MEM)
2054 return 1;
2056 addr = XEXP (op, 0);
2057 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2058 return 1;
2059 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2060 lose: ATTRIBUTE_UNUSED_LABEL
2061 return 0;
2064 /* Given an operand OP that is a valid memory reference which
2065 satisfies offsettable_memref_p, return a new memory reference whose
2066 address has been adjusted by OFFSET. OFFSET should be positive and
2067 less than the size of the object referenced. */
2070 adj_offsettable_operand (op, offset)
2071 rtx op;
2072 int offset;
2074 register enum rtx_code code = GET_CODE (op);
2076 if (code == MEM)
2078 register rtx y = XEXP (op, 0);
2079 register rtx new;
/* Constant address: build a fresh MEM at address + OFFSET,
   preserving the original MEM's attribute flags. */
2081 if (CONSTANT_ADDRESS_P (y))
2083 new = gen_rtx_MEM (GET_MODE (op),
2084 plus_constant_for_output (y, offset));
2085 MEM_COPY_ATTRIBUTES (new, op);
2086 return new;
/* PLUS address: copy OP, then add OFFSET into its existing constant
   term in place if one is found. */
2089 if (GET_CODE (y) == PLUS)
2091 rtx z = y;
2092 register rtx *const_loc;
2094 op = copy_rtx (op);
2095 z = XEXP (op, 0);
2096 const_loc = find_constant_term_loc (&z);
2097 if (const_loc)
2099 *const_loc = plus_constant_for_output (*const_loc, offset);
2100 return op;
/* Fallback: wrap the address in an explicit (plus y offset). */
2104 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
2105 MEM_COPY_ATTRIBUTES (new, op);
2106 return new;
/* Caller violated the contract: OP was not a MEM. */
2108 abort ();
2111 /* Like extract_insn, but save insn extracted and don't extract again, when
2112 called again for the same insn expecting that recog_data still contain the
2113 valid information. This is used primary by gen_attr infrastructure that
2114 often does extract insn again and again. */
2115 void
2116 extract_insn_cached (insn)
2117 rtx insn;
/* Cache hit: recog_data already describes INSN and its code is known. */
2119 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2120 return;
2121 extract_insn (insn);
2122 recog_data.insn = insn;
2124 /* Do cached extract_insn, constrain_operand and complain about failures.
2125 Used by insn_attrtab. */
2126 void
2127 extract_constrain_insn_cached (insn)
2128 rtx insn;
2130 extract_insn_cached (insn);
/* Only constrain if no alternative has been chosen yet; a failure
   here means the insn matches no constraint alternative at all. */
2131 if (which_alternative == -1
2132 && !constrain_operands (reload_completed))
2133 fatal_insn_not_found (insn);
2135 /* Do cached constrain_operand and complain about failures. */
/* Returns 1 immediately when an alternative was already selected. */
2137 constrain_operands_cached (strict)
2138 int strict;
2140 if (which_alternative == -1)
2141 return constrain_operands (strict);
2142 else
2143 return 1;
2146 /* Analyze INSN and fill in recog_data. */
/* Populates recog_data.operand[], operand_loc[], constraints[],
   operand_mode[], operand_type[], n_operands, n_alternatives and
   n_dups, and resets which_alternative to -1 (none chosen yet). */
2148 void
2149 extract_insn (insn)
2150 rtx insn;
2152 int i;
2153 int icode;
2154 int noperands;
2155 rtx body = PATTERN (insn);
2157 recog_data.insn = NULL;
2158 recog_data.n_operands = 0;
2159 recog_data.n_alternatives = 0;
2160 recog_data.n_dups = 0;
2161 which_alternative = -1;
2163 switch (GET_CODE (body))
/* Patterns with no operands to extract. */
2165 case USE:
2166 case CLOBBER:
2167 case ASM_INPUT:
2168 case ADDR_VEC:
2169 case ADDR_DIFF_VEC:
2170 return;
2172 case SET:
2173 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2174 goto asm_insn;
2175 else
2176 goto normal_insn;
2177 case PARALLEL:
2178 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2179 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2180 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2181 goto asm_insn;
2182 else
2183 goto normal_insn;
2184 case ASM_OPERANDS:
2185 asm_insn:
2186 recog_data.n_operands = noperands = asm_noperands (body);
2187 if (noperands >= 0)
2189 /* This insn is an `asm' with operands. */
2191 /* expand_asm_operands makes sure there aren't too many operands. */
2192 if (noperands > MAX_RECOG_OPERANDS)
2193 abort ();
2195 /* Now get the operand values and constraints out of the insn. */
2196 decode_asm_operands (body, recog_data.operand,
2197 recog_data.operand_loc,
2198 recog_data.constraints,
2199 recog_data.operand_mode);
/* Count alternatives by counting commas in the first constraint. */
2200 if (noperands > 0)
2202 const char *p = recog_data.constraints[0];
2203 recog_data.n_alternatives = 1;
2204 while (*p)
2205 recog_data.n_alternatives += (*p++ == ',');
2207 break;
/* asm_noperands returned -1: malformed asm body. */
2209 fatal_insn_not_found (insn);
2211 default:
2212 normal_insn:
2213 /* Ordinary insn: recognize it, get the operands via insn_extract
2214 and get the constraints. */
2216 icode = recog_memoized (insn);
2217 if (icode < 0)
2218 fatal_insn_not_found (insn);
2220 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2221 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2222 recog_data.n_dups = insn_data[icode].n_dups;
2224 insn_extract (insn);
2226 for (i = 0; i < noperands; i++)
2228 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2229 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2230 /* VOIDmode match_operands gets mode from their real operand. */
2231 if (recog_data.operand_mode[i] == VOIDmode)
2232 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
/* Derive each operand's direction from its constraint's first char. */
2235 for (i = 0; i < noperands; i++)
2236 recog_data.operand_type[i]
2237 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2238 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2239 : OP_IN);
2241 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2242 abort ();
2245 /* After calling extract_insn, you can use this function to extract some
2246 information from the constraint strings into a more usable form.
2247 The collected data is stored in recog_op_alt. */
/* For each operand I and alternative J, fills recog_op_alt[i][j] with
   the register class, matching-operand links, reject weight, and the
   memory/address capability flags implied by the constraint letters. */
2248 void
2249 preprocess_constraints ()
2251 int i;
2253 memset (recog_op_alt, 0, sizeof recog_op_alt);
2254 for (i = 0; i < recog_data.n_operands; i++)
2256 int j;
2257 struct operand_alternative *op_alt;
2258 const char *p = recog_data.constraints[i];
2260 op_alt = recog_op_alt[i];
2262 for (j = 0; j < recog_data.n_alternatives; j++)
2264 op_alt[j].class = NO_REGS;
2265 op_alt[j].constraint = p;
2266 op_alt[j].matches = -1;
2267 op_alt[j].matched = -1;
/* An empty alternative accepts anything. */
2269 if (*p == '\0' || *p == ',')
2271 op_alt[j].anything_ok = 1;
2272 continue;
/* Scan this alternative's letters up to the next ',' or NUL. */
2275 for (;;)
2277 char c = *p++;
2278 if (c == '#')
/* '#' disparages the rest of the alternative: skip it. */
2280 c = *p++;
2281 while (c != ',' && c != '\0');
2282 if (c == ',' || c == '\0')
2283 break;
2285 switch (c)
2287 case '=': case '+': case '*': case '%':
2288 case 'E': case 'F': case 'G': case 'H':
2289 case 's': case 'i': case 'n':
2290 case 'I': case 'J': case 'K': case 'L':
2291 case 'M': case 'N': case 'O': case 'P':
2292 /* These don't say anything we care about. */
2293 break;
2295 case '?':
2296 op_alt[j].reject += 6;
2297 break;
2298 case '!':
2299 op_alt[j].reject += 600;
2300 break;
2301 case '&':
2302 op_alt[j].earlyclobber = 1;
2303 break;
/* Digit: this operand must match operand C-'0'; record the link
   in both directions. */
2305 case '0': case '1': case '2': case '3': case '4':
2306 case '5': case '6': case '7': case '8': case '9':
2307 op_alt[j].matches = c - '0';
2308 recog_op_alt[op_alt[j].matches][j].matched = i;
2309 break;
2311 case 'm':
2312 op_alt[j].memory_ok = 1;
2313 break;
2314 case '<':
2315 op_alt[j].decmem_ok = 1;
2316 break;
2317 case '>':
2318 op_alt[j].incmem_ok = 1;
2319 break;
2320 case 'V':
2321 op_alt[j].nonoffmem_ok = 1;
2322 break;
2323 case 'o':
2324 op_alt[j].offmem_ok = 1;
2325 break;
2326 case 'X':
2327 op_alt[j].anything_ok = 1;
2328 break;
2330 case 'p':
2331 op_alt[j].is_address = 1;
2332 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2333 break;
2335 case 'g': case 'r':
2336 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2337 break;
/* Any other letter names a target register class; union it in. */
2339 default:
2340 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2341 break;
2348 /* Check the operands of an insn against the insn's operand constraints
2349 and return 1 if they are valid.
2350 The information about the insn's operands, constraints, operand modes
2351 etc. is obtained from the global variables set up by extract_insn.
2353 WHICH_ALTERNATIVE is set to a number which indicates which
2354 alternative of constraints was matched: 0 for the first alternative,
2355 1 for the next, etc.
2357 In addition, when two operands are match
2358 and it happens that the output operand is (reg) while the
2359 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2360 make the output operand look like the input.
2361 This is because the output operand is the one the template will print.
2363 This is used in final, just before printing the assembler code and by
2364 the routines that determine an insn's attribute.
2366 If STRICT is a positive non-zero value, it means that we have been
2367 called after reload has been completed. In that case, we must
2368 do all checks strictly. If it is zero, it means that we have been called
2369 before reload has completed. In that case, we first try to see if we can
2370 find an alternative that matches strictly. If not, we try again, this
2371 time assuming that reload will fix up the insn. This provides a "best
2372 guess" for the alternative and is used to compute attributes of insns prior
2373 to reload. A negative value of STRICT is used for this internal call. */
2375 struct funny_match
2377 int this, other;
2381 constrain_operands (strict)
2382 int strict;
2384 const char *constraints[MAX_RECOG_OPERANDS];
2385 int matching_operands[MAX_RECOG_OPERANDS];
2386 int earlyclobber[MAX_RECOG_OPERANDS];
2387 register int c;
2389 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2390 int funny_match_index;
2392 which_alternative = 0;
2393 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2394 return 1;
2396 for (c = 0; c < recog_data.n_operands; c++)
2398 constraints[c] = recog_data.constraints[c];
2399 matching_operands[c] = -1;
2404 register int opno;
2405 int lose = 0;
2406 funny_match_index = 0;
2408 for (opno = 0; opno < recog_data.n_operands; opno++)
2410 register rtx op = recog_data.operand[opno];
2411 enum machine_mode mode = GET_MODE (op);
2412 register const char *p = constraints[opno];
2413 int offset = 0;
2414 int win = 0;
2415 int val;
2417 earlyclobber[opno] = 0;
2419 /* A unary operator may be accepted by the predicate, but it
2420 is irrelevant for matching constraints. */
2421 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2422 op = XEXP (op, 0);
2424 if (GET_CODE (op) == SUBREG)
2426 if (GET_CODE (SUBREG_REG (op)) == REG
2427 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2428 offset = SUBREG_WORD (op);
2429 op = SUBREG_REG (op);
2432 /* An empty constraint or empty alternative
2433 allows anything which matched the pattern. */
2434 if (*p == 0 || *p == ',')
2435 win = 1;
2437 while (*p && (c = *p++) != ',')
2438 switch (c)
2440 case '?': case '!': case '*': case '%':
2441 case '=': case '+':
2442 break;
2444 case '#':
2445 /* Ignore rest of this alternative as far as
2446 constraint checking is concerned. */
2447 while (*p && *p != ',')
2448 p++;
2449 break;
2451 case '&':
2452 earlyclobber[opno] = 1;
2453 break;
2455 case '0': case '1': case '2': case '3': case '4':
2456 case '5': case '6': case '7': case '8': case '9':
2458 /* This operand must be the same as a previous one.
2459 This kind of constraint is used for instructions such
2460 as add when they take only two operands.
2462 Note that the lower-numbered operand is passed first.
2464 If we are not testing strictly, assume that this constraint
2465 will be satisfied. */
2466 if (strict < 0)
2467 val = 1;
2468 else
2470 rtx op1 = recog_data.operand[c - '0'];
2471 rtx op2 = recog_data.operand[opno];
2473 /* A unary operator may be accepted by the predicate,
2474 but it is irrelevant for matching constraints. */
2475 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2476 op1 = XEXP (op1, 0);
2477 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2478 op2 = XEXP (op2, 0);
2480 val = operands_match_p (op1, op2);
2483 matching_operands[opno] = c - '0';
2484 matching_operands[c - '0'] = opno;
2486 if (val != 0)
2487 win = 1;
2488 /* If output is *x and input is *--x,
2489 arrange later to change the output to *--x as well,
2490 since the output op is the one that will be printed. */
2491 if (val == 2 && strict > 0)
2493 funny_match[funny_match_index].this = opno;
2494 funny_match[funny_match_index++].other = c - '0';
2496 break;
2498 case 'p':
2499 /* p is used for address_operands. When we are called by
2500 gen_reload, no one will have checked that the address is
2501 strictly valid, i.e., that all pseudos requiring hard regs
2502 have gotten them. */
2503 if (strict <= 0
2504 || (strict_memory_address_p (recog_data.operand_mode[opno],
2505 op)))
2506 win = 1;
2507 break;
2509 /* No need to check general_operand again;
2510 it was done in insn-recog.c. */
2511 case 'g':
2512 /* Anything goes unless it is a REG and really has a hard reg
2513 but the hard reg is not in the class GENERAL_REGS. */
2514 if (strict < 0
2515 || GENERAL_REGS == ALL_REGS
2516 || GET_CODE (op) != REG
2517 || (reload_in_progress
2518 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2519 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2520 win = 1;
2521 break;
2523 case 'X':
2524 /* This is used for a MATCH_SCRATCH in the cases when
2525 we don't actually need anything. So anything goes
2526 any time. */
2527 win = 1;
2528 break;
2530 case 'm':
2531 if (GET_CODE (op) == MEM
2532 /* Before reload, accept what reload can turn into mem. */
2533 || (strict < 0 && CONSTANT_P (op))
2534 /* During reload, accept a pseudo */
2535 || (reload_in_progress && GET_CODE (op) == REG
2536 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2537 win = 1;
2538 break;
2540 case '<':
2541 if (GET_CODE (op) == MEM
2542 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2543 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2544 win = 1;
2545 break;
2547 case '>':
2548 if (GET_CODE (op) == MEM
2549 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2550 || GET_CODE (XEXP (op, 0)) == POST_INC))
2551 win = 1;
2552 break;
2554 case 'E':
2555 #ifndef REAL_ARITHMETIC
2556 /* Match any CONST_DOUBLE, but only if
2557 we can examine the bits of it reliably. */
2558 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2559 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2560 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2561 break;
2562 #endif
2563 if (GET_CODE (op) == CONST_DOUBLE)
2564 win = 1;
2565 break;
2567 case 'F':
2568 if (GET_CODE (op) == CONST_DOUBLE)
2569 win = 1;
2570 break;
2572 case 'G':
2573 case 'H':
2574 if (GET_CODE (op) == CONST_DOUBLE
2575 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2576 win = 1;
2577 break;
2579 case 's':
2580 if (GET_CODE (op) == CONST_INT
2581 || (GET_CODE (op) == CONST_DOUBLE
2582 && GET_MODE (op) == VOIDmode))
2583 break;
2584 case 'i':
2585 if (CONSTANT_P (op))
2586 win = 1;
2587 break;
2589 case 'n':
2590 if (GET_CODE (op) == CONST_INT
2591 || (GET_CODE (op) == CONST_DOUBLE
2592 && GET_MODE (op) == VOIDmode))
2593 win = 1;
2594 break;
2596 case 'I':
2597 case 'J':
2598 case 'K':
2599 case 'L':
2600 case 'M':
2601 case 'N':
2602 case 'O':
2603 case 'P':
2604 if (GET_CODE (op) == CONST_INT
2605 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2606 win = 1;
2607 break;
2609 case 'V':
2610 if (GET_CODE (op) == MEM
2611 && ((strict > 0 && ! offsettable_memref_p (op))
2612 || (strict < 0
2613 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2614 || (reload_in_progress
2615 && !(GET_CODE (op) == REG
2616 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2617 win = 1;
2618 break;
2620 case 'o':
2621 if ((strict > 0 && offsettable_memref_p (op))
2622 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2623 /* Before reload, accept what reload can handle. */
2624 || (strict < 0
2625 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2626 /* During reload, accept a pseudo */
2627 || (reload_in_progress && GET_CODE (op) == REG
2628 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2629 win = 1;
2630 break;
2632 default:
2634 enum reg_class class;
2636 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2637 if (class != NO_REGS)
2639 if (strict < 0
2640 || (strict == 0
2641 && GET_CODE (op) == REG
2642 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2643 || (strict == 0 && GET_CODE (op) == SCRATCH)
2644 || (GET_CODE (op) == REG
2645 && reg_fits_class_p (op, class, offset, mode)))
2646 win = 1;
2648 #ifdef EXTRA_CONSTRAINT
2649 else if (EXTRA_CONSTRAINT (op, c))
2650 win = 1;
2651 #endif
2652 break;
2656 constraints[opno] = p;
2657 /* If this operand did not win somehow,
2658 this alternative loses. */
2659 if (! win)
2660 lose = 1;
2662 /* This alternative won; the operands are ok.
2663 Change whichever operands this alternative says to change. */
2664 if (! lose)
2666 int opno, eopno;
2668 /* See if any earlyclobber operand conflicts with some other
2669 operand. */
2671 if (strict > 0)
2672 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2673 /* Ignore earlyclobber operands now in memory,
2674 because we would often report failure when we have
2675 two memory operands, one of which was formerly a REG. */
2676 if (earlyclobber[eopno]
2677 && GET_CODE (recog_data.operand[eopno]) == REG)
2678 for (opno = 0; opno < recog_data.n_operands; opno++)
2679 if ((GET_CODE (recog_data.operand[opno]) == MEM
2680 || recog_data.operand_type[opno] != OP_OUT)
2681 && opno != eopno
2682 /* Ignore things like match_operator operands. */
2683 && *recog_data.constraints[opno] != 0
2684 && ! (matching_operands[opno] == eopno
2685 && operands_match_p (recog_data.operand[opno],
2686 recog_data.operand[eopno]))
2687 && ! safe_from_earlyclobber (recog_data.operand[opno],
2688 recog_data.operand[eopno]))
2689 lose = 1;
2691 if (! lose)
2693 while (--funny_match_index >= 0)
2695 recog_data.operand[funny_match[funny_match_index].other]
2696 = recog_data.operand[funny_match[funny_match_index].this];
2699 return 1;
2703 which_alternative++;
2705 while (which_alternative < recog_data.n_alternatives);
2707 which_alternative = -1;
2708 /* If we are about to reject this, but we are not to test strictly,
2709 try a very loose test. Only return failure if it fails also. */
2710 if (strict == 0)
2711 return constrain_operands (-1);
2712 else
2713 return 0;
2716 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2717 is a hard reg in class CLASS when its regno is offset by OFFSET
2718 and changed to mode MODE.
2719 If REG occupies multiple hard regs, all of them must be in CLASS. */
2722 reg_fits_class_p (operand, class, offset, mode)
2723 rtx operand;
2724 register enum reg_class class;
2725 int offset;
2726 enum machine_mode mode;
2728 register int regno = REGNO (operand);
2729 if (regno < FIRST_PSEUDO_REGISTER
2730 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2731 regno + offset))
2733 register int sr;
2734 regno += offset;
2735 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2736 sr > 0; sr--)
2737 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2738 regno + sr))
2739 break;
2740 return sr == 0;
2743 return 0;
2746 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2748 void
2749 split_all_insns (upd_life)
2750 int upd_life;
2752 sbitmap blocks;
2753 int changed;
2754 int i;
2756 blocks = sbitmap_alloc (n_basic_blocks);
2757 sbitmap_zero (blocks);
2758 changed = 0;
2760 for (i = n_basic_blocks - 1; i >= 0; --i)
2762 basic_block bb = BASIC_BLOCK (i);
2763 rtx insn, next;
2765 for (insn = bb->head; insn ; insn = next)
2767 rtx set;
2769 /* Can't use `next_real_insn' because that might go across
2770 CODE_LABELS and short-out basic blocks. */
2771 next = NEXT_INSN (insn);
2772 if (! INSN_P (insn))
2775 /* Don't split no-op move insns. These should silently
2776 disappear later in final. Splitting such insns would
2777 break the code that handles REG_NO_CONFLICT blocks. */
2779 else if ((set = single_set (insn)) != NULL
2780 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2782 /* Nops get in the way while scheduling, so delete them
2783 now if register allocation has already been done. It
2784 is too risky to try to do this before register
2785 allocation, and there are unlikely to be very many
2786 nops then anyways. */
2787 if (reload_completed)
2789 PUT_CODE (insn, NOTE);
2790 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2791 NOTE_SOURCE_FILE (insn) = 0;
2794 else
2796 /* Split insns here to get max fine-grain parallelism. */
2797 rtx first = PREV_INSN (insn);
2798 rtx last = try_split (PATTERN (insn), insn, 1);
2800 if (last != insn)
2802 SET_BIT (blocks, i);
2803 changed = 1;
2805 /* try_split returns the NOTE that INSN became. */
2806 PUT_CODE (insn, NOTE);
2807 NOTE_SOURCE_FILE (insn) = 0;
2808 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2810 /* ??? Coddle to md files that generate subregs in post-
2811 reload splitters instead of computing the proper
2812 hard register. */
2813 if (reload_completed && first != last)
2815 first = NEXT_INSN (first);
2816 while (1)
2818 if (INSN_P (first))
2819 cleanup_subreg_operands (first);
2820 if (first == last)
2821 break;
2822 first = NEXT_INSN (first);
2826 if (insn == bb->end)
2828 bb->end = last;
2829 break;
2834 if (insn == bb->end)
2835 break;
2838 /* ??? When we're called from just after reload, the CFG is in bad
2839 shape, and we may have fallen off the end. This could be fixed
2840 by having reload not try to delete unreachable code. Otherwise
2841 assert we found the end insn. */
2842 if (insn == NULL && upd_life)
2843 abort ();
2846 if (changed && upd_life)
2848 compute_bb_for_insn (get_max_uid ());
2849 count_or_remove_death_notes (blocks, 1);
2850 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2853 sbitmap_free (blocks);
2856 #ifdef HAVE_peephole2
2857 struct peep2_insn_data
2859 rtx insn;
2860 regset live_before;
2863 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2864 static int peep2_current;
2866 /* A non-insn marker indicating the last insn of the block.
2867 The live_before regset for this element is correct, indicating
2868 global_live_at_end for the block. */
2869 #define PEEP2_EOB pc_rtx
2871 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2872 does not exist. Used by the recognizer to find the next insn to match
2873 in a multi-insn pattern. */
2876 peep2_next_insn (n)
2877 int n;
2879 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2880 abort ();
2882 n += peep2_current;
2883 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2884 n -= MAX_INSNS_PER_PEEP2 + 1;
2886 if (peep2_insn_data[n].insn == PEEP2_EOB)
2887 return NULL_RTX;
2888 return peep2_insn_data[n].insn;
2891 /* Return true if REGNO is dead before the Nth non-note insn
2892 after `current'. */
2895 peep2_regno_dead_p (ofs, regno)
2896 int ofs;
2897 int regno;
2899 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2900 abort ();
2902 ofs += peep2_current;
2903 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2904 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2906 if (peep2_insn_data[ofs].insn == NULL_RTX)
2907 abort ();
2909 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2912 /* Similarly for a REG. */
2915 peep2_reg_dead_p (ofs, reg)
2916 int ofs;
2917 rtx reg;
2919 int regno, n;
2921 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2922 abort ();
2924 ofs += peep2_current;
2925 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2926 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2928 if (peep2_insn_data[ofs].insn == NULL_RTX)
2929 abort ();
2931 regno = REGNO (reg);
2932 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2933 while (--n >= 0)
2934 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2935 return 0;
2936 return 1;
2939 /* Try to find a hard register of mode MODE, matching the register class in
2940 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
2941 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
2942 in which case the only condition is that the register must be available
2943 before CURRENT_INSN.
2944 Registers that already have bits set in REG_SET will not be considered.
2946 If an appropriate register is available, it will be returned and the
2947 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2948 returned. */
2951 peep2_find_free_register (from, to, class_str, mode, reg_set)
2952 int from, to;
2953 const char *class_str;
2954 enum machine_mode mode;
2955 HARD_REG_SET *reg_set;
2957 static int search_ofs;
2958 enum reg_class class;
2959 HARD_REG_SET live;
2960 int i;
2962 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2963 abort ();
2965 from += peep2_current;
2966 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2967 from -= MAX_INSNS_PER_PEEP2 + 1;
2968 to += peep2_current;
2969 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2970 to -= MAX_INSNS_PER_PEEP2 + 1;
2972 if (peep2_insn_data[from].insn == NULL_RTX)
2973 abort ();
2974 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2976 while (from != to)
2978 HARD_REG_SET this_live;
2980 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2981 from = 0;
2982 if (peep2_insn_data[from].insn == NULL_RTX)
2983 abort ();
2984 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2985 IOR_HARD_REG_SET (live, this_live);
2988 class = (class_str[0] == 'r' ? GENERAL_REGS
2989 : REG_CLASS_FROM_LETTER (class_str[0]));
2991 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2993 int raw_regno, regno, success, j;
2995 /* Distribute the free registers as much as possible. */
2996 raw_regno = search_ofs + i;
2997 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2998 raw_regno -= FIRST_PSEUDO_REGISTER;
2999 #ifdef REG_ALLOC_ORDER
3000 regno = reg_alloc_order[raw_regno];
3001 #else
3002 regno = raw_regno;
3003 #endif
3005 /* Don't allocate fixed registers. */
3006 if (fixed_regs[regno])
3007 continue;
3008 /* Make sure the register is of the right class. */
3009 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3010 continue;
3011 /* And can support the mode we need. */
3012 if (! HARD_REGNO_MODE_OK (regno, mode))
3013 continue;
3014 /* And that we don't create an extra save/restore. */
3015 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3016 continue;
3017 /* And we don't clobber traceback for noreturn functions. */
3018 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3019 && (! reload_completed || frame_pointer_needed))
3020 continue;
3022 success = 1;
3023 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3025 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3026 || TEST_HARD_REG_BIT (live, regno + j))
3028 success = 0;
3029 break;
3032 if (success)
3034 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3035 SET_HARD_REG_BIT (*reg_set, regno + j);
3037 /* Start the next search with the next register. */
3038 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3039 raw_regno = 0;
3040 search_ofs = raw_regno;
3042 return gen_rtx_REG (mode, regno);
3046 search_ofs = 0;
3047 return NULL_RTX;
3050 /* Perform the peephole2 optimization pass. */
3052 void
3053 peephole2_optimize (dump_file)
3054 FILE *dump_file ATTRIBUTE_UNUSED;
3056 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3057 rtx insn, prev;
3058 regset live;
3059 int i, b;
3060 #ifdef HAVE_conditional_execution
3061 sbitmap blocks;
3062 int changed;
3063 #endif
3065 /* Initialize the regsets we're going to use. */
3066 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3067 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3068 live = INITIALIZE_REG_SET (rs_heads[i]);
3070 #ifdef HAVE_conditional_execution
3071 blocks = sbitmap_alloc (n_basic_blocks);
3072 sbitmap_zero (blocks);
3073 changed = 0;
3074 #else
3075 count_or_remove_death_notes (NULL, 1);
3076 #endif
3078 for (b = n_basic_blocks - 1; b >= 0; --b)
3080 basic_block bb = BASIC_BLOCK (b);
3081 struct propagate_block_info *pbi;
3083 /* Indicate that all slots except the last holds invalid data. */
3084 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3085 peep2_insn_data[i].insn = NULL_RTX;
3087 /* Indicate that the last slot contains live_after data. */
3088 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3089 peep2_current = MAX_INSNS_PER_PEEP2;
3091 /* Start up propagation. */
3092 COPY_REG_SET (live, bb->global_live_at_end);
3093 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3095 #ifdef HAVE_conditional_execution
3096 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3097 #else
3098 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3099 #endif
3101 for (insn = bb->end; ; insn = prev)
3103 prev = PREV_INSN (insn);
3104 if (INSN_P (insn))
3106 rtx try;
3107 int match_len;
3109 /* Record this insn. */
3110 if (--peep2_current < 0)
3111 peep2_current = MAX_INSNS_PER_PEEP2;
3112 peep2_insn_data[peep2_current].insn = insn;
3113 propagate_one_insn (pbi, insn);
3114 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3116 /* Match the peephole. */
3117 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3118 if (try != NULL)
3120 i = match_len + peep2_current;
3121 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3122 i -= MAX_INSNS_PER_PEEP2 + 1;
3124 /* Replace the old sequence with the new. */
3125 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3126 try = emit_insn_after (try, prev);
3128 /* Adjust the basic block boundaries. */
3129 if (peep2_insn_data[i].insn == bb->end)
3130 bb->end = try;
3131 if (insn == bb->head)
3132 bb->head = NEXT_INSN (prev);
3134 #ifdef HAVE_conditional_execution
3135 /* With conditional execution, we cannot back up the
3136 live information so easily, since the conditional
3137 death data structures are not so self-contained.
3138 So record that we've made a modification to this
3139 block and update life information at the end. */
3140 SET_BIT (blocks, b);
3141 changed = 1;
3143 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3144 peep2_insn_data[i].insn = NULL_RTX;
3145 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3146 #else
3147 /* Back up lifetime information past the end of the
3148 newly created sequence. */
3149 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3150 i = 0;
3151 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3153 /* Update life information for the new sequence. */
3156 if (INSN_P (try))
3158 if (--i < 0)
3159 i = MAX_INSNS_PER_PEEP2;
3160 peep2_insn_data[i].insn = try;
3161 propagate_one_insn (pbi, try);
3162 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3164 try = PREV_INSN (try);
3166 while (try != prev);
3168 /* ??? Should verify that LIVE now matches what we
3169 had before the new sequence. */
3171 peep2_current = i;
3172 #endif
3176 if (insn == bb->head)
3177 break;
3180 free_propagate_block_info (pbi);
3183 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3184 FREE_REG_SET (peep2_insn_data[i].live_before);
3185 FREE_REG_SET (live);
3187 #ifdef HAVE_conditional_execution
3188 count_or_remove_death_notes (blocks, 1);
3189 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3190 sbitmap_free (blocks);
3191 #endif
3193 #endif /* HAVE_peephole2 */