/* Extracted from official-gcc.git: gcc/recog.c
   (blob 86209fe773a4f20b77f6224167f354553932d7a8,
   changelog entry 2000-05-02 Jeff Sturm <jsturm@one-point.com>).  */
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "hard-reg-set.h"
30 #include "recog.h"
31 #include "regs.h"
32 #include "function.h"
33 #include "flags.h"
34 #include "real.h"
35 #include "toplev.h"
36 #include "basic-block.h"
37 #include "output.h"
38 #include "reload.h"
40 #ifndef STACK_PUSH_CODE
41 #ifdef STACK_GROWS_DOWNWARD
42 #define STACK_PUSH_CODE PRE_DEC
43 #else
44 #define STACK_PUSH_CODE PRE_INC
45 #endif
46 #endif
48 #ifndef STACK_POP_CODE
49 #ifdef STACK_GROWS_DOWNWARD
50 #define STACK_POP_CODE POST_INC
51 #else
52 #define STACK_POP_CODE POST_DEC
53 #endif
54 #endif
56 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
57 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
58 static rtx *find_constant_term_loc PARAMS ((rtx *));
59 static void validate_replace_src_1 PARAMS ((rtx *, void *));
61 /* Nonzero means allow operands to be volatile.
62 This should be 0 if you are generating rtl, such as if you are calling
63 the functions in optabs.c and expmed.c (most of the time).
64 This should be 1 if all valid insns need to be recognized,
65 such as in regclass.c and final.c and reload.c.
67 init_recog and init_recog_no_volatile are responsible for setting this. */
69 int volatile_ok;
71 struct recog_data recog_data;
73 /* Contains a vector of operand_alternative structures for every operand.
74 Set up by preprocess_constraints. */
75 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
77 /* On return from `constrain_operands', indicate which alternative
78 was satisfied. */
80 int which_alternative;
82 /* Nonzero after end of reload pass.
83 Set to 1 or 0 by toplev.c.
84 Controls the significance of (SUBREG (MEM)). */
86 int reload_completed;
88 /* Initialize data used by the function `recog'.
89 This must be called once in the compilation of a function
90 before any insn recognition may be done in the function. */
92 void
93 init_recog_no_volatile ()
95 volatile_ok = 0;
98 void
99 init_recog ()
101 volatile_ok = 1;
104 /* Try recognizing the instruction INSN,
105 and return the code number that results.
106 Remember the code so that repeated calls do not
107 need to spend the time for actual rerecognition.
109 This function is the normal interface to instruction recognition.
110 The automatically-generated function `recog' is normally called
111 through this one. (The only exception is in combine.c.) */
114 recog_memoized_1 (insn)
115 rtx insn;
117 if (INSN_CODE (insn) < 0)
118 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
119 return INSN_CODE (insn);
122 /* Check that X is an insn-body for an `asm' with operands
123 and that the operands mentioned in it are legitimate. */
126 check_asm_operands (x)
127 rtx x;
129 int noperands;
130 rtx *operands;
131 const char **constraints;
132 int i;
134 /* Post-reload, be more strict with things. */
135 if (reload_completed)
137 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
138 extract_insn (make_insn_raw (x));
139 constrain_operands (1);
140 return which_alternative >= 0;
143 noperands = asm_noperands (x);
144 if (noperands < 0)
145 return 0;
146 if (noperands == 0)
147 return 1;
149 operands = (rtx *) alloca (noperands * sizeof (rtx));
150 constraints = (const char **) alloca (noperands * sizeof (char *));
152 decode_asm_operands (x, operands, NULL, constraints, NULL);
154 for (i = 0; i < noperands; i++)
156 const char *c = constraints[i];
157 if (c[0] == '%')
158 c++;
159 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
160 c = constraints[c[0] - '0'];
162 if (! asm_operand_ok (operands[i], c))
163 return 0;
166 return 1;
169 /* Static data for the next two routines. */
171 typedef struct change_t
173 rtx object;
174 int old_code;
175 rtx *loc;
176 rtx old;
177 } change_t;
179 static change_t *changes;
180 static int changes_allocated;
182 static int num_changes = 0;
184 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
185 at which NEW will be placed. If OBJECT is zero, no validation is done,
186 the change is simply made.
188 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
189 will be called with the address and mode as parameters. If OBJECT is
190 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
191 the change in place.
193 IN_GROUP is non-zero if this is part of a group of changes that must be
194 performed as a group. In that case, the changes will be stored. The
195 function `apply_change_group' will validate and apply the changes.
197 If IN_GROUP is zero, this is a single change. Try to recognize the insn
198 or validate the memory reference with the change applied. If the result
199 is not valid for the machine, suppress the change and return zero.
200 Otherwise, perform the change and return 1. */
203 validate_change (object, loc, new, in_group)
204 rtx object;
205 rtx *loc;
206 rtx new;
207 int in_group;
209 rtx old = *loc;
211 if (old == new || rtx_equal_p (old, new))
212 return 1;
214 if (in_group == 0 && num_changes != 0)
215 abort ();
217 *loc = new;
219 /* Save the information describing this change. */
220 if (num_changes >= changes_allocated)
222 if (changes_allocated == 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated = MAX_RECOG_OPERANDS * 5;
226 else
227 changes_allocated *= 2;
229 changes =
230 (change_t*) xrealloc (changes,
231 sizeof (change_t) * changes_allocated);
234 changes[num_changes].object = object;
235 changes[num_changes].loc = loc;
236 changes[num_changes].old = old;
238 if (object && GET_CODE (object) != MEM)
240 /* Set INSN_CODE to force rerecognition of insn. Save old code in
241 case invalid. */
242 changes[num_changes].old_code = INSN_CODE (object);
243 INSN_CODE (object) = -1;
246 num_changes++;
248 /* If we are making a group of changes, return 1. Otherwise, validate the
249 change group we made. */
251 if (in_group)
252 return 1;
253 else
254 return apply_change_group ();
257 /* This subroutine of apply_change_group verifies whether the changes to INSN
258 were valid; i.e. whether INSN can still be recognized. */
261 insn_invalid_p (insn)
262 rtx insn;
264 rtx pat = PATTERN (insn);
265 int num_clobbers = 0;
266 /* If we are before reload and the pattern is a SET, see if we can add
267 clobbers. */
268 int icode = recog (pat, insn,
269 (GET_CODE (pat) == SET
270 && ! reload_completed && ! reload_in_progress)
271 ? &num_clobbers : 0);
272 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
275 /* If this is an asm and the operand aren't legal, then fail. Likewise if
276 this is not an asm and the insn wasn't recognized. */
277 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
278 || (!is_asm && icode < 0))
279 return 1;
281 /* If we have to add CLOBBERs, fail if we have to add ones that reference
282 hard registers since our callers can't know if they are live or not.
283 Otherwise, add them. */
284 if (num_clobbers > 0)
286 rtx newpat;
288 if (added_clobbers_hard_reg_p (icode))
289 return 1;
291 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
292 XVECEXP (newpat, 0, 0) = pat;
293 add_clobbers (newpat, icode);
294 PATTERN (insn) = pat = newpat;
297 /* After reload, verify that all constraints are satisfied. */
298 if (reload_completed)
300 extract_insn (insn);
302 if (! constrain_operands (1))
303 return 1;
306 INSN_CODE (insn) = icode;
307 return 0;
310 /* Apply a group of changes previously issued with `validate_change'.
311 Return 1 if all changes are valid, zero otherwise. */
314 apply_change_group ()
316 int i;
317 rtx last_validated = NULL_RTX;
319 /* The changes have been applied and all INSN_CODEs have been reset to force
320 rerecognition.
322 The changes are valid if we aren't given an object, or if we are
323 given a MEM and it still is a valid address, or if this is in insn
324 and it is recognized. In the latter case, if reload has completed,
325 we also require that the operands meet the constraints for
326 the insn. */
328 for (i = 0; i < num_changes; i++)
330 rtx object = changes[i].object;
332 /* if there is no object to test or if it is the same as the one we
333 already tested, ignore it. */
334 if (object == 0 || object == last_validated)
335 continue;
337 if (GET_CODE (object) == MEM)
339 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
340 break;
342 else if (insn_invalid_p (object))
344 rtx pat = PATTERN (object);
346 /* Perhaps we couldn't recognize the insn because there were
347 extra CLOBBERs at the end. If so, try to re-recognize
348 without the last CLOBBER (later iterations will cause each of
349 them to be eliminated, in turn). But don't do this if we
350 have an ASM_OPERAND. */
351 if (GET_CODE (pat) == PARALLEL
352 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
353 && asm_noperands (PATTERN (object)) < 0)
355 rtx newpat;
357 if (XVECLEN (pat, 0) == 2)
358 newpat = XVECEXP (pat, 0, 0);
359 else
361 int j;
363 newpat
364 = gen_rtx_PARALLEL (VOIDmode,
365 rtvec_alloc (XVECLEN (pat, 0) - 1));
366 for (j = 0; j < XVECLEN (newpat, 0); j++)
367 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
370 /* Add a new change to this group to replace the pattern
371 with this new pattern. Then consider this change
372 as having succeeded. The change we added will
373 cause the entire call to fail if things remain invalid.
375 Note that this can lose if a later change than the one
376 we are processing specified &XVECEXP (PATTERN (object), 0, X)
377 but this shouldn't occur. */
379 validate_change (object, &PATTERN (object), newpat, 1);
380 continue;
382 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
383 /* If this insn is a CLOBBER or USE, it is always valid, but is
384 never recognized. */
385 continue;
386 else
387 break;
389 last_validated = object;
392 if (i == num_changes)
394 num_changes = 0;
395 return 1;
397 else
399 cancel_changes (0);
400 return 0;
404 /* Return the number of changes so far in the current group. */
407 num_validated_changes ()
409 return num_changes;
412 /* Retract the changes numbered NUM and up. */
414 void
415 cancel_changes (num)
416 int num;
418 int i;
420 /* Back out all the changes. Do this in the opposite order in which
421 they were made. */
422 for (i = num_changes - 1; i >= num; i--)
424 *changes[i].loc = changes[i].old;
425 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
426 INSN_CODE (changes[i].object) = changes[i].old_code;
428 num_changes = num;
431 /* Replace every occurrence of FROM in X with TO. Mark each change with
432 validate_change passing OBJECT. */
434 static void
435 validate_replace_rtx_1 (loc, from, to, object)
436 rtx *loc;
437 rtx from, to, object;
439 register int i, j;
440 register const char *fmt;
441 register rtx x = *loc;
442 enum rtx_code code;
444 if (!x)
445 return;
446 code = GET_CODE (x);
447 /* X matches FROM if it is the same rtx or they are both referring to the
448 same register in the same mode. Avoid calling rtx_equal_p unless the
449 operands look similar. */
451 if (x == from
452 || (GET_CODE (x) == REG && GET_CODE (from) == REG
453 && GET_MODE (x) == GET_MODE (from)
454 && REGNO (x) == REGNO (from))
455 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
456 && rtx_equal_p (x, from)))
458 validate_change (object, loc, to, 1);
459 return;
462 /* For commutative or comparison operations, try replacing each argument
463 separately and seeing if we made any changes. If so, put a constant
464 argument last.*/
465 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
467 int prev_changes = num_changes;
469 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
470 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
471 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
473 validate_change (object, loc,
474 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
475 : swap_condition (code),
476 GET_MODE (x), XEXP (x, 1),
477 XEXP (x, 0)),
479 x = *loc;
480 code = GET_CODE (x);
484 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
485 done the substitution, otherwise we won't. */
487 switch (code)
489 case PLUS:
490 /* If we have a PLUS whose second operand is now a CONST_INT, use
491 plus_constant to try to simplify it. */
492 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
493 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
495 return;
497 case MINUS:
498 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
500 validate_change (object, loc,
501 plus_constant (XEXP (x, 0), - INTVAL (to)),
503 return;
505 break;
507 case ZERO_EXTEND:
508 case SIGN_EXTEND:
509 /* In these cases, the operation to be performed depends on the mode
510 of the operand. If we are replacing the operand with a VOIDmode
511 constant, we lose the information. So try to simplify the operation
512 in that case. */
513 if (GET_MODE (to) == VOIDmode
514 && (rtx_equal_p (XEXP (x, 0), from)
515 || (GET_CODE (XEXP (x, 0)) == SUBREG
516 && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
518 rtx new = NULL_RTX;
520 /* If there is a subreg involved, crop to the portion of the
521 constant that we are interested in. */
522 if (GET_CODE (XEXP (x, 0)) == SUBREG)
524 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
525 to = operand_subword (to,
526 (SUBREG_BYTE (XEXP (x, 0))
527 / UNITS_PER_WORD),
528 0, GET_MODE (from));
529 else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
530 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
531 <= HOST_BITS_PER_WIDE_INT))
533 int i = SUBREG_BYTE (XEXP (x, 0)) * BITS_PER_UNIT;
534 HOST_WIDE_INT valh;
535 unsigned HOST_WIDE_INT vall;
537 if (GET_CODE (to) == CONST_INT)
539 vall = INTVAL (to);
540 valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
542 else
544 vall = CONST_DOUBLE_LOW (to);
545 valh = CONST_DOUBLE_HIGH (to);
548 if (WORDS_BIG_ENDIAN)
549 i = (GET_MODE_BITSIZE (GET_MODE (from))
550 - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
551 if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
552 vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
553 else if (i >= HOST_BITS_PER_WIDE_INT)
554 vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
555 to = GEN_INT (trunc_int_for_mode (vall,
556 GET_MODE (XEXP (x, 0))));
558 else
559 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
562 /* If the above didn't fail, perform the extension from the
563 mode of the operand (and not the mode of FROM). */
564 if (to)
565 new = simplify_unary_operation (code, GET_MODE (x), to,
566 GET_MODE (XEXP (x, 0)));
568 /* If any of the above failed, substitute in something that
569 we know won't be recognized. */
570 if (!new)
571 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
573 validate_change (object, loc, new, 1);
574 return;
576 break;
578 case SUBREG:
579 /* In case we are replacing by constant, attempt to simplify it to
580 non-SUBREG expression. We can't do this later, since the information
581 about inner mode may be lost. */
582 if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
584 int offset, part;
585 unsigned HOST_WIDE_INT val;
587 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
588 since we are saying that the high bits don't matter. */
589 if (GET_MODE (to) == VOIDmode
590 && (GET_MODE_SIZE (GET_MODE (x))
591 >= GET_MODE_SIZE (GET_MODE (from))))
593 rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
594 if (new)
596 validate_change (object, loc, new, 1);
597 return;
601 offset = SUBREG_BYTE (x) * BITS_PER_UNIT;
602 switch (GET_CODE (to))
604 case CONST_DOUBLE:
605 if (GET_MODE (to) != VOIDmode)
606 break;
608 part = offset >= HOST_BITS_PER_WIDE_INT;
609 if ((BITS_PER_WORD > HOST_BITS_PER_WIDE_INT
610 && BYTES_BIG_ENDIAN)
611 || (BITS_PER_WORD <= HOST_BITS_PER_WIDE_INT
612 && WORDS_BIG_ENDIAN))
613 part = !part;
614 val = part ? CONST_DOUBLE_HIGH (to) : CONST_DOUBLE_LOW (to);
615 offset %= HOST_BITS_PER_WIDE_INT;
617 /* FALLTHROUGH */
618 case CONST_INT:
619 if (GET_CODE (to) == CONST_INT)
620 val = INTVAL (to);
623 /* Avoid creating bogus SUBREGs */
624 enum machine_mode mode = GET_MODE (x);
625 enum machine_mode inner_mode = GET_MODE (from);
627 /* We've already picked the word we want from a double, so
628 pretend this is actually an integer. */
629 if (GET_CODE (to) == CONST_DOUBLE)
630 inner_mode = SImode;
632 if (GET_MODE_CLASS (mode) != MODE_INT)
634 /* Substitute in something that we know won't be
635 recognized. */
636 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
637 validate_change (object, loc, to, 1);
638 return;
641 if (BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
643 if (WORDS_BIG_ENDIAN)
644 offset = GET_MODE_BITSIZE (inner_mode)
645 - GET_MODE_BITSIZE (mode) - offset;
646 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
647 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
648 offset = offset + BITS_PER_WORD - GET_MODE_BITSIZE (mode)
649 - 2 * (offset % BITS_PER_WORD);
652 if (offset >= HOST_BITS_PER_WIDE_INT)
653 to = ((HOST_WIDE_INT) val < 0) ? constm1_rtx : const0_rtx;
654 else
656 val >>= offset;
657 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
658 val = trunc_int_for_mode (val, mode);
659 to = GEN_INT (val);
662 validate_change (object, loc, to, 1);
663 return;
666 default:
667 break;
671 /* Changing mode twice with SUBREG => just change it once,
672 or not at all if changing back to starting mode. */
673 if (GET_CODE (to) == SUBREG
674 && rtx_equal_p (SUBREG_REG (x), from))
676 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
677 && SUBREG_BYTE (x) == 0 && SUBREG_BYTE (to) == 0)
679 validate_change (object, loc, SUBREG_REG (to), 1);
680 return;
683 /* Make sure the 2 byte counts added together are an even unit
684 of x's mode, and combine them if so. Otherwise we run
685 into problems with something like:
686 (subreg:HI (subreg:QI (SI:55) 3) 0)
687 we end up with an odd offset into a HI which is invalid. */
689 if (SUBREG_BYTE (to) % GET_MODE_SIZE (GET_MODE (x)) == 0)
690 validate_change (object, loc,
691 gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
692 SUBREG_BYTE(x) + SUBREG_BYTE (to)),
694 else
695 validate_change (object, loc, to, 1);
697 return;
700 /* If we have a SUBREG of a register that we are replacing and we are
701 replacing it with a MEM, make a new MEM and try replacing the
702 SUBREG with it. Don't do this if the MEM has a mode-dependent address
703 or if we would be widening it. */
705 if (GET_CODE (from) == REG
706 && GET_CODE (to) == MEM
707 && rtx_equal_p (SUBREG_REG (x), from)
708 && ! mode_dependent_address_p (XEXP (to, 0))
709 && ! MEM_VOLATILE_P (to)
710 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
712 int offset = SUBREG_BYTE (x);
713 enum machine_mode mode = GET_MODE (x);
714 rtx new;
716 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
717 MEM_COPY_ATTRIBUTES (new, to);
718 validate_change (object, loc, new, 1);
719 return;
721 break;
723 case ZERO_EXTRACT:
724 case SIGN_EXTRACT:
725 /* If we are replacing a register with memory, try to change the memory
726 to be the mode required for memory in extract operations (this isn't
727 likely to be an insertion operation; if it was, nothing bad will
728 happen, we might just fail in some cases). */
730 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
731 && rtx_equal_p (XEXP (x, 0), from)
732 && GET_CODE (XEXP (x, 1)) == CONST_INT
733 && GET_CODE (XEXP (x, 2)) == CONST_INT
734 && ! mode_dependent_address_p (XEXP (to, 0))
735 && ! MEM_VOLATILE_P (to))
737 enum machine_mode wanted_mode = VOIDmode;
738 enum machine_mode is_mode = GET_MODE (to);
739 int pos = INTVAL (XEXP (x, 2));
741 #ifdef HAVE_extzv
742 if (code == ZERO_EXTRACT)
744 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
745 if (wanted_mode == VOIDmode)
746 wanted_mode = word_mode;
748 #endif
749 #ifdef HAVE_extv
750 if (code == SIGN_EXTRACT)
752 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
753 if (wanted_mode == VOIDmode)
754 wanted_mode = word_mode;
756 #endif
758 /* If we have a narrower mode, we can do something. */
759 if (wanted_mode != VOIDmode
760 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
762 int offset = pos / BITS_PER_UNIT;
763 rtx newmem;
765 /* If the bytes and bits are counted differently, we
766 must adjust the offset. */
767 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
768 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
769 - offset);
771 pos %= GET_MODE_BITSIZE (wanted_mode);
773 newmem = gen_rtx_MEM (wanted_mode,
774 plus_constant (XEXP (to, 0), offset));
775 MEM_COPY_ATTRIBUTES (newmem, to);
777 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
778 validate_change (object, &XEXP (x, 0), newmem, 1);
782 break;
784 default:
785 break;
788 /* For commutative or comparison operations we've already performed
789 replacements. Don't try to perform them again. */
790 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
792 fmt = GET_RTX_FORMAT (code);
793 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
795 if (fmt[i] == 'e')
796 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
797 else if (fmt[i] == 'E')
798 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
799 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
804 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
805 with TO. After all changes have been made, validate by seeing
806 if INSN is still valid. */
809 validate_replace_rtx_subexp (from, to, insn, loc)
810 rtx from, to, insn, *loc;
812 validate_replace_rtx_1 (loc, from, to, insn);
813 return apply_change_group ();
816 /* Try replacing every occurrence of FROM in INSN with TO. After all
817 changes have been made, validate by seeing if INSN is still valid. */
820 validate_replace_rtx (from, to, insn)
821 rtx from, to, insn;
823 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
824 return apply_change_group ();
827 /* Try replacing every occurrence of FROM in INSN with TO. */
829 void
830 validate_replace_rtx_group (from, to, insn)
831 rtx from, to, insn;
833 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
836 /* Function called by note_uses to replace used subexpressions. */
837 struct validate_replace_src_data
839 rtx from; /* Old RTX */
840 rtx to; /* New RTX */
841 rtx insn; /* Insn in which substitution is occurring. */
844 static void
845 validate_replace_src_1 (x, data)
846 rtx *x;
847 void *data;
849 struct validate_replace_src_data *d
850 = (struct validate_replace_src_data *) data;
852 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
855 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
856 SET_DESTs. After all changes have been made, validate by seeing if
857 INSN is still valid. */
860 validate_replace_src (from, to, insn)
861 rtx from, to, insn;
863 struct validate_replace_src_data d;
865 d.from = from;
866 d.to = to;
867 d.insn = insn;
868 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
869 return apply_change_group ();
872 #ifdef HAVE_cc0
873 /* Return 1 if the insn using CC0 set by INSN does not contain
874 any ordered tests applied to the condition codes.
875 EQ and NE tests do not count. */
878 next_insn_tests_no_inequality (insn)
879 rtx insn;
881 register rtx next = next_cc0_user (insn);
883 /* If there is no next insn, we have to take the conservative choice. */
884 if (next == 0)
885 return 0;
887 return ((GET_CODE (next) == JUMP_INSN
888 || GET_CODE (next) == INSN
889 || GET_CODE (next) == CALL_INSN)
890 && ! inequality_comparisons_p (PATTERN (next)));
#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }

  return 1;
}
#endif
919 #endif
920 #endif
922 /* This is used by find_single_use to locate an rtx that contains exactly one
923 use of DEST, which is typically either a REG or CC0. It returns a
924 pointer to the innermost rtx expression containing DEST. Appearances of
925 DEST that are being used to totally replace it are not counted. */
927 static rtx *
928 find_single_use_1 (dest, loc)
929 rtx dest;
930 rtx *loc;
932 rtx x = *loc;
933 enum rtx_code code = GET_CODE (x);
934 rtx *result = 0;
935 rtx *this_result;
936 int i;
937 const char *fmt;
939 switch (code)
941 case CONST_INT:
942 case CONST:
943 case LABEL_REF:
944 case SYMBOL_REF:
945 case CONST_DOUBLE:
946 case CLOBBER:
947 return 0;
949 case SET:
950 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
951 of a REG that occupies all of the REG, the insn uses DEST if
952 it is mentioned in the destination or the source. Otherwise, we
953 need just check the source. */
954 if (GET_CODE (SET_DEST (x)) != CC0
955 && GET_CODE (SET_DEST (x)) != PC
956 && GET_CODE (SET_DEST (x)) != REG
957 && ! (GET_CODE (SET_DEST (x)) == SUBREG
958 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
959 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
960 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
961 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
962 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
963 break;
965 return find_single_use_1 (dest, &SET_SRC (x));
967 case MEM:
968 case SUBREG:
969 return find_single_use_1 (dest, &XEXP (x, 0));
971 default:
972 break;
975 /* If it wasn't one of the common cases above, check each expression and
976 vector of this code. Look for a unique usage of DEST. */
978 fmt = GET_RTX_FORMAT (code);
979 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
981 if (fmt[i] == 'e')
983 if (dest == XEXP (x, i)
984 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
985 && REGNO (dest) == REGNO (XEXP (x, i))))
986 this_result = loc;
987 else
988 this_result = find_single_use_1 (dest, &XEXP (x, i));
990 if (result == 0)
991 result = this_result;
992 else if (this_result)
993 /* Duplicate usage. */
994 return 0;
996 else if (fmt[i] == 'E')
998 int j;
1000 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1002 if (XVECEXP (x, i, j) == dest
1003 || (GET_CODE (dest) == REG
1004 && GET_CODE (XVECEXP (x, i, j)) == REG
1005 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
1006 this_result = loc;
1007 else
1008 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
1010 if (result == 0)
1011 result = this_result;
1012 else if (this_result)
1013 return 0;
1018 return result;
1021 /* See if DEST, produced in INSN, is used only a single time in the
1022 sequel. If so, return a pointer to the innermost rtx expression in which
1023 it is used.
1025 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
1027 This routine will return usually zero either before flow is called (because
1028 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
1029 note can't be trusted).
1031 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
1032 care about REG_DEAD notes or LOG_LINKS.
1034 Otherwise, we find the single use by finding an insn that has a
1035 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
1036 only referenced once in that insn, we know that it must be the first
1037 and last insn referencing DEST. */
1039 rtx *
1040 find_single_use (dest, insn, ploc)
1041 rtx dest;
1042 rtx insn;
1043 rtx *ploc;
1045 rtx next;
1046 rtx *result;
1047 rtx link;
1049 #ifdef HAVE_cc0
1050 if (dest == cc0_rtx)
1052 next = NEXT_INSN (insn);
1053 if (next == 0
1054 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
1055 return 0;
1057 result = find_single_use_1 (dest, &PATTERN (next));
1058 if (result && ploc)
1059 *ploc = next;
1060 return result;
1062 #endif
1064 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
1065 return 0;
1067 for (next = next_nonnote_insn (insn);
1068 next != 0 && GET_CODE (next) != CODE_LABEL;
1069 next = next_nonnote_insn (next))
1070 if (INSN_P (next) && dead_or_set_p (next, dest))
1072 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
1073 if (XEXP (link, 0) == insn)
1074 break;
1076 if (link)
1078 result = find_single_use_1 (dest, &PATTERN (next));
1079 if (ploc)
1080 *ploc = next;
1081 return result;
1085 return 0;
1088 /* Return 1 if OP is a valid general operand for machine mode MODE.
1089 This is either a register reference, a memory reference,
1090 or a constant. In the case of a memory reference, the address
1091 is checked for general validity for the target machine.
1093 Register and memory references must have mode MODE in order to be valid,
1094 but some constants have no machine mode and are valid for any mode.
1096 If MODE is VOIDmode, OP is checked for validity for whatever mode
1097 it has.
1099 The main use of this function is as a predicate in match_operand
1100 expressions in the machine description.
1102 For an explanation of this function's behavior for registers of
1103 class NO_REGS, see the comment for `register_operand'. */
1106 general_operand (op, mode)
1107 register rtx op;
1108 enum machine_mode mode;
1110 register enum rtx_code code = GET_CODE (op);
1112 if (mode == VOIDmode)
1113 mode = GET_MODE (op);
1115 /* Don't accept CONST_INT or anything similar
1116 if the caller wants something floating. */
1117 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1118 && GET_MODE_CLASS (mode) != MODE_INT
1119 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1120 return 0;
1122 if (GET_CODE (op) == CONST_INT
1123 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1124 return 0;
1126 if (CONSTANT_P (op))
1127 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1128 || mode == VOIDmode)
1129 #ifdef LEGITIMATE_PIC_OPERAND_P
1130 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1131 #endif
1132 && LEGITIMATE_CONSTANT_P (op));
1134 /* Except for certain constants with VOIDmode, already checked for,
1135 OP's mode must match MODE if MODE specifies a mode. */
1137 if (GET_MODE (op) != mode)
1138 return 0;
1140 if (code == SUBREG)
1142 #ifdef INSN_SCHEDULING
1143 /* On machines that have insn scheduling, we want all memory
1144 reference to be explicit, so outlaw paradoxical SUBREGs. */
1145 if (GET_CODE (SUBREG_REG (op)) == MEM
1146 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1147 return 0;
1148 #endif
1150 op = SUBREG_REG (op);
1151 code = GET_CODE (op);
1154 if (code == REG)
1155 /* A register whose class is NO_REGS is not a general operand. */
1156 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1157 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1159 if (code == MEM)
1161 register rtx y = XEXP (op, 0);
1163 if (! volatile_ok && MEM_VOLATILE_P (op))
1164 return 0;
1166 if (GET_CODE (y) == ADDRESSOF)
1167 return 1;
1169 /* Use the mem's mode, since it will be reloaded thus. */
1170 mode = GET_MODE (op);
1171 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1174 /* Pretend this is an operand for now; we'll run force_operand
1175 on its replacement in fixup_var_refs_1. */
1176 if (code == ADDRESSOF)
1177 return 1;
1179 return 0;
1181 win:
1182 return 1;
1185 /* Return 1 if OP is a valid memory address for a memory reference
1186 of mode MODE.
1188 The main use of this function is as a predicate in match_operand
1189 expressions in the machine description. */
1192 address_operand (op, mode)
1193 register rtx op;
1194 enum machine_mode mode;
1196 return memory_address_p (mode, op);
1199 /* Return 1 if OP is a register reference of mode MODE.
1200 If MODE is VOIDmode, accept a register in any mode.
1202 The main use of this function is as a predicate in match_operand
1203 expressions in the machine description.
1205 As a special exception, registers whose class is NO_REGS are
1206 not accepted by `register_operand'. The reason for this change
1207 is to allow the representation of special architecture artifacts
1208 (such as a condition code register) without extending the rtl
1209 definitions. Since registers of class NO_REGS cannot be used
1210 as registers in any case where register classes are examined,
1211 it is most consistent to keep this function from accepting them. */
1214 register_operand (op, mode)
1215 register rtx op;
1216 enum machine_mode mode;
1218 if (GET_MODE (op) != mode && mode != VOIDmode)
1219 return 0;
1221 if (GET_CODE (op) == SUBREG)
1223 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1224 because it is guaranteed to be reloaded into one.
1225 Just make sure the MEM is valid in itself.
1226 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1227 but currently it does result from (SUBREG (REG)...) where the
1228 reg went on the stack.) */
1229 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1230 return general_operand (op, mode);
1232 #ifdef CLASS_CANNOT_CHANGE_MODE
1233 if (GET_CODE (SUBREG_REG (op)) == REG
1234 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1235 && (TEST_HARD_REG_BIT
1236 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
1237 REGNO (SUBREG_REG (op))))
1238 && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
1239 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1240 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1241 return 0;
1242 #endif
1244 op = SUBREG_REG (op);
1247 /* If we have an ADDRESSOF, consider it valid since it will be
1248 converted into something that will not be a MEM. */
1249 if (GET_CODE (op) == ADDRESSOF)
1250 return 1;
1252 /* We don't consider registers whose class is NO_REGS
1253 to be a register operand. */
1254 return (GET_CODE (op) == REG
1255 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1256 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1259 /* Return 1 for a register in Pmode; ignore the tested mode. */
1262 pmode_register_operand (op, mode)
1263 rtx op;
1264 enum machine_mode mode ATTRIBUTE_UNUSED;
1266 return register_operand (op, Pmode);
1269 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1270 or a hard register. */
1273 scratch_operand (op, mode)
1274 register rtx op;
1275 enum machine_mode mode;
1277 if (GET_MODE (op) != mode && mode != VOIDmode)
1278 return 0;
1280 return (GET_CODE (op) == SCRATCH
1281 || (GET_CODE (op) == REG
1282 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1285 /* Return 1 if OP is a valid immediate operand for mode MODE.
1287 The main use of this function is as a predicate in match_operand
1288 expressions in the machine description. */
1291 immediate_operand (op, mode)
1292 register rtx op;
1293 enum machine_mode mode;
1295 /* Don't accept CONST_INT or anything similar
1296 if the caller wants something floating. */
1297 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1298 && GET_MODE_CLASS (mode) != MODE_INT
1299 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1300 return 0;
1302 if (GET_CODE (op) == CONST_INT
1303 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1304 return 0;
1306 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1307 result in 0/1. It seems a safe assumption that this is
1308 in range for everyone. */
1309 if (GET_CODE (op) == CONSTANT_P_RTX)
1310 return 1;
1312 return (CONSTANT_P (op)
1313 && (GET_MODE (op) == mode || mode == VOIDmode
1314 || GET_MODE (op) == VOIDmode)
1315 #ifdef LEGITIMATE_PIC_OPERAND_P
1316 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1317 #endif
1318 && LEGITIMATE_CONSTANT_P (op));
1321 /* Returns 1 if OP is an operand that is a CONST_INT. */
1324 const_int_operand (op, mode)
1325 register rtx op;
1326 enum machine_mode mode ATTRIBUTE_UNUSED;
1328 return GET_CODE (op) == CONST_INT;
1331 /* Returns 1 if OP is an operand that is a constant integer or constant
1332 floating-point number. */
1335 const_double_operand (op, mode)
1336 register rtx op;
1337 enum machine_mode mode;
1339 /* Don't accept CONST_INT or anything similar
1340 if the caller wants something floating. */
1341 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1342 && GET_MODE_CLASS (mode) != MODE_INT
1343 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1344 return 0;
1346 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1347 && (mode == VOIDmode || GET_MODE (op) == mode
1348 || GET_MODE (op) == VOIDmode));
1351 /* Return 1 if OP is a general operand that is not an immediate operand. */
1354 nonimmediate_operand (op, mode)
1355 register rtx op;
1356 enum machine_mode mode;
1358 return (general_operand (op, mode) && ! CONSTANT_P (op));
1361 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1364 nonmemory_operand (op, mode)
1365 register rtx op;
1366 enum machine_mode mode;
1368 if (CONSTANT_P (op))
1370 /* Don't accept CONST_INT or anything similar
1371 if the caller wants something floating. */
1372 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1373 && GET_MODE_CLASS (mode) != MODE_INT
1374 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1375 return 0;
1377 if (GET_CODE (op) == CONST_INT
1378 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1379 return 0;
1381 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1382 || mode == VOIDmode)
1383 #ifdef LEGITIMATE_PIC_OPERAND_P
1384 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1385 #endif
1386 && LEGITIMATE_CONSTANT_P (op));
1389 if (GET_MODE (op) != mode && mode != VOIDmode)
1390 return 0;
1392 if (GET_CODE (op) == SUBREG)
1394 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1395 because it is guaranteed to be reloaded into one.
1396 Just make sure the MEM is valid in itself.
1397 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1398 but currently it does result from (SUBREG (REG)...) where the
1399 reg went on the stack.) */
1400 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1401 return general_operand (op, mode);
1402 op = SUBREG_REG (op);
1405 /* We don't consider registers whose class is NO_REGS
1406 to be a register operand. */
1407 return (GET_CODE (op) == REG
1408 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1409 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1412 /* Return 1 if OP is a valid operand that stands for pushing a
1413 value of mode MODE onto the stack.
1415 The main use of this function is as a predicate in match_operand
1416 expressions in the machine description. */
1419 push_operand (op, mode)
1420 rtx op;
1421 enum machine_mode mode;
1423 unsigned int rounded_size = GET_MODE_SIZE (mode);
1425 #ifdef PUSH_ROUNDING
1426 rounded_size = PUSH_ROUNDING (rounded_size);
1427 #endif
1429 if (GET_CODE (op) != MEM)
1430 return 0;
1432 if (mode != VOIDmode && GET_MODE (op) != mode)
1433 return 0;
1435 op = XEXP (op, 0);
1437 if (rounded_size == GET_MODE_SIZE (mode))
1439 if (GET_CODE (op) != STACK_PUSH_CODE)
1440 return 0;
1442 else
1444 if (GET_CODE (op) != PRE_MODIFY
1445 || GET_CODE (XEXP (op, 1)) != PLUS
1446 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1447 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1448 #ifdef STACK_GROWS_DOWNWARD
1449 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1450 #else
1451 || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
1452 #endif
1454 return 0;
1457 return XEXP (op, 0) == stack_pointer_rtx;
1460 /* Return 1 if OP is a valid operand that stands for popping a
1461 value of mode MODE off the stack.
1463 The main use of this function is as a predicate in match_operand
1464 expressions in the machine description. */
1467 pop_operand (op, mode)
1468 rtx op;
1469 enum machine_mode mode;
1471 if (GET_CODE (op) != MEM)
1472 return 0;
1474 if (mode != VOIDmode && GET_MODE (op) != mode)
1475 return 0;
1477 op = XEXP (op, 0);
1479 if (GET_CODE (op) != STACK_POP_CODE)
1480 return 0;
1482 return XEXP (op, 0) == stack_pointer_rtx;
1485 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1488 memory_address_p (mode, addr)
1489 enum machine_mode mode ATTRIBUTE_UNUSED;
1490 register rtx addr;
1492 if (GET_CODE (addr) == ADDRESSOF)
1493 return 1;
1495 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1496 return 0;
1498 win:
1499 return 1;
1502 /* Return 1 if OP is a valid memory reference with mode MODE,
1503 including a valid address.
1505 The main use of this function is as a predicate in match_operand
1506 expressions in the machine description. */
1509 memory_operand (op, mode)
1510 register rtx op;
1511 enum machine_mode mode;
1513 rtx inner;
1515 if (! reload_completed)
1516 /* Note that no SUBREG is a memory operand before end of reload pass,
1517 because (SUBREG (MEM...)) forces reloading into a register. */
1518 return GET_CODE (op) == MEM && general_operand (op, mode);
1520 if (mode != VOIDmode && GET_MODE (op) != mode)
1521 return 0;
1523 inner = op;
1524 if (GET_CODE (inner) == SUBREG)
1525 inner = SUBREG_REG (inner);
1527 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1530 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1531 that is, a memory reference whose address is a general_operand. */
1534 indirect_operand (op, mode)
1535 register rtx op;
1536 enum machine_mode mode;
1538 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1539 if (! reload_completed
1540 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1542 register int offset = SUBREG_BYTE (op);
1543 rtx inner = SUBREG_REG (op);
1545 if (mode != VOIDmode && GET_MODE (op) != mode)
1546 return 0;
1548 /* The only way that we can have a general_operand as the resulting
1549 address is if OFFSET is zero and the address already is an operand
1550 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1551 operand. */
1553 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1554 || (GET_CODE (XEXP (inner, 0)) == PLUS
1555 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1556 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1557 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1560 return (GET_CODE (op) == MEM
1561 && memory_operand (op, mode)
1562 && general_operand (XEXP (op, 0), Pmode));
1565 /* Return 1 if this is a comparison operator. This allows the use of
1566 MATCH_OPERATOR to recognize all the branch insns. */
1569 comparison_operator (op, mode)
1570 register rtx op;
1571 enum machine_mode mode;
1573 return ((mode == VOIDmode || GET_MODE (op) == mode)
1574 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1577 /* If BODY is an insn body that uses ASM_OPERANDS,
1578 return the number of operands (both input and output) in the insn.
1579 Otherwise return -1. */
1582 asm_noperands (body)
1583 rtx body;
1585 switch (GET_CODE (body))
1587 case ASM_OPERANDS:
1588 /* No output operands: return number of input operands. */
1589 return ASM_OPERANDS_INPUT_LENGTH (body);
1590 case SET:
1591 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1592 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1593 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1594 else
1595 return -1;
1596 case PARALLEL:
1597 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1598 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1600 /* Multiple output operands, or 1 output plus some clobbers:
1601 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1602 int i;
1603 int n_sets;
1605 /* Count backwards through CLOBBERs to determine number of SETs. */
1606 for (i = XVECLEN (body, 0); i > 0; i--)
1608 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1609 break;
1610 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1611 return -1;
1614 /* N_SETS is now number of output operands. */
1615 n_sets = i;
1617 /* Verify that all the SETs we have
1618 came from a single original asm_operands insn
1619 (so that invalid combinations are blocked). */
1620 for (i = 0; i < n_sets; i++)
1622 rtx elt = XVECEXP (body, 0, i);
1623 if (GET_CODE (elt) != SET)
1624 return -1;
1625 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1626 return -1;
1627 /* If these ASM_OPERANDS rtx's came from different original insns
1628 then they aren't allowed together. */
1629 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1630 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1631 return -1;
1633 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1634 + n_sets);
1636 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1638 /* 0 outputs, but some clobbers:
1639 body is [(asm_operands ...) (clobber (reg ...))...]. */
1640 int i;
1642 /* Make sure all the other parallel things really are clobbers. */
1643 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1644 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1645 return -1;
1647 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1649 else
1650 return -1;
1651 default:
1652 return -1;
1656 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1657 copy its operands (both input and output) into the vector OPERANDS,
1658 the locations of the operands within the insn into the vector OPERAND_LOCS,
1659 and the constraints for the operands into CONSTRAINTS.
1660 Write the modes of the operands into MODES.
1661 Return the assembler-template.
1663 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1664 we don't store that info. */
1666 const char *
1667 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1668 rtx body;
1669 rtx *operands;
1670 rtx **operand_locs;
1671 const char **constraints;
1672 enum machine_mode *modes;
1674 register int i;
1675 int noperands;
1676 const char *template = 0;
1678 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1680 rtx asmop = SET_SRC (body);
1681 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1683 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1685 for (i = 1; i < noperands; i++)
1687 if (operand_locs)
1688 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1689 if (operands)
1690 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1691 if (constraints)
1692 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1693 if (modes)
1694 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1697 /* The output is in the SET.
1698 Its constraint is in the ASM_OPERANDS itself. */
1699 if (operands)
1700 operands[0] = SET_DEST (body);
1701 if (operand_locs)
1702 operand_locs[0] = &SET_DEST (body);
1703 if (constraints)
1704 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1705 if (modes)
1706 modes[0] = GET_MODE (SET_DEST (body));
1707 template = ASM_OPERANDS_TEMPLATE (asmop);
1709 else if (GET_CODE (body) == ASM_OPERANDS)
1711 rtx asmop = body;
1712 /* No output operands: BODY is (asm_operands ....). */
1714 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1716 /* The input operands are found in the 1st element vector. */
1717 /* Constraints for inputs are in the 2nd element vector. */
1718 for (i = 0; i < noperands; i++)
1720 if (operand_locs)
1721 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1722 if (operands)
1723 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1724 if (constraints)
1725 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1726 if (modes)
1727 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1729 template = ASM_OPERANDS_TEMPLATE (asmop);
1731 else if (GET_CODE (body) == PARALLEL
1732 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1734 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1735 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1736 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1737 int nout = 0; /* Does not include CLOBBERs. */
1739 /* At least one output, plus some CLOBBERs. */
1741 /* The outputs are in the SETs.
1742 Their constraints are in the ASM_OPERANDS itself. */
1743 for (i = 0; i < nparallel; i++)
1745 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1746 break; /* Past last SET */
1748 if (operands)
1749 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1750 if (operand_locs)
1751 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1752 if (constraints)
1753 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1754 if (modes)
1755 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1756 nout++;
1759 for (i = 0; i < nin; i++)
1761 if (operand_locs)
1762 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1763 if (operands)
1764 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1765 if (constraints)
1766 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1767 if (modes)
1768 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1771 template = ASM_OPERANDS_TEMPLATE (asmop);
1773 else if (GET_CODE (body) == PARALLEL
1774 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1776 /* No outputs, but some CLOBBERs. */
1778 rtx asmop = XVECEXP (body, 0, 0);
1779 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1781 for (i = 0; i < nin; i++)
1783 if (operand_locs)
1784 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1785 if (operands)
1786 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1787 if (constraints)
1788 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1789 if (modes)
1790 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1793 template = ASM_OPERANDS_TEMPLATE (asmop);
1796 return template;
1799 /* Check if an asm_operand matches it's constraints.
1800 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1803 asm_operand_ok (op, constraint)
1804 rtx op;
1805 const char *constraint;
1807 int result = 0;
1809 /* Use constrain_operands after reload. */
1810 if (reload_completed)
1811 abort ();
1813 while (*constraint)
1815 char c = *constraint++;
1816 switch (c)
1818 case '=':
1819 case '+':
1820 case '*':
1821 case '%':
1822 case '?':
1823 case '!':
1824 case '#':
1825 case '&':
1826 case ',':
1827 break;
1829 case '0': case '1': case '2': case '3': case '4':
1830 case '5': case '6': case '7': case '8': case '9':
1831 /* For best results, our caller should have given us the
1832 proper matching constraint, but we can't actually fail
1833 the check if they didn't. Indicate that results are
1834 inconclusive. */
1835 result = -1;
1836 break;
1838 case 'p':
1839 if (address_operand (op, VOIDmode))
1840 return 1;
1841 break;
1843 case 'm':
1844 case 'V': /* non-offsettable */
1845 if (memory_operand (op, VOIDmode))
1846 return 1;
1847 break;
1849 case 'o': /* offsettable */
1850 if (offsettable_nonstrict_memref_p (op))
1851 return 1;
1852 break;
1854 case '<':
1855 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1856 excepting those that expand_call created. Further, on some
1857 machines which do not have generalized auto inc/dec, an inc/dec
1858 is not a memory_operand.
1860 Match any memory and hope things are resolved after reload. */
1862 if (GET_CODE (op) == MEM
1863 && (1
1864 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1865 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1866 return 1;
1867 break;
1869 case '>':
1870 if (GET_CODE (op) == MEM
1871 && (1
1872 || GET_CODE (XEXP (op, 0)) == PRE_INC
1873 || GET_CODE (XEXP (op, 0)) == POST_INC))
1874 return 1;
1875 break;
1877 case 'E':
1878 #ifndef REAL_ARITHMETIC
1879 /* Match any floating double constant, but only if
1880 we can examine the bits of it reliably. */
1881 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1882 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1883 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1884 break;
1885 #endif
1886 /* FALLTHRU */
1888 case 'F':
1889 if (GET_CODE (op) == CONST_DOUBLE)
1890 return 1;
1891 break;
1893 case 'G':
1894 if (GET_CODE (op) == CONST_DOUBLE
1895 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1896 return 1;
1897 break;
1898 case 'H':
1899 if (GET_CODE (op) == CONST_DOUBLE
1900 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1901 return 1;
1902 break;
1904 case 's':
1905 if (GET_CODE (op) == CONST_INT
1906 || (GET_CODE (op) == CONST_DOUBLE
1907 && GET_MODE (op) == VOIDmode))
1908 break;
1909 /* FALLTHRU */
1911 case 'i':
1912 if (CONSTANT_P (op)
1913 #ifdef LEGITIMATE_PIC_OPERAND_P
1914 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1915 #endif
1917 return 1;
1918 break;
1920 case 'n':
1921 if (GET_CODE (op) == CONST_INT
1922 || (GET_CODE (op) == CONST_DOUBLE
1923 && GET_MODE (op) == VOIDmode))
1924 return 1;
1925 break;
1927 case 'I':
1928 if (GET_CODE (op) == CONST_INT
1929 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1930 return 1;
1931 break;
1932 case 'J':
1933 if (GET_CODE (op) == CONST_INT
1934 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1935 return 1;
1936 break;
1937 case 'K':
1938 if (GET_CODE (op) == CONST_INT
1939 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1940 return 1;
1941 break;
1942 case 'L':
1943 if (GET_CODE (op) == CONST_INT
1944 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1945 return 1;
1946 break;
1947 case 'M':
1948 if (GET_CODE (op) == CONST_INT
1949 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1950 return 1;
1951 break;
1952 case 'N':
1953 if (GET_CODE (op) == CONST_INT
1954 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1955 return 1;
1956 break;
1957 case 'O':
1958 if (GET_CODE (op) == CONST_INT
1959 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1960 return 1;
1961 break;
1962 case 'P':
1963 if (GET_CODE (op) == CONST_INT
1964 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1965 return 1;
1966 break;
1968 case 'X':
1969 return 1;
1971 case 'g':
1972 if (general_operand (op, VOIDmode))
1973 return 1;
1974 break;
1976 default:
1977 /* For all other letters, we first check for a register class,
1978 otherwise it is an EXTRA_CONSTRAINT. */
1979 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1981 case 'r':
1982 if (GET_MODE (op) == BLKmode)
1983 break;
1984 if (register_operand (op, VOIDmode))
1985 return 1;
1987 #ifdef EXTRA_CONSTRAINT
1988 if (EXTRA_CONSTRAINT (op, c))
1989 return 1;
1990 #endif
1991 break;
1995 return result;
1998 /* Given an rtx *P, if it is a sum containing an integer constant term,
1999 return the location (type rtx *) of the pointer to that constant term.
2000 Otherwise, return a null pointer. */
2002 static rtx *
2003 find_constant_term_loc (p)
2004 rtx *p;
2006 register rtx *tem;
2007 register enum rtx_code code = GET_CODE (*p);
2009 /* If *P IS such a constant term, P is its location. */
2011 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
2012 || code == CONST)
2013 return p;
2015 /* Otherwise, if not a sum, it has no constant term. */
2017 if (GET_CODE (*p) != PLUS)
2018 return 0;
2020 /* If one of the summands is constant, return its location. */
2022 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2023 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2024 return p;
2026 /* Otherwise, check each summand for containing a constant term. */
2028 if (XEXP (*p, 0) != 0)
2030 tem = find_constant_term_loc (&XEXP (*p, 0));
2031 if (tem != 0)
2032 return tem;
2035 if (XEXP (*p, 1) != 0)
2037 tem = find_constant_term_loc (&XEXP (*p, 1));
2038 if (tem != 0)
2039 return tem;
2042 return 0;
2045 /* Return 1 if OP is a memory reference
2046 whose address contains no side effects
2047 and remains valid after the addition
2048 of a positive integer less than the
2049 size of the object being referenced.
2051 We assume that the original address is valid and do not check it.
2053 This uses strict_memory_address_p as a subroutine, so
2054 don't use it before reload. */
2057 offsettable_memref_p (op)
2058 rtx op;
2060 return ((GET_CODE (op) == MEM)
2061 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
2064 /* Similar, but don't require a strictly valid mem ref:
2065 consider pseudo-regs valid as index or base regs. */
2068 offsettable_nonstrict_memref_p (op)
2069 rtx op;
2071 return ((GET_CODE (op) == MEM)
2072 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
2075 /* Return 1 if Y is a memory address which contains no side effects
2076 and would remain valid after the addition of a positive integer
2077 less than the size of that mode.
2079 We assume that the original address is valid and do not check it.
2080 We do check that it is valid for narrower modes.
2082 If STRICTP is nonzero, we require a strictly valid address,
2083 for the sake of use in reload.c. */
2086 offsettable_address_p (strictp, mode, y)
2087 int strictp;
2088 enum machine_mode mode;
2089 register rtx y;
2091 register enum rtx_code ycode = GET_CODE (y);
2092 register rtx z;
2093 rtx y1 = y;
2094 rtx *y2;
2095 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
2096 (strictp ? strict_memory_address_p : memory_address_p);
2097 unsigned int mode_sz = GET_MODE_SIZE (mode);
2099 if (CONSTANT_ADDRESS_P (y))
2100 return 1;
2102 /* Adjusting an offsettable address involves changing to a narrower mode.
2103 Make sure that's OK. */
2105 if (mode_dependent_address_p (y))
2106 return 0;
2108 /* ??? How much offset does an offsettable BLKmode reference need?
2109 Clearly that depends on the situation in which it's being used.
2110 However, the current situation in which we test 0xffffffff is
2111 less than ideal. Caveat user. */
2112 if (mode_sz == 0)
2113 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2115 /* If the expression contains a constant term,
2116 see if it remains valid when max possible offset is added. */
2118 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2120 int good;
2122 y1 = *y2;
2123 *y2 = plus_constant (*y2, mode_sz - 1);
2124 /* Use QImode because an odd displacement may be automatically invalid
2125 for any wider mode. But it should be valid for a single byte. */
2126 good = (*addressp) (QImode, y);
2128 /* In any case, restore old contents of memory. */
2129 *y2 = y1;
2130 return good;
2133 if (GET_RTX_CLASS (ycode) == 'a')
2134 return 0;
2136 /* The offset added here is chosen as the maximum offset that
2137 any instruction could need to add when operating on something
2138 of the specified mode. We assume that if Y and Y+c are
2139 valid addresses then so is Y+d for all 0<d<c. */
2141 z = plus_constant_for_output (y, mode_sz - 1);
2143 /* Use QImode because an odd displacement may be automatically invalid
2144 for any wider mode. But it should be valid for a single byte. */
2145 return (*addressp) (QImode, z);
2148 /* Return 1 if ADDR is an address-expression whose effect depends
2149 on the mode of the memory reference it is used in.
2151 Autoincrement addressing is a typical example of mode-dependence
2152 because the amount of the increment depends on the mode. */
2155 mode_dependent_address_p (addr)
2156 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2158 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2159 return 0;
2160 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2161 win: ATTRIBUTE_UNUSED_LABEL
2162 return 1;
2165 /* Return 1 if OP is a general operand
2166 other than a memory ref with a mode dependent address. */
2169 mode_independent_operand (op, mode)
2170 enum machine_mode mode;
2171 rtx op;
2173 rtx addr;
2175 if (! general_operand (op, mode))
2176 return 0;
2178 if (GET_CODE (op) != MEM)
2179 return 1;
2181 addr = XEXP (op, 0);
2182 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2183 return 1;
2184 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2185 lose: ATTRIBUTE_UNUSED_LABEL
2186 return 0;
2189 /* Given an operand OP that is a valid memory reference which
2190 satisfies offsettable_memref_p, return a new memory reference whose
2191 address has been adjusted by OFFSET. OFFSET should be positive and
2192 less than the size of the object referenced. */
2195 adj_offsettable_operand (op, offset)
2196 rtx op;
2197 int offset;
2199 register enum rtx_code code = GET_CODE (op);
2201 if (code == MEM)
2203 register rtx y = XEXP (op, 0);
2204 register rtx new;
2206 if (CONSTANT_ADDRESS_P (y))
2208 new = gen_rtx_MEM (GET_MODE (op),
2209 plus_constant_for_output (y, offset));
2210 MEM_COPY_ATTRIBUTES (new, op);
2211 return new;
2214 if (GET_CODE (y) == PLUS)
2216 rtx z = y;
2217 register rtx *const_loc;
2219 op = copy_rtx (op);
2220 z = XEXP (op, 0);
2221 const_loc = find_constant_term_loc (&z);
2222 if (const_loc)
2224 *const_loc = plus_constant_for_output (*const_loc, offset);
2225 return op;
2229 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
2230 MEM_COPY_ATTRIBUTES (new, op);
2231 return new;
2233 abort ();
2236 /* Like extract_insn, but save insn extracted and don't extract again, when
2237 called again for the same insn expecting that recog_data still contain the
2238 valid information. This is used primary by gen_attr infrastructure that
2239 often does extract insn again and again. */
2240 void
2241 extract_insn_cached (insn)
2242 rtx insn;
2244 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2245 return;
2246 extract_insn (insn);
2247 recog_data.insn = insn;
2249 /* Do cached extract_insn, constrain_operand and complain about failures.
2250 Used by insn_attrtab. */
2251 void
2252 extract_constrain_insn_cached (insn)
2253 rtx insn;
2255 extract_insn_cached (insn);
2256 if (which_alternative == -1
2257 && !constrain_operands (reload_completed))
2258 fatal_insn_not_found (insn);
2260 /* Do cached constrain_operand and complain about failures. */
2262 constrain_operands_cached (strict)
2263 int strict;
2265 if (which_alternative == -1)
2266 return constrain_operands (strict);
2267 else
2268 return 1;
2271 /* Analyze INSN and fill in recog_data. */
2273 void
2274 extract_insn (insn)
2275 rtx insn;
2277 int i;
2278 int icode;
2279 int noperands;
2280 rtx body = PATTERN (insn);
     /* Reset the global recog_data; the cases below fill it in.  */
2282 recog_data.insn = NULL;
2283 recog_data.n_operands = 0;
2284 recog_data.n_alternatives = 0;
2285 recog_data.n_dups = 0;
2286 which_alternative = -1;
2288 switch (GET_CODE (body))
     /* These pattern kinds carry no operands or constraints at all.  */
2290 case USE:
2291 case CLOBBER:
2292 case ASM_INPUT:
2293 case ADDR_VEC:
2294 case ADDR_DIFF_VEC:
2295 return;
2297 case SET:
2298 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2299 goto asm_insn;
2300 else
2301 goto normal_insn;
2302 case PARALLEL:
2303 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2304 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2305 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2306 goto asm_insn;
2307 else
2308 goto normal_insn;
2309 case ASM_OPERANDS:
2310 asm_insn:
2311 recog_data.n_operands = noperands = asm_noperands (body);
2312 if (noperands >= 0)
2314 /* This insn is an `asm' with operands. */
2316 /* expand_asm_operands makes sure there aren't too many operands. */
2317 if (noperands > MAX_RECOG_OPERANDS)
2318 abort ();
2320 /* Now get the operand values and constraints out of the insn. */
2321 decode_asm_operands (body, recog_data.operand,
2322 recog_data.operand_loc,
2323 recog_data.constraints,
2324 recog_data.operand_mode);
2325 if (noperands > 0)
     /* Count the alternatives: one more than the number of commas in
        the first operand's constraint string.  */
2327 const char *p = recog_data.constraints[0];
2328 recog_data.n_alternatives = 1;
2329 while (*p)
2330 recog_data.n_alternatives += (*p++ == ',');
2332 break;
     /* asm_noperands was negative: an asm pattern we cannot decode.  */
2334 fatal_insn_not_found (insn);
2336 default:
2337 normal_insn:
2338 /* Ordinary insn: recognize it, get the operands via insn_extract
2339 and get the constraints. */
2341 icode = recog_memoized (insn);
2342 if (icode < 0)
2343 fatal_insn_not_found (insn);
2345 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2346 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2347 recog_data.n_dups = insn_data[icode].n_dups;
2349 insn_extract (insn);
2351 for (i = 0; i < noperands; i++)
2353 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2354 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2355 /* VOIDmode match_operands gets mode from their real operand. */
2356 if (recog_data.operand_mode[i] == VOIDmode)
2357 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
     /* Classify each operand as OP_OUT, OP_INOUT or OP_IN from the first
        character of its constraint string.  */
2360 for (i = 0; i < noperands; i++)
2361 recog_data.operand_type[i]
2362 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2363 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2364 : OP_IN);
2366 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2367 abort ();
2370 /* After calling extract_insn, you can use this function to extract some
2371 information from the constraint strings into a more usable form.
2372 The collected data is stored in recog_op_alt. */
2373 void
2374 preprocess_constraints ()
2376 int i;
2378 memset (recog_op_alt, 0, sizeof recog_op_alt);
2379 for (i = 0; i < recog_data.n_operands; i++)
2381 int j;
2382 struct operand_alternative *op_alt;
2383 const char *p = recog_data.constraints[i];
2385 op_alt = recog_op_alt[i];
     /* For each alternative J, parse operand I's constraint letters into
        the op_alt[j] summary record.  */
2387 for (j = 0; j < recog_data.n_alternatives; j++)
2389 op_alt[j].class = NO_REGS;
2390 op_alt[j].constraint = p;
2391 op_alt[j].matches = -1;
2392 op_alt[j].matched = -1;
     /* An empty constraint or empty alternative accepts anything.  */
2394 if (*p == '\0' || *p == ',')
2396 op_alt[j].anything_ok = 1;
2397 continue;
     /* Scan the letters of alternative J, up to the next ',' or NUL.  */
2400 for (;;)
2402 char c = *p++;
2403 if (c == '#')
     /* '#' hides the rest up to ',' from constraint checking.
        NOTE(review): a line containing only the `do' keyword appears to
        have been lost by extraction between these two lines.  */
2405 c = *p++;
2406 while (c != ',' && c != '\0');
2407 if (c == ',' || c == '\0')
2408 break;
2410 switch (c)
2412 case '=': case '+': case '*': case '%':
2413 case 'E': case 'F': case 'G': case 'H':
2414 case 's': case 'i': case 'n':
2415 case 'I': case 'J': case 'K': case 'L':
2416 case 'M': case 'N': case 'O': case 'P':
2417 /* These don't say anything we care about. */
2418 break;
2420 case '?':
2421 op_alt[j].reject += 6;
2422 break;
2423 case '!':
2424 op_alt[j].reject += 600;
2425 break;
2426 case '&':
2427 op_alt[j].earlyclobber = 1;
2428 break;
     /* A digit: operand I must match that earlier operand.  Record the
        pairing in both operands' records for this alternative.  */
2430 case '0': case '1': case '2': case '3': case '4':
2431 case '5': case '6': case '7': case '8': case '9':
2432 op_alt[j].matches = c - '0';
2433 recog_op_alt[op_alt[j].matches][j].matched = i;
2434 break;
2436 case 'm':
2437 op_alt[j].memory_ok = 1;
2438 break;
2439 case '<':
2440 op_alt[j].decmem_ok = 1;
2441 break;
2442 case '>':
2443 op_alt[j].incmem_ok = 1;
2444 break;
2445 case 'V':
2446 op_alt[j].nonoffmem_ok = 1;
2447 break;
2448 case 'o':
2449 op_alt[j].offmem_ok = 1;
2450 break;
2451 case 'X':
2452 op_alt[j].anything_ok = 1;
2453 break;
2455 case 'p':
2456 op_alt[j].is_address = 1;
2457 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2458 break;
2460 case 'g': case 'r':
2461 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2462 break;
     /* Any other letter names a machine-specific register class; union
        it into the class accepted by this alternative.  */
2464 default:
2465 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2466 break;
2473 /* Check the operands of an insn against the insn's operand constraints
2474 and return 1 if they are valid.
2475 The information about the insn's operands, constraints, operand modes
2476 etc. is obtained from the global variables set up by extract_insn.
2478 WHICH_ALTERNATIVE is set to a number which indicates which
2479 alternative of constraints was matched: 0 for the first alternative,
2480 1 for the next, etc.
2482 In addition, when two operands are required to match
2483 and it happens that the output operand is (reg) while the
2484 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2485 make the output operand look like the input.
2486 This is because the output operand is the one the template will print.
2488 This is used in final, just before printing the assembler code and by
2489 the routines that determine an insn's attribute.
2491 If STRICT is a positive non-zero value, it means that we have been
2492 called after reload has been completed. In that case, we must
2493 do all checks strictly. If it is zero, it means that we have been called
2494 before reload has completed. In that case, we first try to see if we can
2495 find an alternative that matches strictly. If not, we try again, this
2496 time assuming that reload will fix up the insn. This provides a "best
2497 guess" for the alternative and is used to compute attributes of insns prior
2498 to reload. A negative value of STRICT is used for this internal call. */
/* A pair of operand numbers recorded by constrain_operands when a digit
   constraint matched "funnily" (operands_match_p returned 2): operand
   OTHER is later overwritten with operand THIS, so the output operand
   is printed like the matching pre/post-inc/dec input operand.  */
2500 struct funny_match
2502 int this, other;
/* NOTE(review): the return-type line (presumably `int') was lost in
   extraction; the function returns nonzero iff some alternative's
   constraints are satisfied.  Several brace-only and `do' lines are also
   missing below; the loop over alternatives is a do-while matching the
   `while (which_alternative < recog_data.n_alternatives)' at the end.  */
2506 constrain_operands (strict)
2507 int strict;
2509 const char *constraints[MAX_RECOG_OPERANDS];
2510 int matching_operands[MAX_RECOG_OPERANDS];
2511 int earlyclobber[MAX_RECOG_OPERANDS];
2512 register int c;
2514 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2515 int funny_match_index;
2517 which_alternative = 0;
2518 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2519 return 1;
     /* Take private copies of the constraint pointers; they are advanced
        alternative by alternative as the outer loop iterates.  */
2521 for (c = 0; c < recog_data.n_operands; c++)
2523 constraints[c] = recog_data.constraints[c];
2524 matching_operands[c] = -1;
     /* Body of the do-while over alternatives begins here.  */
2529 register int opno;
2530 int lose = 0;
2531 funny_match_index = 0;
2533 for (opno = 0; opno < recog_data.n_operands; opno++)
2535 register rtx op = recog_data.operand[opno];
2536 enum machine_mode mode = GET_MODE (op);
2537 register const char *p = constraints[opno];
2538 int offset = 0;
2539 int win = 0;
2540 int val;
2542 earlyclobber[opno] = 0;
2544 /* A unary operator may be accepted by the predicate, but it
2545 is irrelevant for matching constraints. */
2546 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2547 op = XEXP (op, 0);
     /* Look through SUBREGs of hard registers, remembering the register
        number offset so class checks test the right hard reg.  */
2549 if (GET_CODE (op) == SUBREG)
2551 if (GET_CODE (SUBREG_REG (op)) == REG
2552 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2553 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2554 GET_MODE (SUBREG_REG (op)),
2555 SUBREG_BYTE (op),
2556 GET_MODE (op));
2557 op = SUBREG_REG (op);
2560 /* An empty constraint or empty alternative
2561 allows anything which matched the pattern. */
2562 if (*p == 0 || *p == ',')
2563 win = 1;
2565 while (*p && (c = *p++) != ',')
2566 switch (c)
2568 case '?': case '!': case '*': case '%':
2569 case '=': case '+':
2570 break;
2572 case '#':
2573 /* Ignore rest of this alternative as far as
2574 constraint checking is concerned. */
2575 while (*p && *p != ',')
2576 p++;
2577 break;
2579 case '&':
2580 earlyclobber[opno] = 1;
2581 break;
2583 case '0': case '1': case '2': case '3': case '4':
2584 case '5': case '6': case '7': case '8': case '9':
2586 /* This operand must be the same as a previous one.
2587 This kind of constraint is used for instructions such
2588 as add when they take only two operands.
2590 Note that the lower-numbered operand is passed first.
2592 If we are not testing strictly, assume that this constraint
2593 will be satisfied. */
2594 if (strict < 0)
2595 val = 1;
2596 else
2598 rtx op1 = recog_data.operand[c - '0'];
2599 rtx op2 = recog_data.operand[opno];
2601 /* A unary operator may be accepted by the predicate,
2602 but it is irrelevant for matching constraints. */
2603 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2604 op1 = XEXP (op1, 0);
2605 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2606 op2 = XEXP (op2, 0);
2608 val = operands_match_p (op1, op2);
2611 matching_operands[opno] = c - '0';
2612 matching_operands[c - '0'] = opno;
2614 if (val != 0)
2615 win = 1;
2616 /* If output is *x and input is *--x,
2617 arrange later to change the output to *--x as well,
2618 since the output op is the one that will be printed. */
2619 if (val == 2 && strict > 0)
2621 funny_match[funny_match_index].this = opno;
2622 funny_match[funny_match_index++].other = c - '0';
2624 break;
2626 case 'p':
2627 /* p is used for address_operands. When we are called by
2628 gen_reload, no one will have checked that the address is
2629 strictly valid, i.e., that all pseudos requiring hard regs
2630 have gotten them. */
2631 if (strict <= 0
2632 || (strict_memory_address_p (recog_data.operand_mode[opno],
2633 op)))
2634 win = 1;
2635 break;
2637 /* No need to check general_operand again;
2638 it was done in insn-recog.c. */
2639 case 'g':
2640 /* Anything goes unless it is a REG and really has a hard reg
2641 but the hard reg is not in the class GENERAL_REGS. */
2642 if (strict < 0
2643 || GENERAL_REGS == ALL_REGS
2644 || GET_CODE (op) != REG
2645 || (reload_in_progress
2646 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2647 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2648 win = 1;
2649 break;
2651 case 'X':
2652 /* This is used for a MATCH_SCRATCH in the cases when
2653 we don't actually need anything. So anything goes
2654 any time. */
2655 win = 1;
2656 break;
2658 case 'm':
2659 if (GET_CODE (op) == MEM
2660 /* Before reload, accept what reload can turn into mem. */
2661 || (strict < 0 && CONSTANT_P (op))
2662 /* During reload, accept a pseudo */
2663 || (reload_in_progress && GET_CODE (op) == REG
2664 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2665 win = 1;
2666 break;
2668 case '<':
2669 if (GET_CODE (op) == MEM
2670 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2671 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2672 win = 1;
2673 break;
2675 case '>':
2676 if (GET_CODE (op) == MEM
2677 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2678 || GET_CODE (XEXP (op, 0)) == POST_INC))
2679 win = 1;
2680 break;
2682 case 'E':
2683 #ifndef REAL_ARITHMETIC
2684 /* Match any CONST_DOUBLE, but only if
2685 we can examine the bits of it reliably. */
2686 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2687 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2688 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2689 break;
2690 #endif
2691 if (GET_CODE (op) == CONST_DOUBLE)
2692 win = 1;
2693 break;
2695 case 'F':
2696 if (GET_CODE (op) == CONST_DOUBLE)
2697 win = 1;
2698 break;
2700 case 'G':
2701 case 'H':
2702 if (GET_CODE (op) == CONST_DOUBLE
2703 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2704 win = 1;
2705 break;
     /* 's' rejects integer constants via the `break', and otherwise
        falls through to the 'i' (any constant) case.  */
2707 case 's':
2708 if (GET_CODE (op) == CONST_INT
2709 || (GET_CODE (op) == CONST_DOUBLE
2710 && GET_MODE (op) == VOIDmode))
2711 break;
2712 case 'i':
2713 if (CONSTANT_P (op))
2714 win = 1;
2715 break;
2717 case 'n':
2718 if (GET_CODE (op) == CONST_INT
2719 || (GET_CODE (op) == CONST_DOUBLE
2720 && GET_MODE (op) == VOIDmode))
2721 win = 1;
2722 break;
2724 case 'I':
2725 case 'J':
2726 case 'K':
2727 case 'L':
2728 case 'M':
2729 case 'N':
2730 case 'O':
2731 case 'P':
2732 if (GET_CODE (op) == CONST_INT
2733 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2734 win = 1;
2735 break;
2737 case 'V':
2738 if (GET_CODE (op) == MEM
2739 && ((strict > 0 && ! offsettable_memref_p (op))
2740 || (strict < 0
2741 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2742 || (reload_in_progress
2743 && !(GET_CODE (op) == REG
2744 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2745 win = 1;
2746 break;
2748 case 'o':
2749 if ((strict > 0 && offsettable_memref_p (op))
2750 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2751 /* Before reload, accept what reload can handle. */
2752 || (strict < 0
2753 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2754 /* During reload, accept a pseudo */
2755 || (reload_in_progress && GET_CODE (op) == REG
2756 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2757 win = 1;
2758 break;
     /* Any remaining letter names a register class, or a
        machine-specific constraint via EXTRA_CONSTRAINT.  */
2760 default:
2762 enum reg_class class;
2764 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2765 if (class != NO_REGS)
2767 if (strict < 0
2768 || (strict == 0
2769 && GET_CODE (op) == REG
2770 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2771 || (strict == 0 && GET_CODE (op) == SCRATCH)
2772 || (GET_CODE (op) == REG
2773 && reg_fits_class_p (op, class, offset, mode)))
2774 win = 1;
2776 #ifdef EXTRA_CONSTRAINT
2777 else if (EXTRA_CONSTRAINT (op, c))
2778 win = 1;
2779 #endif
2780 break;
2784 constraints[opno] = p;
2785 /* If this operand did not win somehow,
2786 this alternative loses. */
2787 if (! win)
2788 lose = 1;
2790 /* This alternative won; the operands are ok.
2791 Change whichever operands this alternative says to change. */
2792 if (! lose)
2794 int opno, eopno;
2796 /* See if any earlyclobber operand conflicts with some other
2797 operand. */
2799 if (strict > 0)
2800 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2801 /* Ignore earlyclobber operands now in memory,
2802 because we would often report failure when we have
2803 two memory operands, one of which was formerly a REG. */
2804 if (earlyclobber[eopno]
2805 && GET_CODE (recog_data.operand[eopno]) == REG)
2806 for (opno = 0; opno < recog_data.n_operands; opno++)
2807 if ((GET_CODE (recog_data.operand[opno]) == MEM
2808 || recog_data.operand_type[opno] != OP_OUT)
2809 && opno != eopno
2810 /* Ignore things like match_operator operands. */
2811 && *recog_data.constraints[opno] != 0
2812 && ! (matching_operands[opno] == eopno
2813 && operands_match_p (recog_data.operand[opno],
2814 recog_data.operand[eopno]))
2815 && ! safe_from_earlyclobber (recog_data.operand[opno],
2816 recog_data.operand[eopno]))
2817 lose = 1;
2819 if (! lose)
     /* Apply the deferred "funny match" substitutions recorded above.  */
2821 while (--funny_match_index >= 0)
2823 recog_data.operand[funny_match[funny_match_index].other]
2824 = recog_data.operand[funny_match[funny_match_index].this];
2827 return 1;
     /* This alternative lost; advance to the next one.  */
2831 which_alternative++;
2833 while (which_alternative < recog_data.n_alternatives);
2835 which_alternative = -1;
2836 /* If we are about to reject this, but we are not to test strictly,
2837 try a very loose test. Only return failure if it fails also. */
2838 if (strict == 0)
2839 return constrain_operands (-1);
2840 else
2841 return 0;
2844 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2845 is a hard reg in class CLASS when its regno is offset by OFFSET
2846 and changed to mode MODE.
2847 If REG occupies multiple hard regs, all of them must be in CLASS. */
2850 reg_fits_class_p (operand, class, offset, mode)
2851 rtx operand;
2852 register enum reg_class class;
2853 int offset;
2854 enum machine_mode mode;
2856 register int regno = REGNO (operand);
2857 if (regno < FIRST_PSEUDO_REGISTER
2858 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2859 regno + offset))
2861 register int sr;
2862 regno += offset;
2863 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2864 sr > 0; sr--)
2865 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2866 regno + sr))
2867 break;
2868 return sr == 0;
2871 return 0;
2874 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2876 void
2877 split_all_insns (upd_life)
2878 int upd_life;
2880 sbitmap blocks;
2881 int changed;
2882 int i;
     /* BLOCKS records which basic blocks actually had an insn split, so
        life information is only recomputed where needed.  */
2884 blocks = sbitmap_alloc (n_basic_blocks);
2885 sbitmap_zero (blocks);
2886 changed = 0;
2888 for (i = n_basic_blocks - 1; i >= 0; --i)
2890 basic_block bb = BASIC_BLOCK (i);
2891 rtx insn, next;
2893 for (insn = bb->head; insn ; insn = next)
2895 rtx set;
2897 /* Can't use `next_real_insn' because that might go across
2898 CODE_LABELS and short-out basic blocks. */
2899 next = NEXT_INSN (insn);
2900 if (! INSN_P (insn))
     /* NOTE(review): the empty-statement line after this test was lost
        in extraction; non-insns are simply skipped.  */
2903 /* Don't split no-op move insns. These should silently
2904 disappear later in final. Splitting such insns would
2905 break the code that handles REG_NO_CONFLICT blocks. */
2907 else if ((set = single_set (insn)) != NULL
2908 && set_noop_p (set))
2910 /* Nops get in the way while scheduling, so delete them
2911 now if register allocation has already been done. It
2912 is too risky to try to do this before register
2913 allocation, and there are unlikely to be very many
2914 nops then anyways. */
2915 if (reload_completed)
     /* Turn the nop insn into a deleted note in place.  */
2917 PUT_CODE (insn, NOTE);
2918 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2919 NOTE_SOURCE_FILE (insn) = 0;
2922 else
2924 /* Split insns here to get max fine-grain parallelism. */
2925 rtx first = PREV_INSN (insn);
2926 rtx last = try_split (PATTERN (insn), insn, 1);
2928 if (last != insn)
2930 SET_BIT (blocks, i);
2931 changed = 1;
2933 /* try_split returns the NOTE that INSN became. */
2934 PUT_CODE (insn, NOTE);
2935 NOTE_SOURCE_FILE (insn) = 0;
2936 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2938 /* ??? Coddle to md files that generate subregs in post-
2939 reload splitters instead of computing the proper
2940 hard register. */
2941 if (reload_completed && first != last)
2943 first = NEXT_INSN (first);
2944 while (1)
2946 if (INSN_P (first))
2947 cleanup_subreg_operands (first);
2948 if (first == last)
2949 break;
2950 first = NEXT_INSN (first);
     /* If the split replaced the block's last insn, the new last insn
        is the end of the emitted sequence.  */
2954 if (insn == bb->end)
2956 bb->end = last;
2957 break;
2962 if (insn == bb->end)
2963 break;
2966 /* ??? When we're called from just after reload, the CFG is in bad
2967 shape, and we may have fallen off the end. This could be fixed
2968 by having reload not try to delete unreachable code. Otherwise
2969 assert we found the end insn. */
2970 if (insn == NULL && upd_life)
2971 abort ();
2974 if (changed && upd_life)
     /* Recompute local life information only in the affected blocks.  */
2976 compute_bb_for_insn (get_max_uid ());
2977 count_or_remove_death_notes (blocks, 1);
2978 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2981 sbitmap_free (blocks);
2984 #ifdef HAVE_peephole2
/* One slot of the window of insns the peephole2 pass is currently
   matching: the insn itself plus the registers live just before it.  */
2985 struct peep2_insn_data
2987 rtx insn;
2988 regset live_before;
/* Circular buffer of the current window (one extra slot for the
   end-of-block marker) and the index of the most recent entry.  */
2991 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2992 static int peep2_current;
2994 /* A non-insn marker indicating the last insn of the block.
2995 The live_before regset for this element is correct, indicating
2996 global_live_at_end for the block. */
2997 #define PEEP2_EOB pc_rtx
2999 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3000 does not exist. Used by the recognizer to find the next insn to match
3001 in a multi-insn pattern. */
3004 peep2_next_insn (n)
3005 int n;
3007 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3008 abort ();
3010 n += peep2_current;
3011 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3012 n -= MAX_INSNS_PER_PEEP2 + 1;
3014 if (peep2_insn_data[n].insn == PEEP2_EOB)
3015 return NULL_RTX;
3016 return peep2_insn_data[n].insn;
3019 /* Return true if REGNO is dead before the Nth non-note insn
3020 after `current'. */
3023 peep2_regno_dead_p (ofs, regno)
3024 int ofs;
3025 int regno;
3027 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3028 abort ();
3030 ofs += peep2_current;
3031 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3032 ofs -= MAX_INSNS_PER_PEEP2 + 1;
3034 if (peep2_insn_data[ofs].insn == NULL_RTX)
3035 abort ();
3037 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3040 /* Similarly for a REG. */
3043 peep2_reg_dead_p (ofs, reg)
3044 int ofs;
3045 rtx reg;
3047 int regno, n;
3049 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3050 abort ();
3052 ofs += peep2_current;
3053 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3054 ofs -= MAX_INSNS_PER_PEEP2 + 1;
3056 if (peep2_insn_data[ofs].insn == NULL_RTX)
3057 abort ();
3059 regno = REGNO (reg);
3060 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3061 while (--n >= 0)
3062 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3063 return 0;
3064 return 1;
3067 /* Try to find a hard register of mode MODE, matching the register class in
3068 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3069 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3070 in which case the only condition is that the register must be available
3071 before CURRENT_INSN.
3072 Registers that already have bits set in REG_SET will not be considered.
3074 If an appropriate register is available, it will be returned and the
3075 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3076 returned. */
3079 peep2_find_free_register (from, to, class_str, mode, reg_set)
3080 int from, to;
3081 const char *class_str;
3082 enum machine_mode mode;
3083 HARD_REG_SET *reg_set;
     /* Rotating start point so successive searches spread allocations
        across the register file.  */
3085 static int search_ofs;
3086 enum reg_class class;
3087 HARD_REG_SET live;
3088 int i;
3090 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
3091 abort ();
     /* Convert FROM and TO into circular-buffer indices.  */
3093 from += peep2_current;
3094 if (from >= MAX_INSNS_PER_PEEP2 + 1)
3095 from -= MAX_INSNS_PER_PEEP2 + 1;
3096 to += peep2_current;
3097 if (to >= MAX_INSNS_PER_PEEP2 + 1)
3098 to -= MAX_INSNS_PER_PEEP2 + 1;
3100 if (peep2_insn_data[from].insn == NULL_RTX)
3101 abort ();
3102 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
     /* Accumulate everything live at any point between FROM and TO.  */
3104 while (from != to)
3106 HARD_REG_SET this_live;
3108 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3109 from = 0;
3110 if (peep2_insn_data[from].insn == NULL_RTX)
3111 abort ();
3112 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3113 IOR_HARD_REG_SET (live, this_live);
     /* Only the first letter of CLASS_STR is examined.  */
3116 class = (class_str[0] == 'r' ? GENERAL_REGS
3117 : REG_CLASS_FROM_LETTER (class_str[0]));
3119 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3121 int raw_regno, regno, success, j;
3123 /* Distribute the free registers as much as possible. */
3124 raw_regno = search_ofs + i;
3125 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3126 raw_regno -= FIRST_PSEUDO_REGISTER;
3127 #ifdef REG_ALLOC_ORDER
3128 regno = reg_alloc_order[raw_regno];
3129 #else
3130 regno = raw_regno;
3131 #endif
3133 /* Don't allocate fixed registers. */
3134 if (fixed_regs[regno])
3135 continue;
3136 /* Make sure the register is of the right class. */
3137 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3138 continue;
3139 /* And can support the mode we need. */
3140 if (! HARD_REGNO_MODE_OK (regno, mode))
3141 continue;
3142 /* And that we don't create an extra save/restore. */
3143 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3144 continue;
3145 /* And we don't clobber traceback for noreturn functions. */
3146 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3147 && (! reload_completed || frame_pointer_needed))
3148 continue;
     /* Every hard reg the candidate occupies in MODE must be free.  */
3150 success = 1;
3151 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3153 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3154 || TEST_HARD_REG_BIT (live, regno + j))
3156 success = 0;
3157 break;
3160 if (success)
     /* Claim the register in REG_SET for the caller.  */
3162 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3163 SET_HARD_REG_BIT (*reg_set, regno + j);
3165 /* Start the next search with the next register. */
3166 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3167 raw_regno = 0;
3168 search_ofs = raw_regno;
3170 return gen_rtx_REG (mode, regno);
3174 search_ofs = 0;
3175 return NULL_RTX;
3178 /* Perform the peephole2 optimization pass. */
3180 void
3181 peephole2_optimize (dump_file)
3182 FILE *dump_file ATTRIBUTE_UNUSED;
3184 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3185 rtx insn, prev;
3186 regset live;
3187 int i, b;
3188 #ifdef HAVE_conditional_execution
3189 sbitmap blocks;
3190 int changed;
3191 #endif
3193 /* Initialize the regsets we're going to use. */
3194 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3195 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3196 live = INITIALIZE_REG_SET (rs_heads[i]);
3198 #ifdef HAVE_conditional_execution
3199 blocks = sbitmap_alloc (n_basic_blocks);
3200 sbitmap_zero (blocks);
3201 changed = 0;
3202 #else
3203 count_or_remove_death_notes (NULL, 1);
3204 #endif
     /* Walk the basic blocks from last to first.  */
3206 for (b = n_basic_blocks - 1; b >= 0; --b)
3208 basic_block bb = BASIC_BLOCK (b);
3209 struct propagate_block_info *pbi;
3211 /* Indicate that all slots except the last holds invalid data. */
3212 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3213 peep2_insn_data[i].insn = NULL_RTX;
3215 /* Indicate that the last slot contains live_after data. */
3216 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3217 peep2_current = MAX_INSNS_PER_PEEP2;
3219 /* Start up propagation. */
3220 COPY_REG_SET (live, bb->global_live_at_end);
3221 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3223 #ifdef HAVE_conditional_execution
3224 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3225 #else
3226 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3227 #endif
     /* Scan the block bottom-to-top so liveness can be propagated
        backwards one insn at a time.  */
3229 for (insn = bb->end; ; insn = prev)
3231 prev = PREV_INSN (insn);
3232 if (INSN_P (insn))
3234 rtx try;
3235 int match_len;
3237 /* Record this insn. */
3238 if (--peep2_current < 0)
3239 peep2_current = MAX_INSNS_PER_PEEP2;
3240 peep2_insn_data[peep2_current].insn = insn;
3241 propagate_one_insn (pbi, insn);
3242 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3244 /* Match the peephole. */
3245 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3246 if (try != NULL)
     /* I indexes the last insn of the matched window.  */
3248 i = match_len + peep2_current;
3249 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3250 i -= MAX_INSNS_PER_PEEP2 + 1;
3252 /* Replace the old sequence with the new. */
3253 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3254 try = emit_insn_after (try, prev);
3256 /* Adjust the basic block boundaries. */
3257 if (peep2_insn_data[i].insn == bb->end)
3258 bb->end = try;
3259 if (insn == bb->head)
3260 bb->head = NEXT_INSN (prev);
3262 #ifdef HAVE_conditional_execution
3263 /* With conditional execution, we cannot back up the
3264 live information so easily, since the conditional
3265 death data structures are not so self-contained.
3266 So record that we've made a modification to this
3267 block and update life information at the end. */
3268 SET_BIT (blocks, b);
3269 changed = 1;
3271 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3272 peep2_insn_data[i].insn = NULL_RTX;
3273 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3274 #else
3275 /* Back up lifetime information past the end of the
3276 newly created sequence. */
3277 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3278 i = 0;
3279 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3281 /* Update life information for the new sequence. */
     /* Walk the emitted insns backwards, refilling the window.  */
3284 if (INSN_P (try))
3286 if (--i < 0)
3287 i = MAX_INSNS_PER_PEEP2;
3288 peep2_insn_data[i].insn = try;
3289 propagate_one_insn (pbi, try);
3290 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3292 try = PREV_INSN (try);
3294 while (try != prev);
3296 /* ??? Should verify that LIVE now matches what we
3297 had before the new sequence. */
3299 peep2_current = i;
3300 #endif
3304 if (insn == bb->head)
3305 break;
3308 free_propagate_block_info (pbi);
     /* Release the per-slot regsets allocated at entry.  */
3311 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3312 FREE_REG_SET (peep2_insn_data[i].live_before);
3313 FREE_REG_SET (live);
3315 #ifdef HAVE_conditional_execution
3316 count_or_remove_death_notes (blocks, 1);
3317 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3318 sbitmap_free (blocks);
3319 #endif
3321 #endif /* HAVE_peephole2 */