* alpha.h: NULL_PTR -> NULL.
[official-gcc.git] / gcc / recog.c
blob86483168f7c6349b4d41a43bbe882664c611a7d9
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "hard-reg-set.h"
30 #include "recog.h"
31 #include "regs.h"
32 #include "function.h"
33 #include "flags.h"
34 #include "real.h"
35 #include "toplev.h"
36 #include "basic-block.h"
37 #include "output.h"
38 #include "reload.h"
40 #ifndef STACK_PUSH_CODE
41 #ifdef STACK_GROWS_DOWNWARD
42 #define STACK_PUSH_CODE PRE_DEC
43 #else
44 #define STACK_PUSH_CODE PRE_INC
45 #endif
46 #endif
48 #ifndef STACK_POP_CODE
49 #ifdef STACK_GROWS_DOWNWARD
50 #define STACK_POP_CODE POST_INC
51 #else
52 #define STACK_POP_CODE POST_DEC
53 #endif
54 #endif
56 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
57 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
58 static rtx *find_constant_term_loc PARAMS ((rtx *));
59 static void validate_replace_src_1 PARAMS ((rtx *, void *));
61 /* Nonzero means allow operands to be volatile.
62 This should be 0 if you are generating rtl, such as if you are calling
63 the functions in optabs.c and expmed.c (most of the time).
64 This should be 1 if all valid insns need to be recognized,
65 such as in regclass.c and final.c and reload.c.
67 init_recog and init_recog_no_volatile are responsible for setting this. */
69 int volatile_ok;
71 struct recog_data recog_data;
73 /* Contains a vector of operand_alternative structures for every operand.
74 Set up by preprocess_constraints. */
75 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
77 /* On return from `constrain_operands', indicate which alternative
78 was satisfied. */
80 int which_alternative;
82 /* Nonzero after end of reload pass.
83 Set to 1 or 0 by toplev.c.
84 Controls the significance of (SUBREG (MEM)). */
86 int reload_completed;
88 /* Initialize data used by the function `recog'.
89 This must be called once in the compilation of a function
90 before any insn recognition may be done in the function. */
92 void
93 init_recog_no_volatile ()
95 volatile_ok = 0;
98 void
99 init_recog ()
101 volatile_ok = 1;
104 /* Try recognizing the instruction INSN,
105 and return the code number that results.
106 Remember the code so that repeated calls do not
107 need to spend the time for actual rerecognition.
109 This function is the normal interface to instruction recognition.
110 The automatically-generated function `recog' is normally called
111 through this one. (The only exception is in combine.c.) */
114 recog_memoized_1 (insn)
115 rtx insn;
117 if (INSN_CODE (insn) < 0)
118 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
119 return INSN_CODE (insn);
122 /* Check that X is an insn-body for an `asm' with operands
123 and that the operands mentioned in it are legitimate.  */
/* Returns nonzero iff X is a valid asm body whose operands all satisfy
   their constraints; 0 otherwise.  NOTE(review): this listing appears to
   have lost brace/continuation lines during extraction — compare against
   a pristine recog.c before editing the code itself.  */
126 check_asm_operands (x)
127 rtx x;
129 int noperands;
130 rtx *operands;
131 const char **constraints;
132 int i;
134 /* Post-reload, be more strict with things.  */
135 if (reload_completed)
137 /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
138 extract_insn (make_insn_raw (x));
139 constrain_operands (1);
140 return which_alternative >= 0;
/* asm_noperands is negative when X is not an asm-with-operands at all.  */
143 noperands = asm_noperands (x);
144 if (noperands < 0)
145 return 0;
146 if (noperands == 0)
147 return 1;
149 operands = (rtx *) alloca (noperands * sizeof (rtx));
150 constraints = (const char **) alloca (noperands * sizeof (char *));
152 decode_asm_operands (x, operands, NULL, constraints, NULL);
154 for (i = 0; i < noperands; i++)
156 const char *c = constraints[i];
/* Skip a leading commutative marker, then resolve a pure matching
   constraint ("0".."9") to the constraint of the operand it matches.  */
157 if (c[0] == '%')
158 c++;
159 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
160 c = constraints[c[0] - '0'];
162 if (! asm_operand_ok (operands[i], c))
163 return 0;
166 return 1;
169 /* Static data for the next two routines.  */
/* One queued replacement: *LOC (inside OBJECT's rtl) has already been
   overwritten with the new rtx; OLD remembers what it used to hold so the
   change can be undone, and OLD_CODE is the insn code saved when OBJECT is
   an insn whose INSN_CODE was reset to force rerecognition.  */
171 typedef struct change_t
173 rtx object;
174 int old_code;
175 rtx *loc;
176 rtx old;
177 } change_t;
/* Dynamically grown array of pending changes, and its current capacity.  */
179 static change_t *changes;
180 static int changes_allocated;
/* Number of entries of `changes' currently in use.  */
182 static int num_changes = 0;
184 /* Validate a proposed change to OBJECT.  LOC is the location in the rtl for
185 at which NEW will be placed.  If OBJECT is zero, no validation is done,
186 the change is simply made.
188 Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
189 will be called with the address and mode as parameters.  If OBJECT is
190 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
191 the change in place.
193 IN_GROUP is non-zero if this is part of a group of changes that must be
194 performed as a group.  In that case, the changes will be stored.  The
195 function `apply_change_group' will validate and apply the changes.
197 If IN_GROUP is zero, this is a single change.  Try to recognize the insn
198 or validate the memory reference with the change applied.  If the result
199 is not valid for the machine, suppress the change and return zero.
200 Otherwise, perform the change and return 1.  */
203 validate_change (object, loc, new, in_group)
204 rtx object;
205 rtx *loc;
206 rtx new;
207 int in_group;
209 rtx old = *loc;
/* Identical replacement: trivially valid, nothing to record.  */
211 if (old == new || rtx_equal_p (old, new))
212 return 1;
/* A non-grouped change while a group is pending is a caller bug.  */
214 if (in_group == 0 && num_changes != 0)
215 abort ();
217 *loc = new;
219 /* Save the information describing this change.  */
/* Grow the change queue geometrically when it fills up.  */
220 if (num_changes >= changes_allocated)
222 if (changes_allocated == 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns.  */
225 changes_allocated = MAX_RECOG_OPERANDS * 5;
226 else
227 changes_allocated *= 2;
229 changes =
230 (change_t*) xrealloc (changes,
231 sizeof (change_t) * changes_allocated);
234 changes[num_changes].object = object;
235 changes[num_changes].loc = loc;
236 changes[num_changes].old = old;
238 if (object && GET_CODE (object) != MEM)
240 /* Set INSN_CODE to force rerecognition of insn.  Save old code in
241 case invalid.  */
242 changes[num_changes].old_code = INSN_CODE (object);
243 INSN_CODE (object) = -1;
246 num_changes++;
248 /* If we are making a group of changes, return 1.  Otherwise, validate the
249 change group we made.  */
251 if (in_group)
252 return 1;
253 else
254 return apply_change_group ();
257 /* This subroutine of apply_change_group verifies whether the changes to INSN
258 were valid; i.e. whether INSN can still be recognized.  */
/* Returns 1 if INSN is invalid; on success caches the insn code in
   INSN_CODE (insn) and returns 0.  May rewrite PATTERN (insn) to add
   required CLOBBERs.  */
261 insn_invalid_p (insn)
262 rtx insn;
264 rtx pat = PATTERN (insn);
265 int num_clobbers = 0;
266 /* If we are before reload and the pattern is a SET, see if we can add
267 clobbers.  */
268 int icode = recog (pat, insn,
269 (GET_CODE (pat) == SET
270 && ! reload_completed && ! reload_in_progress)
271 ? &num_clobbers : 0);
272 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
275 /* If this is an asm and the operand aren't legal, then fail.  Likewise if
276 this is not an asm and the insn wasn't recognized.  */
277 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
278 || (!is_asm && icode < 0))
279 return 1;
281 /* If we have to add CLOBBERs, fail if we have to add ones that reference
282 hard registers since our callers can't know if they are live or not.
283 Otherwise, add them.  */
284 if (num_clobbers > 0)
286 rtx newpat;
288 if (added_clobbers_hard_reg_p (icode))
289 return 1;
/* Wrap the original pattern and the extra clobbers in a PARALLEL.  */
291 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
292 XVECEXP (newpat, 0, 0) = pat;
293 add_clobbers (newpat, icode);
294 PATTERN (insn) = pat = newpat;
297 /* After reload, verify that all constraints are satisfied.  */
298 if (reload_completed)
300 extract_insn (insn);
302 if (! constrain_operands (1))
303 return 1;
306 INSN_CODE (insn) = icode;
307 return 0;
310 /* Apply a group of changes previously issued with `validate_change'.
311 Return 1 if all changes are valid, zero otherwise.  */
/* On success the change queue is emptied (the changes stay applied); on
   failure every queued change is rolled back via cancel_changes (0).
   NOTE(review): brace lines were lost in this extraction — consult a
   pristine recog.c before editing the control flow.  */
314 apply_change_group ()
316 int i;
318 /* The changes have been applied and all INSN_CODEs have been reset to force
319 rerecognition.
321 The changes are valid if we aren't given an object, or if we are
322 given a MEM and it still is a valid address, or if this is in insn
323 and it is recognized.  In the latter case, if reload has completed,
324 we also require that the operands meet the constraints for
325 the insn.  */
327 for (i = 0; i < num_changes; i++)
329 rtx object = changes[i].object;
331 if (object == 0)
332 continue;
334 if (GET_CODE (object) == MEM)
336 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
337 break;
339 else if (insn_invalid_p (object))
341 rtx pat = PATTERN (object);
343 /* Perhaps we couldn't recognize the insn because there were
344 extra CLOBBERs at the end.  If so, try to re-recognize
345 without the last CLOBBER (later iterations will cause each of
346 them to be eliminated, in turn).  But don't do this if we
347 have an ASM_OPERAND.  */
348 if (GET_CODE (pat) == PARALLEL
349 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
350 && asm_noperands (PATTERN (object)) < 0)
352 rtx newpat;
354 if (XVECLEN (pat, 0) == 2)
355 newpat = XVECEXP (pat, 0, 0);
356 else
358 int j;
360 newpat
361 = gen_rtx_PARALLEL (VOIDmode,
362 rtvec_alloc (XVECLEN (pat, 0) - 1));
363 for (j = 0; j < XVECLEN (newpat, 0); j++)
364 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
367 /* Add a new change to this group to replace the pattern
368 with this new pattern.  Then consider this change
369 as having succeeded.  The change we added will
370 cause the entire call to fail if things remain invalid.
372 Note that this can lose if a later change than the one
373 we are processing specified &XVECEXP (PATTERN (object), 0, X)
374 but this shouldn't occur.  */
376 validate_change (object, &PATTERN (object), newpat, 1);
378 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
379 /* If this insn is a CLOBBER or USE, it is always valid, but is
380 never recognized.  */
381 continue;
382 else
383 break;
/* If the loop ran to completion every change validated.  */
387 if (i == num_changes)
389 num_changes = 0;
390 return 1;
392 else
394 cancel_changes (0);
395 return 0;
399 /* Return the number of changes so far in the current group. */
402 num_validated_changes ()
404 return num_changes;
407 /* Retract the changes numbered NUM and up. */
409 void
410 cancel_changes (num)
411 int num;
413 int i;
415 /* Back out all the changes. Do this in the opposite order in which
416 they were made. */
417 for (i = num_changes - 1; i >= num; i--)
419 *changes[i].loc = changes[i].old;
420 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
421 INSN_CODE (changes[i].object) = changes[i].old_code;
423 num_changes = num;
426 /* Replace every occurrence of FROM in X with TO.  Mark each change with
427 validate_change passing OBJECT.  */
/* NOTE(review): this extraction has demonstrably lost lines (opening and
   closing braces, and the trailing `1);' continuation of several
   validate_change calls, e.g. after the PLUS and MINUS cases).  The text
   below is kept verbatim; restore from a pristine recog.c before making
   code changes here.  */
429 static void
430 validate_replace_rtx_1 (loc, from, to, object)
431 rtx *loc;
432 rtx from, to, object;
434 register int i, j;
435 register const char *fmt;
436 register rtx x = *loc;
437 enum rtx_code code;
439 if (!x)
440 return;
441 code = GET_CODE (x);
442 /* X matches FROM if it is the same rtx or they are both referring to the
443 same register in the same mode.  Avoid calling rtx_equal_p unless the
444 operands look similar.  */
446 if (x == from
447 || (GET_CODE (x) == REG && GET_CODE (from) == REG
448 && GET_MODE (x) == GET_MODE (from)
449 && REGNO (x) == REGNO (from))
450 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
451 && rtx_equal_p (x, from)))
453 validate_change (object, loc, to, 1);
454 return;
457 /* For commutative or comparison operations, try replacing each argument
458 separately and seeing if we made any changes.  If so, put a constant
459 argument last.*/
460 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
462 int prev_changes = num_changes;
464 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
465 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
466 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
468 validate_change (object, loc,
469 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
470 : swap_condition (code),
471 GET_MODE (x), XEXP (x, 1),
472 XEXP (x, 0)),
474 x = *loc;
475 code = GET_CODE (x);
479 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
480 done the substitution, otherwise we won't.  */
482 switch (code)
484 case PLUS:
485 /* If we have a PLUS whose second operand is now a CONST_INT, use
486 plus_constant to try to simplify it.  */
487 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
488 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
490 return;
492 case MINUS:
493 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
495 validate_change (object, loc,
496 plus_constant (XEXP (x, 0), - INTVAL (to)),
498 return;
500 break;
502 case ZERO_EXTEND:
503 case SIGN_EXTEND:
504 /* In these cases, the operation to be performed depends on the mode
505 of the operand.  If we are replacing the operand with a VOIDmode
506 constant, we lose the information.  So try to simplify the operation
507 in that case.  */
508 if (GET_MODE (to) == VOIDmode
509 && (rtx_equal_p (XEXP (x, 0), from)
510 || (GET_CODE (XEXP (x, 0)) == SUBREG
511 && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
513 rtx new = NULL_RTX;
515 /* If there is a subreg involved, crop to the portion of the
516 constant that we are interested in.  */
517 if (GET_CODE (XEXP (x, 0)) == SUBREG)
519 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
520 to = operand_subword (to,
521 (SUBREG_BYTE (XEXP (x, 0))
522 / UNITS_PER_WORD),
523 0, GET_MODE (from));
524 else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
525 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
526 <= HOST_BITS_PER_WIDE_INT))
528 int i = SUBREG_BYTE (XEXP (x, 0)) * BITS_PER_UNIT;
529 HOST_WIDE_INT valh;
530 unsigned HOST_WIDE_INT vall;
532 if (GET_CODE (to) == CONST_INT)
534 vall = INTVAL (to);
535 valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
537 else
539 vall = CONST_DOUBLE_LOW (to);
540 valh = CONST_DOUBLE_HIGH (to);
543 if (WORDS_BIG_ENDIAN)
544 i = (GET_MODE_BITSIZE (GET_MODE (from))
545 - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
546 if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
547 vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
548 else if (i >= HOST_BITS_PER_WIDE_INT)
549 vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
550 to = GEN_INT (trunc_int_for_mode (vall,
551 GET_MODE (XEXP (x, 0))));
553 else
554 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
557 /* If the above didn't fail, perform the extension from the
558 mode of the operand (and not the mode of FROM).  */
559 if (to)
560 new = simplify_unary_operation (code, GET_MODE (x), to,
561 GET_MODE (XEXP (x, 0)));
563 /* If any of the above failed, substitute in something that
564 we know won't be recognized.  */
565 if (!new)
566 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
568 validate_change (object, loc, new, 1);
569 return;
571 break;
573 case SUBREG:
574 /* In case we are replacing by constant, attempt to simplify it to
575 non-SUBREG expression.  We can't do this later, since the information
576 about inner mode may be lost.  */
577 if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
579 int offset, part;
580 unsigned HOST_WIDE_INT val;
582 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
583 since we are saying that the high bits don't matter.  */
584 if (GET_MODE (to) == VOIDmode
585 && (GET_MODE_SIZE (GET_MODE (x))
586 >= GET_MODE_SIZE (GET_MODE (from))))
588 rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
589 if (new)
591 validate_change (object, loc, new, 1);
592 return;
596 offset = SUBREG_BYTE (x) * BITS_PER_UNIT;
597 switch (GET_CODE (to))
599 case CONST_DOUBLE:
600 if (GET_MODE (to) != VOIDmode)
601 break;
603 part = offset >= HOST_BITS_PER_WIDE_INT;
604 if ((BITS_PER_WORD > HOST_BITS_PER_WIDE_INT
605 && BYTES_BIG_ENDIAN)
606 || (BITS_PER_WORD <= HOST_BITS_PER_WIDE_INT
607 && WORDS_BIG_ENDIAN))
608 part = !part;
609 val = part ? CONST_DOUBLE_HIGH (to) : CONST_DOUBLE_LOW (to);
610 offset %= HOST_BITS_PER_WIDE_INT;
612 /* FALLTHROUGH */
613 case CONST_INT:
614 if (GET_CODE (to) == CONST_INT)
615 val = INTVAL (to);
618 /* Avoid creating bogus SUBREGs */
619 enum machine_mode mode = GET_MODE (x);
620 enum machine_mode inner_mode = GET_MODE (from);
622 /* We've already picked the word we want from a double, so
623 pretend this is actually an integer.  */
624 if (GET_CODE (to) == CONST_DOUBLE)
625 inner_mode = SImode;
627 if (GET_MODE_CLASS (mode) != MODE_INT)
629 /* Substitute in something that we know won't be
630 recognized.  */
631 to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
632 validate_change (object, loc, to, 1);
633 return;
636 if (BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
638 if (WORDS_BIG_ENDIAN)
639 offset = GET_MODE_BITSIZE (inner_mode)
640 - GET_MODE_BITSIZE (mode) - offset;
641 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
642 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
643 offset = offset + BITS_PER_WORD - GET_MODE_BITSIZE (mode)
644 - 2 * (offset % BITS_PER_WORD);
647 if (offset >= HOST_BITS_PER_WIDE_INT)
648 to = ((HOST_WIDE_INT) val < 0) ? constm1_rtx : const0_rtx;
649 else
651 val >>= offset;
652 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
653 val = trunc_int_for_mode (val, mode);
654 to = GEN_INT (val);
657 validate_change (object, loc, to, 1);
658 return;
661 default:
662 break;
666 /* Changing mode twice with SUBREG => just change it once,
667 or not at all if changing back to starting mode.  */
668 if (GET_CODE (to) == SUBREG
669 && rtx_equal_p (SUBREG_REG (x), from))
671 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
672 && SUBREG_BYTE (x) == 0 && SUBREG_BYTE (to) == 0)
674 validate_change (object, loc, SUBREG_REG (to), 1);
675 return;
678 /* Make sure the 2 byte counts added together are an even unit
679 of x's mode, and combine them if so.  Otherwise we run
680 into problems with something like:
681 (subreg:HI (subreg:QI (SI:55) 3) 0)
682 we end up with an odd offset into a HI which is invalid.  */
684 if (SUBREG_BYTE (to) % GET_MODE_SIZE (GET_MODE (x)) == 0)
685 validate_change (object, loc,
686 gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
687 SUBREG_BYTE(x) + SUBREG_BYTE (to)),
689 else
690 validate_change (object, loc, to, 1);
692 return;
695 /* If we have a SUBREG of a register that we are replacing and we are
696 replacing it with a MEM, make a new MEM and try replacing the
697 SUBREG with it.  Don't do this if the MEM has a mode-dependent address
698 or if we would be widening it.  */
700 if (GET_CODE (from) == REG
701 && GET_CODE (to) == MEM
702 && rtx_equal_p (SUBREG_REG (x), from)
703 && ! mode_dependent_address_p (XEXP (to, 0))
704 && ! MEM_VOLATILE_P (to)
705 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
707 int offset = SUBREG_BYTE (x);
708 enum machine_mode mode = GET_MODE (x);
709 rtx new;
711 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
712 MEM_COPY_ATTRIBUTES (new, to);
713 validate_change (object, loc, new, 1);
714 return;
716 break;
718 case ZERO_EXTRACT:
719 case SIGN_EXTRACT:
720 /* If we are replacing a register with memory, try to change the memory
721 to be the mode required for memory in extract operations (this isn't
722 likely to be an insertion operation; if it was, nothing bad will
723 happen, we might just fail in some cases).  */
725 if (GET_CODE (from) == REG && GET_CODE (to) == MEM
726 && rtx_equal_p (XEXP (x, 0), from)
727 && GET_CODE (XEXP (x, 1)) == CONST_INT
728 && GET_CODE (XEXP (x, 2)) == CONST_INT
729 && ! mode_dependent_address_p (XEXP (to, 0))
730 && ! MEM_VOLATILE_P (to))
732 enum machine_mode wanted_mode = VOIDmode;
733 enum machine_mode is_mode = GET_MODE (to);
734 int pos = INTVAL (XEXP (x, 2));
736 #ifdef HAVE_extzv
737 if (code == ZERO_EXTRACT)
739 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
740 if (wanted_mode == VOIDmode)
741 wanted_mode = word_mode;
743 #endif
744 #ifdef HAVE_extv
745 if (code == SIGN_EXTRACT)
747 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
748 if (wanted_mode == VOIDmode)
749 wanted_mode = word_mode;
751 #endif
753 /* If we have a narrower mode, we can do something.  */
754 if (wanted_mode != VOIDmode
755 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
757 int offset = pos / BITS_PER_UNIT;
758 rtx newmem;
760 /* If the bytes and bits are counted differently, we
761 must adjust the offset.  */
762 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
763 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
764 - offset);
766 pos %= GET_MODE_BITSIZE (wanted_mode);
768 newmem = gen_rtx_MEM (wanted_mode,
769 plus_constant (XEXP (to, 0), offset));
770 MEM_COPY_ATTRIBUTES (newmem, to);
772 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
773 validate_change (object, &XEXP (x, 0), newmem, 1);
777 break;
779 default:
780 break;
783 /* For commutative or comparison operations we've already performed
784 replacements.  Don't try to perform them again.  */
785 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
787 fmt = GET_RTX_FORMAT (code);
788 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
790 if (fmt[i] == 'e')
791 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
792 else if (fmt[i] == 'E')
793 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
794 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
799 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
800 with TO. After all changes have been made, validate by seeing
801 if INSN is still valid. */
804 validate_replace_rtx_subexp (from, to, insn, loc)
805 rtx from, to, insn, *loc;
807 validate_replace_rtx_1 (loc, from, to, insn);
808 return apply_change_group ();
811 /* Try replacing every occurrence of FROM in INSN with TO. After all
812 changes have been made, validate by seeing if INSN is still valid. */
815 validate_replace_rtx (from, to, insn)
816 rtx from, to, insn;
818 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
819 return apply_change_group ();
822 /* Try replacing every occurrence of FROM in INSN with TO. */
824 void
825 validate_replace_rtx_group (from, to, insn)
826 rtx from, to, insn;
828 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
831 /* Function called by note_uses to replace used subexpressions.  */
/* Closure passed through note_uses's void * DATA argument.  */
832 struct validate_replace_src_data
834 rtx from; /* Old RTX */
835 rtx to; /* New RTX */
836 rtx insn; /* Insn in which substitution is occurring.  */
/* note_uses callback: unpack the closure and queue the FROM -> TO
   replacement within the use at *X.  */
839 static void
840 validate_replace_src_1 (x, data)
841 rtx *x;
842 void *data;
844 struct validate_replace_src_data *d
845 = (struct validate_replace_src_data *) data;
847 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
850 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
851 SET_DESTs. After all changes have been made, validate by seeing if
852 INSN is still valid. */
855 validate_replace_src (from, to, insn)
856 rtx from, to, insn;
858 struct validate_replace_src_data d;
860 d.from = from;
861 d.to = to;
862 d.insn = insn;
863 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
864 return apply_change_group ();
867 #ifdef HAVE_cc0
868 /* Return 1 if the insn using CC0 set by INSN does not contain
869 any ordered tests applied to the condition codes.
870 EQ and NE tests do not count.  */
/* Only compiled on cc0 targets.  Returns 0 (conservative) when the
   cc0 user cannot be found or is not an insn/jump/call.  */
873 next_insn_tests_no_inequality (insn)
874 rtx insn;
876 register rtx next = next_cc0_user (insn);
878 /* If there is no next insn, we have to take the conservative choice.  */
879 if (next == 0)
880 return 0;
882 return ((GET_CODE (next) == JUMP_INSN
883 || GET_CODE (next) == INSN
884 || GET_CODE (next) == CALL_INSN)
885 && ! inequality_comparisons_p (PATTERN (next)));
888 #if 0 /* This is useless since the insn that sets the cc's
889 must be followed immediately by the use of them.  */
890 /* Return 1 if the CC value set up by INSN is not used.  */
/* Dead code, permanently disabled by the #if 0 above.  */
893 next_insns_test_no_inequality (insn)
894 rtx insn;
896 register rtx next = NEXT_INSN (insn);
898 for (; next != 0; next = NEXT_INSN (next))
900 if (GET_CODE (next) == CODE_LABEL
901 || GET_CODE (next) == BARRIER)
902 return 1;
903 if (GET_CODE (next) == NOTE)
904 continue;
905 if (inequality_comparisons_p (PATTERN (next)))
906 return 0;
907 if (sets_cc0_p (PATTERN (next)) == 1)
908 return 1;
909 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
910 return 1;
912 return 1;
914 #endif
915 #endif
917 /* This is used by find_single_use to locate an rtx that contains exactly one
918 use of DEST, which is typically either a REG or CC0.  It returns a
919 pointer to the innermost rtx expression containing DEST.  Appearances of
920 DEST that are being used to totally replace it are not counted.  */
/* Returns 0 when DEST is unused here or used more than once.  */
922 static rtx *
923 find_single_use_1 (dest, loc)
924 rtx dest;
925 rtx *loc;
927 rtx x = *loc;
928 enum rtx_code code = GET_CODE (x);
929 rtx *result = 0;
930 rtx *this_result;
931 int i;
932 const char *fmt;
934 switch (code)
/* Constants and CLOBBERs cannot contain a use of DEST.  */
936 case CONST_INT:
937 case CONST:
938 case LABEL_REF:
939 case SYMBOL_REF:
940 case CONST_DOUBLE:
941 case CLOBBER:
942 return 0;
944 case SET:
945 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
946 of a REG that occupies all of the REG, the insn uses DEST if
947 it is mentioned in the destination or the source.  Otherwise, we
948 need just check the source.  */
949 if (GET_CODE (SET_DEST (x)) != CC0
950 && GET_CODE (SET_DEST (x)) != PC
951 && GET_CODE (SET_DEST (x)) != REG
952 && ! (GET_CODE (SET_DEST (x)) == SUBREG
953 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
954 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
955 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
956 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
957 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
958 break;
960 return find_single_use_1 (dest, &SET_SRC (x));
962 case MEM:
963 case SUBREG:
964 return find_single_use_1 (dest, &XEXP (x, 0));
966 default:
967 break;
970 /* If it wasn't one of the common cases above, check each expression and
971 vector of this code.  Look for a unique usage of DEST.  */
973 fmt = GET_RTX_FORMAT (code);
974 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
976 if (fmt[i] == 'e')
978 if (dest == XEXP (x, i)
979 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
980 && REGNO (dest) == REGNO (XEXP (x, i))))
981 this_result = loc;
982 else
983 this_result = find_single_use_1 (dest, &XEXP (x, i));
985 if (result == 0)
986 result = this_result;
987 else if (this_result)
988 /* Duplicate usage.  */
989 return 0;
991 else if (fmt[i] == 'E')
993 int j;
995 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
997 if (XVECEXP (x, i, j) == dest
998 || (GET_CODE (dest) == REG
999 && GET_CODE (XVECEXP (x, i, j)) == REG
1000 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
1001 this_result = loc;
1002 else
1003 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
1005 if (result == 0)
1006 result = this_result;
1007 else if (this_result)
/* Second use found in a vector element: not a single use.  */
1008 return 0;
1013 return result;
1016 /* See if DEST, produced in INSN, is used only a single time in the
1017 sequel.  If so, return a pointer to the innermost rtx expression in which
1018 it is used.
1020 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
1022 This routine will return usually zero either before flow is called (because
1023 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
1024 note can't be trusted).
1026 If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
1027 care about REG_DEAD notes or LOG_LINKS.
1029 Otherwise, we find the single use by finding an insn that has a
1030 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
1031 only referenced once in that insn, we know that it must be the first
1032 and last insn referencing DEST.  */
1034 rtx *
1035 find_single_use (dest, insn, ploc)
1036 rtx dest;
1037 rtx insn;
1038 rtx *ploc;
1040 rtx next;
1041 rtx *result;
1042 rtx link;
1044 #ifdef HAVE_cc0
/* cc0 special case: the use, if any, must be in the very next insn.  */
1045 if (dest == cc0_rtx)
1047 next = NEXT_INSN (insn);
1048 if (next == 0
1049 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
1050 return 0;
1052 result = find_single_use_1 (dest, &PATTERN (next));
1053 if (result && ploc)
1054 *ploc = next;
1055 return result;
1057 #endif
1059 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
1060 return 0;
/* Scan forward within the basic block for the insn where DEST dies.  */
1062 for (next = next_nonnote_insn (insn);
1063 next != 0 && GET_CODE (next) != CODE_LABEL;
1064 next = next_nonnote_insn (next))
1065 if (INSN_P (next) && dead_or_set_p (next, dest))
1067 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
1068 if (XEXP (link, 0) == insn)
1069 break;
1071 if (link)
1073 result = find_single_use_1 (dest, &PATTERN (next));
1074 if (ploc)
1075 *ploc = next;
1076 return result;
1080 return 0;
1083 /* Return 1 if OP is a valid general operand for machine mode MODE.
1084 This is either a register reference, a memory reference,
1085 or a constant.  In the case of a memory reference, the address
1086 is checked for general validity for the target machine.
1088 Register and memory references must have mode MODE in order to be valid,
1089 but some constants have no machine mode and are valid for any mode.
1091 If MODE is VOIDmode, OP is checked for validity for whatever mode
1092 it has.
1094 The main use of this function is as a predicate in match_operand
1095 expressions in the machine description.
1097 For an explanation of this function's behavior for registers of
1098 class NO_REGS, see the comment for `register_operand'.  */
1101 general_operand (op, mode)
1102 register rtx op;
1103 enum machine_mode mode;
1105 register enum rtx_code code = GET_CODE (op);
1107 if (mode == VOIDmode)
1108 mode = GET_MODE (op);
1110 /* Don't accept CONST_INT or anything similar
1111 if the caller wants something floating.  */
1112 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1113 && GET_MODE_CLASS (mode) != MODE_INT
1114 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1115 return 0;
/* Reject a CONST_INT that is not already canonical for MODE.  */
1117 if (GET_CODE (op) == CONST_INT
1118 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1119 return 0;
1121 if (CONSTANT_P (op))
1122 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1123 || mode == VOIDmode)
1124 #ifdef LEGITIMATE_PIC_OPERAND_P
1125 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1126 #endif
1127 && LEGITIMATE_CONSTANT_P (op));
1129 /* Except for certain constants with VOIDmode, already checked for,
1130 OP's mode must match MODE if MODE specifies a mode.  */
1132 if (GET_MODE (op) != mode)
1133 return 0;
1135 if (code == SUBREG)
1137 #ifdef INSN_SCHEDULING
1138 /* On machines that have insn scheduling, we want all memory
1139 reference to be explicit, so outlaw paradoxical SUBREGs.  */
1140 if (GET_CODE (SUBREG_REG (op)) == MEM
1141 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1142 return 0;
1143 #endif
/* Look through the SUBREG and judge the inner expression.  */
1145 op = SUBREG_REG (op);
1146 code = GET_CODE (op);
1149 if (code == REG)
1150 /* A register whose class is NO_REGS is not a general operand.  */
1151 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1152 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1154 if (code == MEM)
1156 register rtx y = XEXP (op, 0);
1158 if (! volatile_ok && MEM_VOLATILE_P (op))
1159 return 0;
1161 if (GET_CODE (y) == ADDRESSOF)
1162 return 1;
1164 /* Use the mem's mode, since it will be reloaded thus.  */
1165 mode = GET_MODE (op);
/* GO_IF_LEGITIMATE_ADDRESS jumps to `win' below on a valid address.  */
1166 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
1169 /* Pretend this is an operand for now; we'll run force_operand
1170 on its replacement in fixup_var_refs_1.  */
1171 if (code == ADDRESSOF)
1172 return 1;
1174 return 0;
1176 win:
1177 return 1;
1180 /* Return 1 if OP is a valid memory address for a memory reference
1181 of mode MODE.
1183 The main use of this function is as a predicate in match_operand
1184 expressions in the machine description. */
1187 address_operand (op, mode)
1188 register rtx op;
1189 enum machine_mode mode;
1191 return memory_address_p (mode, op);
1194 /* Return 1 if OP is a register reference of mode MODE.
1195 If MODE is VOIDmode, accept a register in any mode.
1197 The main use of this function is as a predicate in match_operand
1198 expressions in the machine description.
1200 As a special exception, registers whose class is NO_REGS are
1201 not accepted by `register_operand'. The reason for this change
1202 is to allow the representation of special architecture artifacts
1203 (such as a condition code register) without extending the rtl
1204 definitions. Since registers of class NO_REGS cannot be used
1205 as registers in any case where register classes are examined,
1206 it is most consistent to keep this function from accepting them. */
int
register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      /* Reject a SUBREG of a hard register whose class forbids the
	 mode change implied by the SUBREG, except for complex modes
	 (where accessing a part is legitimate).  */
      if (GET_CODE (SUBREG_REG (op)) == REG
	  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
	  && (TEST_HARD_REG_BIT
	      (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
	       REGNO (SUBREG_REG (op))))
	  && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      op = SUBREG_REG (op);
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1254 /* Return 1 for a register in Pmode; ignore the tested mode. */
1257 pmode_register_operand (op, mode)
1258 rtx op;
1259 enum machine_mode mode ATTRIBUTE_UNUSED;
1261 return register_operand (op, Pmode);
1264 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1265 or a hard register. */
1268 scratch_operand (op, mode)
1269 register rtx op;
1270 enum machine_mode mode;
1272 if (GET_MODE (op) != mode && mode != VOIDmode)
1273 return 0;
1275 return (GET_CODE (op) == SCRATCH
1276 || (GET_CODE (op) == REG
1277 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1280 /* Return 1 if OP is a valid immediate operand for mode MODE.
1282 The main use of this function is as a predicate in match_operand
1283 expressions in the machine description. */
int
immediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* Reject integer constants not canonical for MODE.  */
  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}
1316 /* Returns 1 if OP is an operand that is a CONST_INT. */
1319 const_int_operand (op, mode)
1320 register rtx op;
1321 enum machine_mode mode ATTRIBUTE_UNUSED;
1323 return GET_CODE (op) == CONST_INT;
1326 /* Returns 1 if OP is an operand that is a constant integer or constant
1327 floating-point number. */
int
const_double_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* Either rtx form of a constant number qualifies, provided the
     modes agree (VOIDmode on either side is a wildcard).  */
  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
1346 /* Return 1 if OP is a general operand that is not an immediate operand. */
1349 nonimmediate_operand (op, mode)
1350 register rtx op;
1351 enum machine_mode mode;
1353 return (general_operand (op, mode) && ! CONSTANT_P (op));
1356 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
int
nonmemory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      /* Reject integer constants not canonical for MODE.  */
      if (GET_CODE (op) == CONST_INT
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      /* Same constant acceptance test as general_operand.  */
      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
1407 /* Return 1 if OP is a valid operand that stands for pushing a
1408 value of mode MODE onto the stack.
1410 The main use of this function is as a predicate in match_operand
1411 expressions in the machine description. */
int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  /* From here on, OP is the address of the MEM.  */
  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      /* No padding needed: a plain pre-inc/dec of the stack pointer
	 (whichever STACK_PUSH_CODE names for this target) is expected.  */
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      /* Padded push: must be (pre_modify SP (plus SP <const>)) where the
	 constant is the (signed, direction-dependent) rounded size.  */
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
1455 /* Return 1 if OP is a valid operand that stands for popping a
1456 value of mode MODE off the stack.
1458 The main use of this function is as a predicate in match_operand
1459 expressions in the machine description. */
1462 pop_operand (op, mode)
1463 rtx op;
1464 enum machine_mode mode;
1466 if (GET_CODE (op) != MEM)
1467 return 0;
1469 if (mode != VOIDmode && GET_MODE (op) != mode)
1470 return 0;
1472 op = XEXP (op, 0);
1474 if (GET_CODE (op) != STACK_POP_CODE)
1475 return 0;
1477 return XEXP (op, 0) == stack_pointer_rtx;
1480 /* Return 1 if ADDR is a valid memory address for mode MODE. */
int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     register rtx addr;
{
  /* An ADDRESSOF will be eliminated later, so treat it as valid.  */
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  /* The target macro jumps to `win' when the address is legitimate.  */
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
1497 /* Return 1 if OP is a valid memory reference with mode MODE,
1498 including a valid address.
1500 The main use of this function is as a predicate in match_operand
1501 expressions in the machine description. */
1504 memory_operand (op, mode)
1505 register rtx op;
1506 enum machine_mode mode;
1508 rtx inner;
1510 if (! reload_completed)
1511 /* Note that no SUBREG is a memory operand before end of reload pass,
1512 because (SUBREG (MEM...)) forces reloading into a register. */
1513 return GET_CODE (op) == MEM && general_operand (op, mode);
1515 if (mode != VOIDmode && GET_MODE (op) != mode)
1516 return 0;
1518 inner = op;
1519 if (GET_CODE (inner) == SUBREG)
1520 inner = SUBREG_REG (inner);
1522 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1525 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1526 that is, a memory reference whose address is a general_operand. */
int
indirect_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      register int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  /* Ordinary case: a MEM whose address is itself a general operand.  */
  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
1560 /* Return 1 if this is a comparison operator. This allows the use of
1561 MATCH_OPERATOR to recognize all the branch insns. */
1564 comparison_operator (op, mode)
1565 register rtx op;
1566 enum machine_mode mode;
1568 return ((mode == VOIDmode || GET_MODE (op) == mode)
1569 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1572 /* If BODY is an insn body that uses ASM_OPERANDS,
1573 return the number of operands (both input and output) in the insn.
1574 Otherwise return -1. */
int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
1651 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1652 copy its operands (both input and output) into the vector OPERANDS,
1653 the locations of the operands within the insn into the vector OPERAND_LOCS,
1654 and the constraints for the operands into CONSTRAINTS.
1655 Write the modes of the operands into MODES.
1656 Return the assembler-template.
1658 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1659 we don't store that info. */
const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  register int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      /* Inputs occupy slots 1..noperands-1; slot 0 is the output.  */
      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      /* Inputs follow the outputs in the operand arrays.  */
      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  /* Returns 0 when BODY is none of the recognized asm forms.  */
  return template;
}
/* Check if an asm_operand matches its constraints.
1795 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
	{
	case '=':
	case '+':
	case '*':
	case '%':
	case '?':
	case '!':
	case '#':
	case '&':
	case ',':
	  /* Modifiers and alternative separators carry no matching test.  */
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  result = -1;
	  break;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    return 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    return 1;
	  break;

	case '>':
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    return 1;
	  break;

	case 'E':
#ifndef REAL_ARITHMETIC
	  /* Match any floating double constant, but only if
	     we can examine the bits of it reliably.  */
	  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
	       || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
	      && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
	    break;
#endif
	  /* FALLTHRU */

	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE)
	    return 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	    return 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	    return 1;
	  break;

	case 's':
	  /* 's' excludes explicit integers; skip them and fall through
	     to the general constant test only for symbolic constants.  */
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* FALLTHRU */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    return 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    return 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	    return 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	    return 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	    return 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	    return 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	    return 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	    return 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	    return 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	    return 1;
	  break;

	case 'X':
	  return 1;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    return 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
	    {
	      /* NOTE: the `case' inside the `if' is deliberate; `r' enters
		 here directly, other letters only when they name a class.  */
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		return 1;
	    }
#ifdef EXTRA_CONSTRAINT
	  if (EXTRA_CONSTRAINT (op, c))
	    return 1;
#endif
	  break;
	}
    }

  return result;
}
1993 /* Given an rtx *P, if it is a sum containing an integer constant term,
1994 return the location (type rtx *) of the pointer to that constant term.
1995 Otherwise, return a null pointer. */
static rtx *
find_constant_term_loc (p)
     rtx *p;
{
  register rtx *tem;
  register enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term
     (recursing into nested sums).  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
2040 /* Return 1 if OP is a memory reference
2041 whose address contains no side effects
2042 and remains valid after the addition
2043 of a positive integer less than the
2044 size of the object being referenced.
2046 We assume that the original address is valid and do not check it.
2048 This uses strict_memory_address_p as a subroutine, so
2049 don't use it before reload. */
2052 offsettable_memref_p (op)
2053 rtx op;
2055 return ((GET_CODE (op) == MEM)
2056 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
2059 /* Similar, but don't require a strictly valid mem ref:
2060 consider pseudo-regs valid as index or base regs. */
2063 offsettable_nonstrict_memref_p (op)
2064 rtx op;
2066 return ((GET_CODE (op) == MEM)
2067 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
2070 /* Return 1 if Y is a memory address which contains no side effects
2071 and would remain valid after the addition of a positive integer
2072 less than the size of that mode.
2074 We assume that the original address is valid and do not check it.
2075 We do check that it is valid for narrower modes.
2077 If STRICTP is nonzero, we require a strictly valid address,
2078 for the sake of use in reload.c. */
int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     register rtx y;
{
  register enum rtx_code ycode = GET_CODE (y);
  register rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      /* Temporarily bump the constant term in place, test, then restore:
	 Y is shared RTL so it must be left unmodified.  */
      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  /* Autoincrement-class addresses cannot take an added displacement.  */
  if (GET_RTX_CLASS (ycode) == 'a')
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  */

  z = plus_constant_for_output (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
2143 /* Return 1 if ADDR is an address-expression whose effect depends
2144 on the mode of the memory reference it is used in.
2146 Autoincrement addressing is a typical example of mode-dependence
2147 because the amount of the increment depends on the mode. */
int
mode_dependent_address_p (addr)
     rtx addr ATTRIBUTE_UNUSED;	 /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */
{
  /* The target macro jumps to `win' for a mode-dependent address.  */
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
2160 /* Return 1 if OP is a general operand
2161 other than a memory ref with a mode dependent address. */
int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  /* Non-memory general operands can't have a mode-dependent address.  */
  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
  return 0;
}
2184 /* Given an operand OP that is a valid memory reference which
2185 satisfies offsettable_memref_p, return a new memory reference whose
2186 address has been adjusted by OFFSET. OFFSET should be positive and
2187 less than the size of the object referenced. */
rtx
adj_offsettable_operand (op, offset)
     rtx op;
     int offset;
{
  register enum rtx_code code = GET_CODE (op);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      register rtx new;

      if (CONSTANT_ADDRESS_P (y))
	{
	  /* Constant address: build a fresh MEM at address Y+OFFSET.  */
	  new = gen_rtx_MEM (GET_MODE (op),
			     plus_constant_for_output (y, offset));
	  MEM_COPY_ATTRIBUTES (new, op);
	  return new;
	}

      if (GET_CODE (y) == PLUS)
	{
	  rtx z = y;
	  register rtx *const_loc;

	  /* Copy first: the constant term is modified in place below,
	     and OP may be shared RTL.  */
	  op = copy_rtx (op);
	  z = XEXP (op, 0);
	  const_loc = find_constant_term_loc (&z);
	  if (const_loc)
	    {
	      *const_loc = plus_constant_for_output (*const_loc, offset);
	      return op;
	    }
	}

      /* No constant term to adjust: wrap the whole address in a PLUS.  */
      new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
      MEM_COPY_ATTRIBUTES (new, op);
      return new;
    }
  /* Caller violated the contract: OP must satisfy offsettable_memref_p.  */
  abort ();
}
2231 /* Like extract_insn, but save insn extracted and don't extract again, when
2232 called again for the same insn expecting that recog_data still contain the
   valid information.  This is used primarily by the gen_attr infrastructure that
2234 often does extract insn again and again. */
2235 void
2236 extract_insn_cached (insn)
2237 rtx insn;
2239 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2240 return;
2241 extract_insn (insn);
2242 recog_data.insn = insn;
2244 /* Do cached extract_insn, constrain_operand and complain about failures.
2245 Used by insn_attrtab. */
2246 void
2247 extract_constrain_insn_cached (insn)
2248 rtx insn;
2250 extract_insn_cached (insn);
2251 if (which_alternative == -1
2252 && !constrain_operands (reload_completed))
2253 fatal_insn_not_found (insn);
2255 /* Do cached constrain_operand and complain about failures. */
2257 constrain_operands_cached (strict)
2258 int strict;
2260 if (which_alternative == -1)
2261 return constrain_operands (strict);
2262 else
2263 return 1;
2266 /* Analyze INSN and fill in recog_data. */
void
extract_insn (insn)
     rtx insn;
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  /* Reset the cache and counts before classifying the pattern.  */
  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* These patterns have no operands to extract.  */
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	  || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	goto asm_insn;
      else
	goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
	{
	  /* This insn is an `asm' with operands.  */

	  /* expand_asm_operands makes sure there aren't too many operands.  */
	  if (noperands > MAX_RECOG_OPERANDS)
	    abort ();

	  /* Now get the operand values and constraints out of the insn.  */
	  decode_asm_operands (body, recog_data.operand,
			       recog_data.operand_loc,
			       recog_data.constraints,
			       recog_data.operand_mode);
	  if (noperands > 0)
	    {
	      /* Alternatives are comma-separated in the first constraint.  */
	      const char *p = recog_data.constraints[0];
	      recog_data.n_alternatives = 1;
	      while (*p)
		recog_data.n_alternatives += (*p++ == ',');
	    }
	  break;
	}
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
	 and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
	fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
	{
	  recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
	  recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
	  /* VOIDmode match_operands gets mode from their real operand.  */
	  if (recog_data.operand_mode[i] == VOIDmode)
	    recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
	}
    }

  /* Classify each operand from the leading constraint modifier.  */
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
	 : recog_data.constraints[i][0] == '+' ? OP_INOUT
	 : OP_IN);

  if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
    abort ();
}
2365 /* After calling extract_insn, you can use this function to extract some
2366 information from the constraint strings into a more usable form.
2367 The collected data is stored in recog_op_alt. */
2368 void
2369 preprocess_constraints ()
2371 int i;
2373 memset (recog_op_alt, 0, sizeof recog_op_alt);
2374 for (i = 0; i < recog_data.n_operands; i++)
2376 int j;
2377 struct operand_alternative *op_alt;
2378 const char *p = recog_data.constraints[i];
2380 op_alt = recog_op_alt[i];
2382 for (j = 0; j < recog_data.n_alternatives; j++)
2384 op_alt[j].class = NO_REGS;
2385 op_alt[j].constraint = p;
2386 op_alt[j].matches = -1;
2387 op_alt[j].matched = -1;
2389 if (*p == '\0' || *p == ',')
2391 op_alt[j].anything_ok = 1;
2392 continue;
2395 for (;;)
2397 char c = *p++;
2398 if (c == '#')
2400 c = *p++;
2401 while (c != ',' && c != '\0');
2402 if (c == ',' || c == '\0')
2403 break;
2405 switch (c)
2407 case '=': case '+': case '*': case '%':
2408 case 'E': case 'F': case 'G': case 'H':
2409 case 's': case 'i': case 'n':
2410 case 'I': case 'J': case 'K': case 'L':
2411 case 'M': case 'N': case 'O': case 'P':
2412 /* These don't say anything we care about. */
2413 break;
2415 case '?':
2416 op_alt[j].reject += 6;
2417 break;
2418 case '!':
2419 op_alt[j].reject += 600;
2420 break;
2421 case '&':
2422 op_alt[j].earlyclobber = 1;
2423 break;
2425 case '0': case '1': case '2': case '3': case '4':
2426 case '5': case '6': case '7': case '8': case '9':
2427 op_alt[j].matches = c - '0';
2428 recog_op_alt[op_alt[j].matches][j].matched = i;
2429 break;
2431 case 'm':
2432 op_alt[j].memory_ok = 1;
2433 break;
2434 case '<':
2435 op_alt[j].decmem_ok = 1;
2436 break;
2437 case '>':
2438 op_alt[j].incmem_ok = 1;
2439 break;
2440 case 'V':
2441 op_alt[j].nonoffmem_ok = 1;
2442 break;
2443 case 'o':
2444 op_alt[j].offmem_ok = 1;
2445 break;
2446 case 'X':
2447 op_alt[j].anything_ok = 1;
2448 break;
2450 case 'p':
2451 op_alt[j].is_address = 1;
2452 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2453 break;
2455 case 'g': case 'r':
2456 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2457 break;
2459 default:
2460 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2461 break;
2468 /* Check the operands of an insn against the insn's operand constraints
2469 and return 1 if they are valid.
2470 The information about the insn's operands, constraints, operand modes
2471 etc. is obtained from the global variables set up by extract_insn.
2473 WHICH_ALTERNATIVE is set to a number which indicates which
2474 alternative of constraints was matched: 0 for the first alternative,
2475 1 for the next, etc.
2477 In addition, when two operands are match
2478 and it happens that the output operand is (reg) while the
2479 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2480 make the output operand look like the input.
2481 This is because the output operand is the one the template will print.
2483 This is used in final, just before printing the assembler code and by
2484 the routines that determine an insn's attribute.
2486 If STRICT is a positive non-zero value, it means that we have been
2487 called after reload has been completed. In that case, we must
2488 do all checks strictly. If it is zero, it means that we have been called
2489 before reload has completed. In that case, we first try to see if we can
2490 find an alternative that matches strictly. If not, we try again, this
2491 time assuming that reload will fix up the insn. This provides a "best
2492 guess" for the alternative and is used to compute attributes of insns prior
2493 to reload. A negative value of STRICT is used for this internal call. */
2495 struct funny_match
2497 int this, other;
2501 constrain_operands (strict)
2502 int strict;
2504 const char *constraints[MAX_RECOG_OPERANDS];
2505 int matching_operands[MAX_RECOG_OPERANDS];
2506 int earlyclobber[MAX_RECOG_OPERANDS];
2507 register int c;
2509 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2510 int funny_match_index;
2512 which_alternative = 0;
2513 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2514 return 1;
2516 for (c = 0; c < recog_data.n_operands; c++)
2518 constraints[c] = recog_data.constraints[c];
2519 matching_operands[c] = -1;
2524 register int opno;
2525 int lose = 0;
2526 funny_match_index = 0;
2528 for (opno = 0; opno < recog_data.n_operands; opno++)
2530 register rtx op = recog_data.operand[opno];
2531 enum machine_mode mode = GET_MODE (op);
2532 register const char *p = constraints[opno];
2533 int offset = 0;
2534 int win = 0;
2535 int val;
2537 earlyclobber[opno] = 0;
2539 /* A unary operator may be accepted by the predicate, but it
2540 is irrelevant for matching constraints. */
2541 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2542 op = XEXP (op, 0);
2544 if (GET_CODE (op) == SUBREG)
2546 if (GET_CODE (SUBREG_REG (op)) == REG
2547 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2548 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2549 GET_MODE (SUBREG_REG (op)),
2550 SUBREG_BYTE (op),
2551 GET_MODE (op));
2552 op = SUBREG_REG (op);
2555 /* An empty constraint or empty alternative
2556 allows anything which matched the pattern. */
2557 if (*p == 0 || *p == ',')
2558 win = 1;
2560 while (*p && (c = *p++) != ',')
2561 switch (c)
2563 case '?': case '!': case '*': case '%':
2564 case '=': case '+':
2565 break;
2567 case '#':
2568 /* Ignore rest of this alternative as far as
2569 constraint checking is concerned. */
2570 while (*p && *p != ',')
2571 p++;
2572 break;
2574 case '&':
2575 earlyclobber[opno] = 1;
2576 break;
2578 case '0': case '1': case '2': case '3': case '4':
2579 case '5': case '6': case '7': case '8': case '9':
2581 /* This operand must be the same as a previous one.
2582 This kind of constraint is used for instructions such
2583 as add when they take only two operands.
2585 Note that the lower-numbered operand is passed first.
2587 If we are not testing strictly, assume that this constraint
2588 will be satisfied. */
2589 if (strict < 0)
2590 val = 1;
2591 else
2593 rtx op1 = recog_data.operand[c - '0'];
2594 rtx op2 = recog_data.operand[opno];
2596 /* A unary operator may be accepted by the predicate,
2597 but it is irrelevant for matching constraints. */
2598 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2599 op1 = XEXP (op1, 0);
2600 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2601 op2 = XEXP (op2, 0);
2603 val = operands_match_p (op1, op2);
2606 matching_operands[opno] = c - '0';
2607 matching_operands[c - '0'] = opno;
2609 if (val != 0)
2610 win = 1;
2611 /* If output is *x and input is *--x,
2612 arrange later to change the output to *--x as well,
2613 since the output op is the one that will be printed. */
2614 if (val == 2 && strict > 0)
2616 funny_match[funny_match_index].this = opno;
2617 funny_match[funny_match_index++].other = c - '0';
2619 break;
2621 case 'p':
2622 /* p is used for address_operands. When we are called by
2623 gen_reload, no one will have checked that the address is
2624 strictly valid, i.e., that all pseudos requiring hard regs
2625 have gotten them. */
2626 if (strict <= 0
2627 || (strict_memory_address_p (recog_data.operand_mode[opno],
2628 op)))
2629 win = 1;
2630 break;
2632 /* No need to check general_operand again;
2633 it was done in insn-recog.c. */
2634 case 'g':
2635 /* Anything goes unless it is a REG and really has a hard reg
2636 but the hard reg is not in the class GENERAL_REGS. */
2637 if (strict < 0
2638 || GENERAL_REGS == ALL_REGS
2639 || GET_CODE (op) != REG
2640 || (reload_in_progress
2641 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2642 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2643 win = 1;
2644 break;
2646 case 'X':
2647 /* This is used for a MATCH_SCRATCH in the cases when
2648 we don't actually need anything. So anything goes
2649 any time. */
2650 win = 1;
2651 break;
2653 case 'm':
2654 if (GET_CODE (op) == MEM
2655 /* Before reload, accept what reload can turn into mem. */
2656 || (strict < 0 && CONSTANT_P (op))
2657 /* During reload, accept a pseudo */
2658 || (reload_in_progress && GET_CODE (op) == REG
2659 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2660 win = 1;
2661 break;
2663 case '<':
2664 if (GET_CODE (op) == MEM
2665 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2666 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2667 win = 1;
2668 break;
2670 case '>':
2671 if (GET_CODE (op) == MEM
2672 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2673 || GET_CODE (XEXP (op, 0)) == POST_INC))
2674 win = 1;
2675 break;
2677 case 'E':
2678 #ifndef REAL_ARITHMETIC
2679 /* Match any CONST_DOUBLE, but only if
2680 we can examine the bits of it reliably. */
2681 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2682 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2683 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2684 break;
2685 #endif
2686 if (GET_CODE (op) == CONST_DOUBLE)
2687 win = 1;
2688 break;
2690 case 'F':
2691 if (GET_CODE (op) == CONST_DOUBLE)
2692 win = 1;
2693 break;
2695 case 'G':
2696 case 'H':
2697 if (GET_CODE (op) == CONST_DOUBLE
2698 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2699 win = 1;
2700 break;
2702 case 's':
2703 if (GET_CODE (op) == CONST_INT
2704 || (GET_CODE (op) == CONST_DOUBLE
2705 && GET_MODE (op) == VOIDmode))
2706 break;
2707 case 'i':
2708 if (CONSTANT_P (op))
2709 win = 1;
2710 break;
2712 case 'n':
2713 if (GET_CODE (op) == CONST_INT
2714 || (GET_CODE (op) == CONST_DOUBLE
2715 && GET_MODE (op) == VOIDmode))
2716 win = 1;
2717 break;
2719 case 'I':
2720 case 'J':
2721 case 'K':
2722 case 'L':
2723 case 'M':
2724 case 'N':
2725 case 'O':
2726 case 'P':
2727 if (GET_CODE (op) == CONST_INT
2728 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2729 win = 1;
2730 break;
2732 case 'V':
2733 if (GET_CODE (op) == MEM
2734 && ((strict > 0 && ! offsettable_memref_p (op))
2735 || (strict < 0
2736 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2737 || (reload_in_progress
2738 && !(GET_CODE (op) == REG
2739 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2740 win = 1;
2741 break;
2743 case 'o':
2744 if ((strict > 0 && offsettable_memref_p (op))
2745 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2746 /* Before reload, accept what reload can handle. */
2747 || (strict < 0
2748 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2749 /* During reload, accept a pseudo */
2750 || (reload_in_progress && GET_CODE (op) == REG
2751 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2752 win = 1;
2753 break;
2755 default:
2757 enum reg_class class;
2759 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2760 if (class != NO_REGS)
2762 if (strict < 0
2763 || (strict == 0
2764 && GET_CODE (op) == REG
2765 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2766 || (strict == 0 && GET_CODE (op) == SCRATCH)
2767 || (GET_CODE (op) == REG
2768 && reg_fits_class_p (op, class, offset, mode)))
2769 win = 1;
2771 #ifdef EXTRA_CONSTRAINT
2772 else if (EXTRA_CONSTRAINT (op, c))
2773 win = 1;
2774 #endif
2775 break;
2779 constraints[opno] = p;
2780 /* If this operand did not win somehow,
2781 this alternative loses. */
2782 if (! win)
2783 lose = 1;
2785 /* This alternative won; the operands are ok.
2786 Change whichever operands this alternative says to change. */
2787 if (! lose)
2789 int opno, eopno;
2791 /* See if any earlyclobber operand conflicts with some other
2792 operand. */
2794 if (strict > 0)
2795 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2796 /* Ignore earlyclobber operands now in memory,
2797 because we would often report failure when we have
2798 two memory operands, one of which was formerly a REG. */
2799 if (earlyclobber[eopno]
2800 && GET_CODE (recog_data.operand[eopno]) == REG)
2801 for (opno = 0; opno < recog_data.n_operands; opno++)
2802 if ((GET_CODE (recog_data.operand[opno]) == MEM
2803 || recog_data.operand_type[opno] != OP_OUT)
2804 && opno != eopno
2805 /* Ignore things like match_operator operands. */
2806 && *recog_data.constraints[opno] != 0
2807 && ! (matching_operands[opno] == eopno
2808 && operands_match_p (recog_data.operand[opno],
2809 recog_data.operand[eopno]))
2810 && ! safe_from_earlyclobber (recog_data.operand[opno],
2811 recog_data.operand[eopno]))
2812 lose = 1;
2814 if (! lose)
2816 while (--funny_match_index >= 0)
2818 recog_data.operand[funny_match[funny_match_index].other]
2819 = recog_data.operand[funny_match[funny_match_index].this];
2822 return 1;
2826 which_alternative++;
2828 while (which_alternative < recog_data.n_alternatives);
2830 which_alternative = -1;
2831 /* If we are about to reject this, but we are not to test strictly,
2832 try a very loose test. Only return failure if it fails also. */
2833 if (strict == 0)
2834 return constrain_operands (-1);
2835 else
2836 return 0;
2839 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2840 is a hard reg in class CLASS when its regno is offset by OFFSET
2841 and changed to mode MODE.
2842 If REG occupies multiple hard regs, all of them must be in CLASS. */
2845 reg_fits_class_p (operand, class, offset, mode)
2846 rtx operand;
2847 register enum reg_class class;
2848 int offset;
2849 enum machine_mode mode;
2851 register int regno = REGNO (operand);
2852 if (regno < FIRST_PSEUDO_REGISTER
2853 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2854 regno + offset))
2856 register int sr;
2857 regno += offset;
2858 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2859 sr > 0; sr--)
2860 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2861 regno + sr))
2862 break;
2863 return sr == 0;
2866 return 0;
2869 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2871 void
2872 split_all_insns (upd_life)
2873 int upd_life;
2875 sbitmap blocks;
2876 int changed;
2877 int i;
2879 blocks = sbitmap_alloc (n_basic_blocks);
2880 sbitmap_zero (blocks);
2881 changed = 0;
2883 for (i = n_basic_blocks - 1; i >= 0; --i)
2885 basic_block bb = BASIC_BLOCK (i);
2886 rtx insn, next;
2888 for (insn = bb->head; insn ; insn = next)
2890 rtx set;
2892 /* Can't use `next_real_insn' because that might go across
2893 CODE_LABELS and short-out basic blocks. */
2894 next = NEXT_INSN (insn);
2895 if (! INSN_P (insn))
2898 /* Don't split no-op move insns. These should silently
2899 disappear later in final. Splitting such insns would
2900 break the code that handles REG_NO_CONFLICT blocks. */
2902 else if ((set = single_set (insn)) != NULL
2903 && set_noop_p (set))
2905 /* Nops get in the way while scheduling, so delete them
2906 now if register allocation has already been done. It
2907 is too risky to try to do this before register
2908 allocation, and there are unlikely to be very many
2909 nops then anyways. */
2910 if (reload_completed)
2912 PUT_CODE (insn, NOTE);
2913 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2914 NOTE_SOURCE_FILE (insn) = 0;
2917 else
2919 /* Split insns here to get max fine-grain parallelism. */
2920 rtx first = PREV_INSN (insn);
2921 rtx last = try_split (PATTERN (insn), insn, 1);
2923 if (last != insn)
2925 SET_BIT (blocks, i);
2926 changed = 1;
2928 /* try_split returns the NOTE that INSN became. */
2929 PUT_CODE (insn, NOTE);
2930 NOTE_SOURCE_FILE (insn) = 0;
2931 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2933 /* ??? Coddle to md files that generate subregs in post-
2934 reload splitters instead of computing the proper
2935 hard register. */
2936 if (reload_completed && first != last)
2938 first = NEXT_INSN (first);
2939 while (1)
2941 if (INSN_P (first))
2942 cleanup_subreg_operands (first);
2943 if (first == last)
2944 break;
2945 first = NEXT_INSN (first);
2949 if (insn == bb->end)
2951 bb->end = last;
2952 break;
2957 if (insn == bb->end)
2958 break;
2961 /* ??? When we're called from just after reload, the CFG is in bad
2962 shape, and we may have fallen off the end. This could be fixed
2963 by having reload not try to delete unreachable code. Otherwise
2964 assert we found the end insn. */
2965 if (insn == NULL && upd_life)
2966 abort ();
2969 if (changed && upd_life)
2971 compute_bb_for_insn (get_max_uid ());
2972 count_or_remove_death_notes (blocks, 1);
2973 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2976 sbitmap_free (blocks);
2979 #ifdef HAVE_peephole2
2980 struct peep2_insn_data
2982 rtx insn;
2983 regset live_before;
2986 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2987 static int peep2_current;
2989 /* A non-insn marker indicating the last insn of the block.
2990 The live_before regset for this element is correct, indicating
2991 global_live_at_end for the block. */
2992 #define PEEP2_EOB pc_rtx
2994 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2995 does not exist. Used by the recognizer to find the next insn to match
2996 in a multi-insn pattern. */
2999 peep2_next_insn (n)
3000 int n;
3002 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3003 abort ();
3005 n += peep2_current;
3006 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3007 n -= MAX_INSNS_PER_PEEP2 + 1;
3009 if (peep2_insn_data[n].insn == PEEP2_EOB)
3010 return NULL_RTX;
3011 return peep2_insn_data[n].insn;
3014 /* Return true if REGNO is dead before the Nth non-note insn
3015 after `current'. */
3018 peep2_regno_dead_p (ofs, regno)
3019 int ofs;
3020 int regno;
3022 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3023 abort ();
3025 ofs += peep2_current;
3026 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3027 ofs -= MAX_INSNS_PER_PEEP2 + 1;
3029 if (peep2_insn_data[ofs].insn == NULL_RTX)
3030 abort ();
3032 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3035 /* Similarly for a REG. */
3038 peep2_reg_dead_p (ofs, reg)
3039 int ofs;
3040 rtx reg;
3042 int regno, n;
3044 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3045 abort ();
3047 ofs += peep2_current;
3048 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3049 ofs -= MAX_INSNS_PER_PEEP2 + 1;
3051 if (peep2_insn_data[ofs].insn == NULL_RTX)
3052 abort ();
3054 regno = REGNO (reg);
3055 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3056 while (--n >= 0)
3057 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3058 return 0;
3059 return 1;
3062 /* Try to find a hard register of mode MODE, matching the register class in
3063 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3064 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3065 in which case the only condition is that the register must be available
3066 before CURRENT_INSN.
3067 Registers that already have bits set in REG_SET will not be considered.
3069 If an appropriate register is available, it will be returned and the
3070 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3071 returned. */
3074 peep2_find_free_register (from, to, class_str, mode, reg_set)
3075 int from, to;
3076 const char *class_str;
3077 enum machine_mode mode;
3078 HARD_REG_SET *reg_set;
3080 static int search_ofs;
3081 enum reg_class class;
3082 HARD_REG_SET live;
3083 int i;
3085 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
3086 abort ();
3088 from += peep2_current;
3089 if (from >= MAX_INSNS_PER_PEEP2 + 1)
3090 from -= MAX_INSNS_PER_PEEP2 + 1;
3091 to += peep2_current;
3092 if (to >= MAX_INSNS_PER_PEEP2 + 1)
3093 to -= MAX_INSNS_PER_PEEP2 + 1;
3095 if (peep2_insn_data[from].insn == NULL_RTX)
3096 abort ();
3097 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3099 while (from != to)
3101 HARD_REG_SET this_live;
3103 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3104 from = 0;
3105 if (peep2_insn_data[from].insn == NULL_RTX)
3106 abort ();
3107 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3108 IOR_HARD_REG_SET (live, this_live);
3111 class = (class_str[0] == 'r' ? GENERAL_REGS
3112 : REG_CLASS_FROM_LETTER (class_str[0]));
3114 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3116 int raw_regno, regno, success, j;
3118 /* Distribute the free registers as much as possible. */
3119 raw_regno = search_ofs + i;
3120 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3121 raw_regno -= FIRST_PSEUDO_REGISTER;
3122 #ifdef REG_ALLOC_ORDER
3123 regno = reg_alloc_order[raw_regno];
3124 #else
3125 regno = raw_regno;
3126 #endif
3128 /* Don't allocate fixed registers. */
3129 if (fixed_regs[regno])
3130 continue;
3131 /* Make sure the register is of the right class. */
3132 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3133 continue;
3134 /* And can support the mode we need. */
3135 if (! HARD_REGNO_MODE_OK (regno, mode))
3136 continue;
3137 /* And that we don't create an extra save/restore. */
3138 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3139 continue;
3140 /* And we don't clobber traceback for noreturn functions. */
3141 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3142 && (! reload_completed || frame_pointer_needed))
3143 continue;
3145 success = 1;
3146 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3148 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3149 || TEST_HARD_REG_BIT (live, regno + j))
3151 success = 0;
3152 break;
3155 if (success)
3157 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3158 SET_HARD_REG_BIT (*reg_set, regno + j);
3160 /* Start the next search with the next register. */
3161 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3162 raw_regno = 0;
3163 search_ofs = raw_regno;
3165 return gen_rtx_REG (mode, regno);
3169 search_ofs = 0;
3170 return NULL_RTX;
3173 /* Perform the peephole2 optimization pass. */
3175 void
3176 peephole2_optimize (dump_file)
3177 FILE *dump_file ATTRIBUTE_UNUSED;
3179 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3180 rtx insn, prev;
3181 regset live;
3182 int i, b;
3183 #ifdef HAVE_conditional_execution
3184 sbitmap blocks;
3185 int changed;
3186 #endif
3188 /* Initialize the regsets we're going to use. */
3189 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3190 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3191 live = INITIALIZE_REG_SET (rs_heads[i]);
3193 #ifdef HAVE_conditional_execution
3194 blocks = sbitmap_alloc (n_basic_blocks);
3195 sbitmap_zero (blocks);
3196 changed = 0;
3197 #else
3198 count_or_remove_death_notes (NULL, 1);
3199 #endif
3201 for (b = n_basic_blocks - 1; b >= 0; --b)
3203 basic_block bb = BASIC_BLOCK (b);
3204 struct propagate_block_info *pbi;
3206 /* Indicate that all slots except the last holds invalid data. */
3207 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3208 peep2_insn_data[i].insn = NULL_RTX;
3210 /* Indicate that the last slot contains live_after data. */
3211 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3212 peep2_current = MAX_INSNS_PER_PEEP2;
3214 /* Start up propagation. */
3215 COPY_REG_SET (live, bb->global_live_at_end);
3216 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3218 #ifdef HAVE_conditional_execution
3219 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3220 #else
3221 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3222 #endif
3224 for (insn = bb->end; ; insn = prev)
3226 prev = PREV_INSN (insn);
3227 if (INSN_P (insn))
3229 rtx try;
3230 int match_len;
3232 /* Record this insn. */
3233 if (--peep2_current < 0)
3234 peep2_current = MAX_INSNS_PER_PEEP2;
3235 peep2_insn_data[peep2_current].insn = insn;
3236 propagate_one_insn (pbi, insn);
3237 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3239 /* Match the peephole. */
3240 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3241 if (try != NULL)
3243 i = match_len + peep2_current;
3244 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3245 i -= MAX_INSNS_PER_PEEP2 + 1;
3247 /* Replace the old sequence with the new. */
3248 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3249 try = emit_insn_after (try, prev);
3251 /* Adjust the basic block boundaries. */
3252 if (peep2_insn_data[i].insn == bb->end)
3253 bb->end = try;
3254 if (insn == bb->head)
3255 bb->head = NEXT_INSN (prev);
3257 #ifdef HAVE_conditional_execution
3258 /* With conditional execution, we cannot back up the
3259 live information so easily, since the conditional
3260 death data structures are not so self-contained.
3261 So record that we've made a modification to this
3262 block and update life information at the end. */
3263 SET_BIT (blocks, b);
3264 changed = 1;
3266 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3267 peep2_insn_data[i].insn = NULL_RTX;
3268 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3269 #else
3270 /* Back up lifetime information past the end of the
3271 newly created sequence. */
3272 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3273 i = 0;
3274 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3276 /* Update life information for the new sequence. */
3279 if (INSN_P (try))
3281 if (--i < 0)
3282 i = MAX_INSNS_PER_PEEP2;
3283 peep2_insn_data[i].insn = try;
3284 propagate_one_insn (pbi, try);
3285 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3287 try = PREV_INSN (try);
3289 while (try != prev);
3291 /* ??? Should verify that LIVE now matches what we
3292 had before the new sequence. */
3294 peep2_current = i;
3295 #endif
3299 if (insn == bb->head)
3300 break;
3303 free_propagate_block_info (pbi);
3306 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3307 FREE_REG_SET (peep2_insn_data[i].live_before);
3308 FREE_REG_SET (live);
3310 #ifdef HAVE_conditional_execution
3311 count_or_remove_death_notes (blocks, 1);
3312 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3313 sbitmap_free (blocks);
3314 #endif
3316 #endif /* HAVE_peephole2 */