/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (rtx insn)
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
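
/* Most callers go through the `recog_memoized' wrapper from recog.h,
   as extract_insn does below.  An illustrative sketch (the surrounding
   pass and INSN are assumed):

     if (recog_memoized (insn) < 0)
       fatal_insn_not_found (insn);

   A second call for the same insn returns the cached INSN_CODE without
   rerunning recognition.  */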

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
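
/* For example, a pass that must rewrite two locations of an insn
   atomically queues both changes and validates them together (an
   illustrative sketch; INSN, X, NEW0 and NEW1 come from the caller):

     validate_change (insn, &XEXP (x, 0), new0, 1);
     validate_change (insn, &XEXP (x, 1), new1, 1);
     if (! apply_change_group ())
       return;

   If the group fails, every queued change has already been undone.  */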

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group (void)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
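
/* A caller that speculatively extends an existing group can record the
   current group size and retract only its own changes on failure.  An
   illustrative sketch (verify_some_property stands for whatever extra
   test the caller applies; it is not part of this file):

     int n = num_validated_changes ();
     validate_change (insn, loc, new_rtx, 1);
     if (! verify_some_property (insn))
       cancel_changes (n);
   */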

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already-replaced expression; otherwise we
     get infinite recursion for replacements like (reg X) -> (subreg (reg X))
     done by regmove, so we must special-case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0))) !=
                  ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, j))))
                abort ();
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
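
/* For example, to try folding a known-constant pseudo into an insn and
   keep the insn only if it still matches (an illustrative sketch; REG
   and INSN come from the caller):

     if (validate_replace_rtx (reg, GEN_INT (42), insn))
       ...

   If any occurrence cannot be replaced validly, every occurrence is
   restored and the call returns zero.  */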

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		return 0;
	    }
	}
    }

  return result;
}

/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
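
/* An illustrative sketch of the intended use (DEST and INSN come from
   the caller, and PATTERN (insn) is assumed to be a SET of DEST):

     rtx use_insn;
     rtx *usep = find_single_use (dest, insn, &use_insn);

     if (usep
	 && validate_replace_rtx (dest, SET_SRC (PATTERN (insn)), use_insn))
       delete_insn (insn);
   */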

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (sub) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in an incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && GET_CODE (sub) == MEM)
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      if (GET_CODE (y) == ADDRESSOF)
	return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
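
/* In a machine description this predicate typically appears in a
   match_operand, e.g. (a generic sketch, not taken from any real port):

     (define_insn "*movsi_sketch"
       [(set (match_operand:SI 0 "general_operand" "=g")
	     (match_operand:SI 1 "general_operand" "g"))]
       ""
       "...")
   */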

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (sub) == MEM)
	return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (GET_CODE (sub) == REG
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (GET_CODE (op) == REG
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (GET_CODE (op) == CONST_INT
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
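
/* On a STACK_GROWS_DOWNWARD target without push rounding, a valid push
   operand looks like

     (mem:SI (pre_dec:SI (reg/f:SI sp)))

   while with rounding to, say, 4 bytes for an HImode push it becomes

     (mem:HI (pre_modify (reg/f:SI sp)
			 (plus (reg/f:SI sp) (const_int -4))))

   (illustrative RTL; the exact modes depend on the target).  */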

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
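
/* For example, for the extended asm

     asm ("..." : "=r" (a), "=r" (b) : "r" (c));

   BODY is a PARALLEL of two SETs (plus any clobbers), and this returns
   2 outputs + 1 input = 3.  */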

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
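
/* The usual calling pattern sizes the arrays with asm_noperands first,
   as check_asm_operands does above:

     noperands = asm_noperands (body);
     operands = alloca (noperands * sizeof (rtx));
     constraints = alloca (noperands * sizeof (char *));
     decode_asm_operands (body, operands, NULL, constraints, NULL);
   */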

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;
	case '=':
	case '+':
	case '*':
	case '%':
	case '!':
	case '#':
	case '&':
	case '?':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  do
	    constraint++;
	  while (ISDIGIT (*constraint));
	  if (! result)
	    result = -1;
	  continue;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    result = 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    result = 1;
	  break;

	case '>':
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    result = 1;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
	    result = 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
	    result = 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
	    result = 1;
	  break;

	case 's':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* Fall through.  */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    result = 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    result = 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
	    result = 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
	    result = 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
	    result = 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
	    result = 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
	    result = 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
	    result = 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
	    result = 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
	    result = 1;
	  break;

	case 'X':
	  result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		result = 1;
	    }
#ifdef EXTRA_CONSTRAINT_STR
	  else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
	    result = 1;
	  else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
		   /* Every memory operand can be reloaded to fit.  */
		   && memory_operand (op, VOIDmode))
	    result = 1;
	  else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
		   /* Every address operand can be reloaded to fit.  */
		   && address_operand (op, VOIDmode))
	    result = 1;
#endif
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
    }

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
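
/* For example, for *P == (plus (reg) (const_int 4)) this returns
   &XEXP (*p, 1), the location holding (const_int 4); for a bare
   (reg) it returns a null pointer.  */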

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
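
/* For example, for an SImode reference to (plus (reg) (const_int 100)),
   this tentatively tests the address (plus (reg) (const_int 103)), i.e.
   the original displacement plus mode_sz - 1 = 3, and accepts the
   address only if that worst-case offset is still legitimate.  */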
1949 /* Return 1 if ADDR is an address-expression whose effect depends
1950 on the mode of the memory reference it is used in.
1952 Autoincrement addressing is a typical example of mode-dependence
1953 because the amount of the increment depends on the mode. */
1956 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1958 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1959 return 0;
1960 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1961 win: ATTRIBUTE_UNUSED_LABEL
1962 return 1;
1965 /* Like extract_insn, but save insn extracted and don't extract again, when
1966 called again for the same insn expecting that recog_data still contain the
1967 valid information. This is used primary by gen_attr infrastructure that
1968 often does extract insn again and again. */
1969 void
1970 extract_insn_cached (rtx insn)
1972 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1973 return;
1974 extract_insn (insn);
1975 recog_data.insn = insn;
1977 /* Do cached extract_insn, constrain_operands and complain about failures.
1978 Used by insn_attrtab. */
1979 void
1980 extract_constrain_insn_cached (rtx insn)
1982 extract_insn_cached (insn);
1983 if (which_alternative == -1
1984 && !constrain_operands (reload_completed))
1985 fatal_insn_not_found (insn);
1987 /* Do cached constrain_operands and complain about failures. */
1989 constrain_operands_cached (int strict)
1991 if (which_alternative == -1)
1992 return constrain_operands (strict);
1993 else
1994 return 1;
1997 /* Analyze INSN and fill in recog_data. */
1999 void
2000 extract_insn (rtx insn)
2002 int i;
2003 int icode;
2004 int noperands;
2005 rtx body = PATTERN (insn);
2007 recog_data.insn = NULL;
2008 recog_data.n_operands = 0;
2009 recog_data.n_alternatives = 0;
2010 recog_data.n_dups = 0;
2011 which_alternative = -1;
2013 switch (GET_CODE (body))
2015 case USE:
2016 case CLOBBER:
2017 case ASM_INPUT:
2018 case ADDR_VEC:
2019 case ADDR_DIFF_VEC:
2020 return;
2022 case SET:
2023 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2024 goto asm_insn;
2025 else
2026 goto normal_insn;
2027 case PARALLEL:
2028 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2029 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2030 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2031 goto asm_insn;
2032 else
2033 goto normal_insn;
2034 case ASM_OPERANDS:
2035 asm_insn:
2036 recog_data.n_operands = noperands = asm_noperands (body);
2037 if (noperands >= 0)
2039 /* This insn is an `asm' with operands. */
2041 /* expand_asm_operands makes sure there aren't too many operands. */
2042 if (noperands > MAX_RECOG_OPERANDS)
2043 abort ();
2045 /* Now get the operand values and constraints out of the insn. */
2046 decode_asm_operands (body, recog_data.operand,
2047 recog_data.operand_loc,
2048 recog_data.constraints,
2049 recog_data.operand_mode);
2050 if (noperands > 0)
2052 const char *p = recog_data.constraints[0];
2053 recog_data.n_alternatives = 1;
2054 while (*p)
2055 recog_data.n_alternatives += (*p++ == ',');
2057 break;
2059 fatal_insn_not_found (insn);
2061 default:
2062 normal_insn:
2063 /* Ordinary insn: recognize it, get the operands via insn_extract
2064 and get the constraints. */
2066 icode = recog_memoized (insn);
2067 if (icode < 0)
2068 fatal_insn_not_found (insn);
2070 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2071 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2072 recog_data.n_dups = insn_data[icode].n_dups;
2074 insn_extract (insn);
2076 for (i = 0; i < noperands; i++)
2078 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2079 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2080 /* A VOIDmode match_operand gets its mode from its real operand. */
2081 if (recog_data.operand_mode[i] == VOIDmode)
2082 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2085 for (i = 0; i < noperands; i++)
2086 recog_data.operand_type[i]
2087 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2088 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2089 : OP_IN);
2091 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2092 abort ();
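/* For example, an asm operand constrained by "=r,m" makes the loop
   above count one comma and leave recog_data.n_alternatives == 2; for
   an ordinary insn the count simply comes from the insn_data table
   generated from the machine description.  */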
2095 /* After calling extract_insn, you can use this function to extract some
2096 information from the constraint strings into a more usable form.
2097 The collected data is stored in recog_op_alt. */
2098 void
2099 preprocess_constraints (void)
2101 int i;
2103 for (i = 0; i < recog_data.n_operands; i++)
2104 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2105 * sizeof (struct operand_alternative)));
2107 for (i = 0; i < recog_data.n_operands; i++)
2109 int j;
2110 struct operand_alternative *op_alt;
2111 const char *p = recog_data.constraints[i];
2113 op_alt = recog_op_alt[i];
2115 for (j = 0; j < recog_data.n_alternatives; j++)
2117 op_alt[j].class = NO_REGS;
2118 op_alt[j].constraint = p;
2119 op_alt[j].matches = -1;
2120 op_alt[j].matched = -1;
2122 if (*p == '\0' || *p == ',')
2124 op_alt[j].anything_ok = 1;
2125 continue;
2128 for (;;)
2130 char c = *p;
2131 if (c == '#')
2132 do
2133 c = *++p;
2134 while (c != ',' && c != '\0');
2135 if (c == ',' || c == '\0')
2137 p++;
2138 break;
2141 switch (c)
2143 case '=': case '+': case '*': case '%':
2144 case 'E': case 'F': case 'G': case 'H':
2145 case 's': case 'i': case 'n':
2146 case 'I': case 'J': case 'K': case 'L':
2147 case 'M': case 'N': case 'O': case 'P':
2148 /* These don't say anything we care about. */
2149 break;
2151 case '?':
2152 op_alt[j].reject += 6;
2153 break;
2154 case '!':
2155 op_alt[j].reject += 600;
2156 break;
2157 case '&':
2158 op_alt[j].earlyclobber = 1;
2159 break;
2161 case '0': case '1': case '2': case '3': case '4':
2162 case '5': case '6': case '7': case '8': case '9':
2164 char *end;
2165 op_alt[j].matches = strtoul (p, &end, 10);
2166 recog_op_alt[op_alt[j].matches][j].matched = i;
2167 p = end;
2169 continue;
2171 case 'm':
2172 op_alt[j].memory_ok = 1;
2173 break;
2174 case '<':
2175 op_alt[j].decmem_ok = 1;
2176 break;
2177 case '>':
2178 op_alt[j].incmem_ok = 1;
2179 break;
2180 case 'V':
2181 op_alt[j].nonoffmem_ok = 1;
2182 break;
2183 case 'o':
2184 op_alt[j].offmem_ok = 1;
2185 break;
2186 case 'X':
2187 op_alt[j].anything_ok = 1;
2188 break;
2190 case 'p':
2191 op_alt[j].is_address = 1;
2192 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2193 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2194 break;
2196 case 'g': case 'r':
2197 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2198 break;
2200 default:
2201 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2203 op_alt[j].memory_ok = 1;
2204 break;
2206 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2208 op_alt[j].is_address = 1;
2209 op_alt[j].class
2210 = (reg_class_subunion
2211 [(int) op_alt[j].class]
2212 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2213 break;
2216 op_alt[j].class
2217 = (reg_class_subunion
2218 [(int) op_alt[j].class]
2219 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2220 break;
2222 p += CONSTRAINT_LEN (c, p);
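/* As a small worked example: for an operand whose constraint string is
   "r,m", the loop above leaves recog_op_alt[i][0].class == GENERAL_REGS
   (via the 'r' case) and recog_op_alt[i][1].memory_ok == 1 (via the 'm'
   case), so consumers such as regrename can test these fields instead
   of re-parsing the constraint string.  */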
2228 /* Check the operands of an insn against the insn's operand constraints
2229 and return 1 if they are valid.
2230 The information about the insn's operands, constraints, operand modes
2231 etc. is obtained from the global variables set up by extract_insn.
2233 WHICH_ALTERNATIVE is set to a number which indicates which
2234 alternative of constraints was matched: 0 for the first alternative,
2235 1 for the next, etc.
2237 In addition, when two operands are required to match
2238 and it happens that the output operand is (reg) while the
2239 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2240 make the output operand look like the input.
2241 This is because the output operand is the one the template will print.
2243 This is used in final, just before printing the assembler code and by
2244 the routines that determine an insn's attribute.
2246 If STRICT is a positive nonzero value, it means that we have been
2247 called after reload has been completed. In that case, we must
2248 do all checks strictly. If it is zero, it means that we have been called
2249 before reload has completed. In that case, we first try to see if we can
2250 find an alternative that matches strictly. If not, we try again, this
2251 time assuming that reload will fix up the insn. This provides a "best
2252 guess" for the alternative and is used to compute attributes of insns prior
2253 to reload. A negative value of STRICT is used for this internal call. */
2255 struct funny_match
2256 {
2257 int this, other;
2258 };
2260 int
2261 constrain_operands (int strict)
2263 const char *constraints[MAX_RECOG_OPERANDS];
2264 int matching_operands[MAX_RECOG_OPERANDS];
2265 int earlyclobber[MAX_RECOG_OPERANDS];
2266 int c;
2268 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2269 int funny_match_index;
2271 which_alternative = 0;
2272 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2273 return 1;
2275 for (c = 0; c < recog_data.n_operands; c++)
2277 constraints[c] = recog_data.constraints[c];
2278 matching_operands[c] = -1;
2283 int opno;
2284 int lose = 0;
2285 funny_match_index = 0;
2287 for (opno = 0; opno < recog_data.n_operands; opno++)
2289 rtx op = recog_data.operand[opno];
2290 enum machine_mode mode = GET_MODE (op);
2291 const char *p = constraints[opno];
2292 int offset = 0;
2293 int win = 0;
2294 int val;
2295 int len;
2297 earlyclobber[opno] = 0;
2299 /* A unary operator may be accepted by the predicate, but it
2300 is irrelevant for matching constraints. */
2301 if (UNARY_P (op))
2302 op = XEXP (op, 0);
2304 if (GET_CODE (op) == SUBREG)
2306 if (GET_CODE (SUBREG_REG (op)) == REG
2307 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2308 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2309 GET_MODE (SUBREG_REG (op)),
2310 SUBREG_BYTE (op),
2311 GET_MODE (op));
2312 op = SUBREG_REG (op);
2315 /* An empty constraint or empty alternative
2316 allows anything which matched the pattern. */
2317 if (*p == 0 || *p == ',')
2318 win = 1;
2321 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2323 case '\0':
2324 len = 0;
2325 break;
2326 case ',':
2327 c = '\0';
2328 break;
2330 case '?': case '!': case '*': case '%':
2331 case '=': case '+':
2332 break;
2334 case '#':
2335 /* Ignore rest of this alternative as far as
2336 constraint checking is concerned. */
2337 do
2338 p++;
2339 while (*p && *p != ',');
2340 len = 0;
2341 break;
2343 case '&':
2344 earlyclobber[opno] = 1;
2345 break;
2347 case '0': case '1': case '2': case '3': case '4':
2348 case '5': case '6': case '7': case '8': case '9':
2350 /* This operand must be the same as a previous one.
2351 This kind of constraint is used for instructions such
2352 as add when they take only two operands.
2354 Note that the lower-numbered operand is passed first.
2356 If we are not testing strictly, assume that this
2357 constraint will be satisfied. */
2359 char *end;
2360 int match;
2362 match = strtoul (p, &end, 10);
2363 p = end;
2365 if (strict < 0)
2366 val = 1;
2367 else
2369 rtx op1 = recog_data.operand[match];
2370 rtx op2 = recog_data.operand[opno];
2372 /* A unary operator may be accepted by the predicate,
2373 but it is irrelevant for matching constraints. */
2374 if (UNARY_P (op1))
2375 op1 = XEXP (op1, 0);
2376 if (UNARY_P (op2))
2377 op2 = XEXP (op2, 0);
2379 val = operands_match_p (op1, op2);
2382 matching_operands[opno] = match;
2383 matching_operands[match] = opno;
2385 if (val != 0)
2386 win = 1;
2388 /* If output is *x and input is *--x, arrange later
2389 to change the output to *--x as well, since the
2390 output op is the one that will be printed. */
2391 if (val == 2 && strict > 0)
2393 funny_match[funny_match_index].this = opno;
2394 funny_match[funny_match_index++].other = match;
2397 len = 0;
2398 break;
2400 case 'p':
2401 /* p is used for address_operands. When we are called by
2402 gen_reload, no one will have checked that the address is
2403 strictly valid, i.e., that all pseudos requiring hard regs
2404 have gotten them. */
2405 if (strict <= 0
2406 || (strict_memory_address_p (recog_data.operand_mode[opno],
2407 op)))
2408 win = 1;
2409 break;
2411 /* No need to check general_operand again;
2412 it was done in insn-recog.c. */
2413 case 'g':
2414 /* Anything goes unless it is a REG and really has a hard reg
2415 but the hard reg is not in the class GENERAL_REGS. */
2416 if (strict < 0
2417 || GENERAL_REGS == ALL_REGS
2418 || GET_CODE (op) != REG
2419 || (reload_in_progress
2420 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2421 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2422 win = 1;
2423 break;
2425 case 'X':
2426 /* This is used for a MATCH_SCRATCH in the cases when
2427 we don't actually need anything. So anything goes
2428 any time. */
2429 win = 1;
2430 break;
2432 case 'm':
2433 /* Memory operands must be valid, to the extent
2434 required by STRICT. */
2435 if (GET_CODE (op) == MEM)
2437 if (strict > 0
2438 && !strict_memory_address_p (GET_MODE (op),
2439 XEXP (op, 0)))
2440 break;
2441 if (strict == 0
2442 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2443 break;
2444 win = 1;
2446 /* Before reload, accept what reload can turn into mem. */
2447 else if (strict < 0 && CONSTANT_P (op))
2448 win = 1;
2449 /* During reload, accept a pseudo. */
2450 else if (reload_in_progress && GET_CODE (op) == REG
2451 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2452 win = 1;
2453 break;
2455 case '<':
2456 if (GET_CODE (op) == MEM
2457 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2458 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2459 win = 1;
2460 break;
2462 case '>':
2463 if (GET_CODE (op) == MEM
2464 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2465 || GET_CODE (XEXP (op, 0)) == POST_INC))
2466 win = 1;
2467 break;
2469 case 'E':
2470 case 'F':
2471 if (GET_CODE (op) == CONST_DOUBLE
2472 || (GET_CODE (op) == CONST_VECTOR
2473 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2474 win = 1;
2475 break;
2477 case 'G':
2478 case 'H':
2479 if (GET_CODE (op) == CONST_DOUBLE
2480 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2481 win = 1;
2482 break;
2484 case 's':
2485 if (GET_CODE (op) == CONST_INT
2486 || (GET_CODE (op) == CONST_DOUBLE
2487 && GET_MODE (op) == VOIDmode))
2488 break;
2489 case 'i':
2490 if (CONSTANT_P (op))
2491 win = 1;
2492 break;
2494 case 'n':
2495 if (GET_CODE (op) == CONST_INT
2496 || (GET_CODE (op) == CONST_DOUBLE
2497 && GET_MODE (op) == VOIDmode))
2498 win = 1;
2499 break;
2501 case 'I':
2502 case 'J':
2503 case 'K':
2504 case 'L':
2505 case 'M':
2506 case 'N':
2507 case 'O':
2508 case 'P':
2509 if (GET_CODE (op) == CONST_INT
2510 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2511 win = 1;
2512 break;
2514 case 'V':
2515 if (GET_CODE (op) == MEM
2516 && ((strict > 0 && ! offsettable_memref_p (op))
2517 || (strict < 0
2518 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2519 || (reload_in_progress
2520 && !(GET_CODE (op) == REG
2521 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2522 win = 1;
2523 break;
2525 case 'o':
2526 if ((strict > 0 && offsettable_memref_p (op))
2527 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2528 /* Before reload, accept what reload can handle. */
2529 || (strict < 0
2530 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2531 /* During reload, accept a pseudo. */
2532 || (reload_in_progress && GET_CODE (op) == REG
2533 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2534 win = 1;
2535 break;
2537 default:
2539 enum reg_class class;
2541 class = (c == 'r'
2542 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2543 if (class != NO_REGS)
2545 if (strict < 0
2546 || (strict == 0
2547 && GET_CODE (op) == REG
2548 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2549 || (strict == 0 && GET_CODE (op) == SCRATCH)
2550 || (GET_CODE (op) == REG
2551 && reg_fits_class_p (op, class, offset, mode)))
2552 win = 1;
2554 #ifdef EXTRA_CONSTRAINT_STR
2555 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2556 win = 1;
2558 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2559 /* Every memory operand can be reloaded to fit. */
2560 && ((strict < 0 && GET_CODE (op) == MEM)
2561 /* Before reload, accept what reload can turn
2562 into mem. */
2563 || (strict < 0 && CONSTANT_P (op))
2564 /* During reload, accept a pseudo. */
2565 || (reload_in_progress && GET_CODE (op) == REG
2566 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2567 win = 1;
2568 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2569 /* Every address operand can be reloaded to fit. */
2570 && strict < 0)
2571 win = 1;
2572 #endif
2573 break;
2576 while (p += len, c);
2578 constraints[opno] = p;
2579 /* If this operand did not win somehow,
2580 this alternative loses. */
2581 if (! win)
2582 lose = 1;
2584 /* This alternative won; the operands are ok.
2585 Change whichever operands this alternative says to change. */
2586 if (! lose)
2588 int opno, eopno;
2590 /* See if any earlyclobber operand conflicts with some other
2591 operand. */
2593 if (strict > 0)
2594 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2595 /* Ignore earlyclobber operands now in memory,
2596 because we would often report failure when we have
2597 two memory operands, one of which was formerly a REG. */
2598 if (earlyclobber[eopno]
2599 && GET_CODE (recog_data.operand[eopno]) == REG)
2600 for (opno = 0; opno < recog_data.n_operands; opno++)
2601 if ((GET_CODE (recog_data.operand[opno]) == MEM
2602 || recog_data.operand_type[opno] != OP_OUT)
2603 && opno != eopno
2604 /* Ignore things like match_operator operands. */
2605 && *recog_data.constraints[opno] != 0
2606 && ! (matching_operands[opno] == eopno
2607 && operands_match_p (recog_data.operand[opno],
2608 recog_data.operand[eopno]))
2609 && ! safe_from_earlyclobber (recog_data.operand[opno],
2610 recog_data.operand[eopno]))
2611 lose = 1;
2613 if (! lose)
2615 while (--funny_match_index >= 0)
2617 recog_data.operand[funny_match[funny_match_index].other]
2618 = recog_data.operand[funny_match[funny_match_index].this];
2621 return 1;
2625 which_alternative++;
2627 while (which_alternative < recog_data.n_alternatives);
2629 which_alternative = -1;
2630 /* If we are about to reject this, but we are not to test strictly,
2631 try a very loose test. Only return failure if it fails also. */
2632 if (strict == 0)
2633 return constrain_operands (-1);
2634 else
2635 return 0;
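/* Worked example of the "funny match" fixup above, with hypothetical
   operands: if an alternative requires operand 1 to match operand 0,
   the output is (mem:SI (reg:SI r)) and the input is
   (mem:SI (pre_dec:SI (reg:SI r))), operands_match_p reports the funny
   match (value 2), and once the alternative wins the funny_match loop
   copies the input rtx over the output, so the template prints the
   side-effect form for both operands.  */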
2638 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2639 is a hard reg in class CLASS when its regno is offset by OFFSET
2640 and changed to mode MODE.
2641 If REG occupies multiple hard regs, all of them must be in CLASS. */
2643 int
2644 reg_fits_class_p (rtx operand, enum reg_class class, int offset,
2645 enum machine_mode mode)
2647 int regno = REGNO (operand);
2648 if (regno < FIRST_PSEUDO_REGISTER
2649 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2650 regno + offset))
2652 int sr;
2653 regno += offset;
2654 for (sr = hard_regno_nregs[regno][mode] - 1;
2655 sr > 0; sr--)
2656 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2657 regno + sr))
2658 break;
2659 return sr == 0;
2662 return 0;
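/* Illustrative case: with 32-bit hard registers, a DImode operand in
   hard reg 8 occupies regs 8 and 9 (per hard_regno_nregs), so the loop
   above insists that both be in CLASS; if only reg 8 is, the answer is
   0.  The register numbers here are purely illustrative.  */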
2665 /* Split a single instruction. Helper function for split_all_insns and
2666 split_all_insns_noflow. Return last insn in the sequence if successful,
2667 or NULL if unsuccessful. */
2669 static rtx
2670 split_insn (rtx insn)
2672 /* Split insns here to get max fine-grain parallelism. */
2673 rtx first = PREV_INSN (insn);
2674 rtx last = try_split (PATTERN (insn), insn, 1);
2676 if (last == insn)
2677 return NULL_RTX;
2679 /* try_split returns the NOTE that INSN became. */
2680 PUT_CODE (insn, NOTE);
2681 NOTE_SOURCE_FILE (insn) = 0;
2682 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2684 /* ??? Coddle to md files that generate subregs in post-reload
2685 splitters instead of computing the proper hard register. */
2686 if (reload_completed && first != last)
2688 first = NEXT_INSN (first);
2689 for (;;)
2691 if (INSN_P (first))
2692 cleanup_subreg_operands (first);
2693 if (first == last)
2694 break;
2695 first = NEXT_INSN (first);
2698 return last;
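/* Caller-side sketch (hypothetical): callers use the return value both
   as a success flag and as the tail of the replacement sequence, e.g.

     rtx last = split_insn (insn);
     if (last)
       record that this block changed and rescan up to LAST;

   which is how split_all_insns below reacts to a successful split.  */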
2701 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2703 void
2704 split_all_insns (int upd_life)
2706 sbitmap blocks;
2707 bool changed;
2708 basic_block bb;
2710 blocks = sbitmap_alloc (last_basic_block);
2711 sbitmap_zero (blocks);
2712 changed = false;
2714 FOR_EACH_BB_REVERSE (bb)
2716 rtx insn, next;
2717 bool finish = false;
2719 for (insn = BB_HEAD (bb); !finish ; insn = next)
2721 /* Can't use `next_real_insn', because that might skip across
2722 CODE_LABELs and thus walk out of short basic blocks. */
2723 next = NEXT_INSN (insn);
2724 finish = (insn == BB_END (bb));
2725 if (INSN_P (insn))
2727 rtx set = single_set (insn);
2729 /* Don't split no-op move insns. These should silently
2730 disappear later in final. Splitting such insns would
2731 break the code that handles REG_NO_CONFLICT blocks. */
2732 if (set && set_noop_p (set))
2734 /* Nops get in the way while scheduling, so delete them
2735 now if register allocation has already been done. It
2736 is too risky to try to do this before register
2737 allocation, and there are unlikely to be very many
2738 nops then anyway. */
2739 if (reload_completed)
2741 /* If the no-op set has a REG_UNUSED note, we need
2742 to update liveness information. */
2743 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2745 SET_BIT (blocks, bb->index);
2746 changed = true;
2748 /* ??? Is life info affected by deleting edges? */
2749 delete_insn_and_edges (insn);
2752 else
2754 rtx last = split_insn (insn);
2755 if (last)
2757 /* The split sequence may include a barrier, but the
2758 BB boundary we are interested in will be set to the
2759 previous insn. */
2761 while (GET_CODE (last) == BARRIER)
2762 last = PREV_INSN (last);
2763 SET_BIT (blocks, bb->index);
2764 changed = true;
2771 if (changed)
2773 int old_last_basic_block = last_basic_block;
2775 find_many_sub_basic_blocks (blocks);
2777 if (old_last_basic_block != last_basic_block && upd_life)
2778 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2781 if (changed && upd_life)
2782 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2783 PROP_DEATH_NOTES);
2785 #ifdef ENABLE_CHECKING
2786 verify_flow_info ();
2787 #endif
2789 sbitmap_free (blocks);
2792 /* Same as split_all_insns, but do not expect CFG to be available.
2793 Used by machine dependent reorg passes. */
2795 void
2796 split_all_insns_noflow (void)
2798 rtx next, insn;
2800 for (insn = get_insns (); insn; insn = next)
2802 next = NEXT_INSN (insn);
2803 if (INSN_P (insn))
2805 /* Don't split no-op move insns. These should silently
2806 disappear later in final. Splitting such insns would
2807 break the code that handles REG_NO_CONFLICT blocks. */
2808 rtx set = single_set (insn);
2809 if (set && set_noop_p (set))
2811 /* Nops get in the way while scheduling, so delete them
2812 now if register allocation has already been done. It
2813 is too risky to try to do this before register
2814 allocation, and there are unlikely to be very many
2815 nops then anyway.
2817 ??? Should we use delete_insn when the CFG isn't valid? */
2818 if (reload_completed)
2819 delete_insn_and_edges (insn);
2821 else
2822 split_insn (insn);
2827 #ifdef HAVE_peephole2
2828 struct peep2_insn_data
2829 {
2830 rtx insn;
2831 regset live_before;
2832 };
2834 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2835 static int peep2_current;
2837 /* A non-insn marker indicating the last insn of the block.
2838 The live_before regset for this element is correct, indicating
2839 global_live_at_end for the block. */
2840 #define PEEP2_EOB pc_rtx
2842 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2843 does not exist. Used by the recognizer to find the next insn to match
2844 in a multi-insn pattern. */
2846 rtx
2847 peep2_next_insn (int n)
2849 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2850 abort ();
2852 n += peep2_current;
2853 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2854 n -= MAX_INSNS_PER_PEEP2 + 1;
2856 if (peep2_insn_data[n].insn == PEEP2_EOB)
2857 return NULL_RTX;
2858 return peep2_insn_data[n].insn;
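/* The peep2_insn_data array acts as a circular window of
   MAX_INSNS_PER_PEEP2 + 1 slots.  Worked example (assuming, say,
   MAX_INSNS_PER_PEEP2 == 5, the value is target-dependent): with
   peep2_current == 4, peep2_next_insn (3) computes 4 + 3 == 7, wraps
   it to 7 - 6 == 1, and returns the insn in slot 1; the subtraction
   above is just a cheap modulo.  */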
2861 /* Return true if REGNO is dead before the Nth non-note insn
2862 after `current'. */
2864 int
2865 peep2_regno_dead_p (int ofs, int regno)
2867 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2868 abort ();
2870 ofs += peep2_current;
2871 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2872 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2874 if (peep2_insn_data[ofs].insn == NULL_RTX)
2875 abort ();
2877 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2880 /* Similarly for a REG. */
2882 int
2883 peep2_reg_dead_p (int ofs, rtx reg)
2885 int regno, n;
2887 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2888 abort ();
2890 ofs += peep2_current;
2891 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2892 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2894 if (peep2_insn_data[ofs].insn == NULL_RTX)
2895 abort ();
2897 regno = REGNO (reg);
2898 n = hard_regno_nregs[regno][GET_MODE (reg)];
2899 while (--n >= 0)
2900 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2901 return 0;
2902 return 1;
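/* These two predicates are meant to be called from define_peephole2
   conditions in a machine description; e.g. a condition string such as
   "peep2_reg_dead_p (2, operands[0])" (a hypothetical fragment) accepts
   the match only if operand 0's register is dead before the second
   non-note insn after the current one.  */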
2905 /* Try to find a hard register of mode MODE, matching the register class in
2906 CLASS_STR, which is available at the beginning of the FROM'th insn of the
2907 current peephole window and remains available until the end of the TO'th
2908 insn; FROM and TO are offsets as in peep2_next_insn. If TO equals FROM,
2909 the only condition is that the register must be available before that insn.
2910 Registers that already have bits set in REG_SET will not be considered.
2912 If an appropriate register is available, it will be returned and the
2913 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2914 returned. */
2916 rtx
2917 peep2_find_free_register (int from, int to, const char *class_str,
2918 enum machine_mode mode, HARD_REG_SET *reg_set)
2920 static int search_ofs;
2921 enum reg_class class;
2922 HARD_REG_SET live;
2923 int i;
2925 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2926 abort ();
2928 from += peep2_current;
2929 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2930 from -= MAX_INSNS_PER_PEEP2 + 1;
2931 to += peep2_current;
2932 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2933 to -= MAX_INSNS_PER_PEEP2 + 1;
2935 if (peep2_insn_data[from].insn == NULL_RTX)
2936 abort ();
2937 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2939 while (from != to)
2941 HARD_REG_SET this_live;
2943 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2944 from = 0;
2945 if (peep2_insn_data[from].insn == NULL_RTX)
2946 abort ();
2947 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2948 IOR_HARD_REG_SET (live, this_live);
2951 class = (class_str[0] == 'r' ? GENERAL_REGS
2952 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2954 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2956 int raw_regno, regno, success, j;
2958 /* Distribute the free registers as much as possible. */
2959 raw_regno = search_ofs + i;
2960 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2961 raw_regno -= FIRST_PSEUDO_REGISTER;
2962 #ifdef REG_ALLOC_ORDER
2963 regno = reg_alloc_order[raw_regno];
2964 #else
2965 regno = raw_regno;
2966 #endif
2968 /* Don't allocate fixed registers. */
2969 if (fixed_regs[regno])
2970 continue;
2971 /* Make sure the register is of the right class. */
2972 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2973 continue;
2974 /* And can support the mode we need. */
2975 if (! HARD_REGNO_MODE_OK (regno, mode))
2976 continue;
2977 /* And that we don't create an extra save/restore. */
2978 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2979 continue;
2980 /* And we don't clobber traceback for noreturn functions. */
2981 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2982 && (! reload_completed || frame_pointer_needed))
2983 continue;
2985 success = 1;
2986 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2988 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2989 || TEST_HARD_REG_BIT (live, regno + j))
2991 success = 0;
2992 break;
2995 if (success)
2997 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2998 SET_HARD_REG_BIT (*reg_set, regno + j);
3000 /* Start the next search with the next register. */
3001 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3002 raw_regno = 0;
3003 search_ofs = raw_regno;
3005 return gen_rtx_REG (mode, regno);
3009 search_ofs = 0;
3010 return NULL_RTX;
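/* Usage sketch (hypothetical): a define_peephole2 that needs a scratch
   register surviving the whole matched sequence can do

     HARD_REG_SET used;
     rtx scratch;
     CLEAR_HARD_REG_SET (used);
     scratch = peep2_find_free_register (0, 2, "r", SImode, &used);

   and abandon the transformation if NULL_RTX comes back; the 0..2
   window and SImode here are illustrative.  */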
3013 /* Perform the peephole2 optimization pass. */
3015 void
3016 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
3018 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3019 rtx insn, prev;
3020 regset live;
3021 int i;
3022 basic_block bb;
3023 #ifdef HAVE_conditional_execution
3024 sbitmap blocks;
3025 bool changed;
3026 #endif
3027 bool do_cleanup_cfg = false;
3028 bool do_rebuild_jump_labels = false;
3030 /* Initialize the regsets we're going to use. */
3031 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3032 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3033 live = INITIALIZE_REG_SET (rs_heads[i]);
3035 #ifdef HAVE_conditional_execution
3036 blocks = sbitmap_alloc (last_basic_block);
3037 sbitmap_zero (blocks);
3038 changed = false;
3039 #else
3040 count_or_remove_death_notes (NULL, 1);
3041 #endif
3043 FOR_EACH_BB_REVERSE (bb)
3045 struct propagate_block_info *pbi;
3047 /* Indicate that all slots except the last hold invalid data. */
3048 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3049 peep2_insn_data[i].insn = NULL_RTX;
3051 /* Indicate that the last slot contains live_after data. */
3052 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3053 peep2_current = MAX_INSNS_PER_PEEP2;
3055 /* Start up propagation. */
3056 COPY_REG_SET (live, bb->global_live_at_end);
3057 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3059 #ifdef HAVE_conditional_execution
3060 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3061 #else
3062 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3063 #endif
3065 for (insn = BB_END (bb); ; insn = prev)
3067 prev = PREV_INSN (insn);
3068 if (INSN_P (insn))
3070 rtx try, before_try, x;
3071 int match_len;
3072 rtx note;
3073 bool was_call = false;
3075 /* Record this insn. */
3076 if (--peep2_current < 0)
3077 peep2_current = MAX_INSNS_PER_PEEP2;
3078 peep2_insn_data[peep2_current].insn = insn;
3079 propagate_one_insn (pbi, insn);
3080 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3082 /* Match the peephole. */
3083 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3084 if (try != NULL)
3086 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3087 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3088 cfg-related call notes. */
3089 for (i = 0; i <= match_len; ++i)
3091 int j;
3092 rtx old_insn, new_insn, note;
3094 j = i + peep2_current;
3095 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3096 j -= MAX_INSNS_PER_PEEP2 + 1;
3097 old_insn = peep2_insn_data[j].insn;
3098 if (GET_CODE (old_insn) != CALL_INSN)
3099 continue;
3100 was_call = true;
3102 new_insn = try;
3103 while (new_insn != NULL_RTX)
3105 if (GET_CODE (new_insn) == CALL_INSN)
3106 break;
3107 new_insn = NEXT_INSN (new_insn);
3110 if (new_insn == NULL_RTX)
3111 abort ();
3113 CALL_INSN_FUNCTION_USAGE (new_insn)
3114 = CALL_INSN_FUNCTION_USAGE (old_insn);
3116 for (note = REG_NOTES (old_insn);
3117 note;
3118 note = XEXP (note, 1))
3119 switch (REG_NOTE_KIND (note))
3121 case REG_NORETURN:
3122 case REG_SETJMP:
3123 case REG_ALWAYS_RETURN:
3124 REG_NOTES (new_insn)
3125 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3126 XEXP (note, 0),
3127 REG_NOTES (new_insn));
3128 default:
3129 /* Discard all other reg notes. */
3130 break;
3133 /* Croak if there is another call in the sequence. */
3134 while (++i <= match_len)
3136 j = i + peep2_current;
3137 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3138 j -= MAX_INSNS_PER_PEEP2 + 1;
3139 old_insn = peep2_insn_data[j].insn;
3140 if (GET_CODE (old_insn) == CALL_INSN)
3141 abort ();
3143 break;
3146 i = match_len + peep2_current;
3147 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3148 i -= MAX_INSNS_PER_PEEP2 + 1;
3150 note = find_reg_note (peep2_insn_data[i].insn,
3151 REG_EH_REGION, NULL_RTX);
3153 /* Replace the old sequence with the new. */
3154 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3155 INSN_LOCATOR (peep2_insn_data[i].insn));
3156 before_try = PREV_INSN (insn);
3157 delete_insn_chain (insn, peep2_insn_data[i].insn);
3159 /* Re-insert the EH_REGION notes. */
3160 if (note || (was_call && nonlocal_goto_handler_labels))
3162 edge eh_edge;
3164 for (eh_edge = bb->succ; eh_edge
3165 ; eh_edge = eh_edge->succ_next)
3166 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3167 break;
3169 for (x = try ; x != before_try ; x = PREV_INSN (x))
3170 if (GET_CODE (x) == CALL_INSN
3171 || (flag_non_call_exceptions
3172 && may_trap_p (PATTERN (x))
3173 && !find_reg_note (x, REG_EH_REGION, NULL)))
3175 if (note)
3176 REG_NOTES (x)
3177 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3178 XEXP (note, 0),
3179 REG_NOTES (x));
3181 if (x != BB_END (bb) && eh_edge)
3183 edge nfte, nehe;
3184 int flags;
3186 nfte = split_block (bb, x);
3187 flags = (eh_edge->flags
3188 & (EDGE_EH | EDGE_ABNORMAL));
3189 if (GET_CODE (x) == CALL_INSN)
3190 flags |= EDGE_ABNORMAL_CALL;
3191 nehe = make_edge (nfte->src, eh_edge->dest,
3192 flags);
3194 nehe->probability = eh_edge->probability;
3195 nfte->probability
3196 = REG_BR_PROB_BASE - nehe->probability;
3198 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3199 #ifdef HAVE_conditional_execution
3200 SET_BIT (blocks, nfte->dest->index);
3201 changed = true;
3202 #endif
3203 bb = nfte->src;
3204 eh_edge = nehe;
3208 /* We may have converted a possibly trapping insn into a
3209 non-trapping one; zap any outgoing edges that are now dummies. */
3210 do_cleanup_cfg |= purge_dead_edges (bb);
3213 #ifdef HAVE_conditional_execution
3214 /* With conditional execution, we cannot back up the
3215 live information so easily, since the conditional
3216 death data structures are not so self-contained.
3217 So record that we've made a modification to this
3218 block and update life information at the end. */
3219 SET_BIT (blocks, bb->index);
3220 changed = true;
3222 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3223 peep2_insn_data[i].insn = NULL_RTX;
3224 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3225 #else
3226 /* Back up lifetime information past the end of the
3227 newly created sequence. */
3228 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3229 i = 0;
3230 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3232 /* Update life information for the new sequence. */
3233 x = try;
3236 if (INSN_P (x))
3238 if (--i < 0)
3239 i = MAX_INSNS_PER_PEEP2;
3240 peep2_insn_data[i].insn = x;
3241 propagate_one_insn (pbi, x);
3242 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3244 x = PREV_INSN (x);
3246 while (x != prev);
3248 /* ??? Should verify that LIVE now matches what we
3249 had before the new sequence. */
3251 peep2_current = i;
3252 #endif
3254 /* If we generated a jump instruction, it won't have
3255 JUMP_LABEL set. Recompute after we're done. */
3256 for (x = try; x != before_try; x = PREV_INSN (x))
3257 if (GET_CODE (x) == JUMP_INSN)
3259 do_rebuild_jump_labels = true;
3260 break;
3265 if (insn == BB_HEAD (bb))
3266 break;
3269 free_propagate_block_info (pbi);
3272 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3273 FREE_REG_SET (peep2_insn_data[i].live_before);
3274 FREE_REG_SET (live);
3276 if (do_rebuild_jump_labels)
3277 rebuild_jump_labels (get_insns ());
3279 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3280 we've changed global life since exception handlers are no longer
3281 reachable. */
3282 if (do_cleanup_cfg)
3284 cleanup_cfg (0);
3285 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3287 #ifdef HAVE_conditional_execution
3288 else
3290 count_or_remove_death_notes (blocks, 1);
3291 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3293 sbitmap_free (blocks);
3294 #endif
3296 #endif /* HAVE_peephole2 */
3298 /* Common predicates for use with define_bypass. */
3300 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3301 data, not the address operand(s) of the store. IN_INSN must be
3302 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3303 SETs inside. */
3305 int
3306 store_data_bypass_p (rtx out_insn, rtx in_insn)
3308 rtx out_set, in_set;
3310 in_set = single_set (in_insn);
3311 if (! in_set)
3312 abort ();
3314 if (GET_CODE (SET_DEST (in_set)) != MEM)
3315 return false;
3317 out_set = single_set (out_insn);
3318 if (out_set)
3320 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3321 return false;
3323 else
3325 rtx out_pat;
3326 int i;
3328 out_pat = PATTERN (out_insn);
3329 if (GET_CODE (out_pat) != PARALLEL)
3330 abort ();
3332 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3334 rtx exp = XVECEXP (out_pat, 0, i);
3336 if (GET_CODE (exp) == CLOBBER)
3337 continue;
3339 if (GET_CODE (exp) != SET)
3340 abort ();
3342 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3343 return false;
3347 return true;
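/* In a machine description this predicate typically appears as the
   guard of a define_bypass, e.g. (a hypothetical fragment; "alu" and
   "store" are made-up reservation names):

     (define_bypass 1 "alu" "store" "store_data_bypass_p")

   i.e. the shorter alu->store latency applies only when the store
   needs the ALU result as the data being stored, not as part of the
   address.  */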
3350 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3351 condition, and not in the THEN or ELSE branch. OUT_INSN may be either a single
3352 set or a PARALLEL of multiple sets; IN_INSN should satisfy single_set for a
3353 meaningful answer, but for convenience of insn categorization it may be any JUMP or CALL insn. */
3355 int
3356 if_test_bypass_p (rtx out_insn, rtx in_insn)
3358 rtx out_set, in_set;
3360 in_set = single_set (in_insn);
3361 if (! in_set)
3363 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3364 return false;
3365 abort ();
3368 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3369 return false;
3370 in_set = SET_SRC (in_set);
3372 out_set = single_set (out_insn);
3373 if (out_set)
3375 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3376 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3377 return false;
3379 else
3381 rtx out_pat;
3382 int i;
3384 out_pat = PATTERN (out_insn);
3385 if (GET_CODE (out_pat) != PARALLEL)
3386 abort ();
3388 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3390 rtx exp = XVECEXP (out_pat, 0, i);
3392 if (GET_CODE (exp) == CLOBBER)
3393 continue;
3395 if (GET_CODE (exp) != SET)
3396 abort ();
3398 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3399 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3400 return false;
3404 return true;
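/* For example, with IN_INSN

     (set (reg:SI 0)
          (if_then_else:SI (ne (reg:CC 17) (const_int 0))
                           (reg:SI 1) (reg:SI 2)))

   an OUT_INSN setting (reg:CC 17) feeds only the tested condition, so
   if_test_bypass_p returns true; an OUT_INSN setting (reg:SI 1) feeds
   one of the selected arms, so it returns false.  The register numbers
   are illustrative.  */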