gcc/recog.c
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (rtx insn)
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
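
/* Illustrative usage sketch (not part of this file): callers typically
   queue several tentative replacements with IN_GROUP == 1 and validate
   them atomically; NEW0 and NEW1 here are hypothetical replacement rtxes.

     validate_change (insn, &XEXP (SET_SRC (PATTERN (insn)), 0), new0, 1);
     validate_change (insn, &XEXP (SET_SRC (PATTERN (insn)), 1), new1, 1);
     if (! apply_change_group ())
       ;  every queued change has been backed out

   With IN_GROUP == 0 the single change is validated immediately and the
   return value reports the result.  */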
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */

int
num_changes_pending (void)
{
  return num_changes;
}
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group (void)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
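
/* Illustrative usage sketch (not part of this file): pairing
   num_validated_changes with cancel_changes rolls back only the
   speculative tail of a group; INSN, NEW_SRC and the property check
   are hypothetical.

     int checkpoint = num_validated_changes ();
     validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
     if (! still_profitable (insn))
       cancel_changes (checkpoint);

   Changes made before CHECKPOINT stay queued for apply_change_group.  */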
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0))) !=
                  ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, j))))
                abort ();
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
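
/* Illustrative usage sketch (not part of this file): replacing a pseudo
   REG with a constant CST throughout INSN, keeping the change only if
   the insn is still recognized (REG, CST and INSN are hypothetical):

     if (! validate_replace_rtx (reg, cst, insn))
       ;  INSN is unchanged; all substitutions were backed out  */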
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
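
/* Illustrative usage sketch (not part of this file): a combine-style
   caller checks whether DEST has exactly one consumer before folding
   INSN into it; REPLACEMENT is hypothetical.

     rtx use_insn;
     rtx *usep = find_single_use (dest, insn, &use_insn);
     if (usep)
       validate_replace_rtx (dest, replacement, use_insn);  */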
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  return 0;

 win:
  return 1;
}
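
/* Illustrative sketch (not part of this file): in a machine description,
   general_operand is referenced by name from match_operand, e.g. the
   classic shape of a move pattern (names and constraints elided):

     (define_insn "..."
       [(set (match_operand:SI 0 "general_operand" "")
             (match_operand:SI 1 "general_operand" ""))]
       "..."
       "...")  */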
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
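
/* Illustrative sketch (not part of this file): with STACK_PUSH_CODE ==
   PRE_DEC, push_operand accepts a MEM of the form

     (mem:M (pre_dec (reg sp)))

   and, when PUSH_ROUNDING widens the slot, the PRE_MODIFY form

     (mem:M (pre_modify (reg sp) (plus (reg sp) (const_int -rounded_size))))

   where SP must be stack_pointer_rtx.  Anything else is rejected.  */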
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
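
/* Illustrative sketch (not part of this file): a machine description can
   cover every conditional branch with one pattern via match_operator
   (operand numbers and the condition register are hypothetical):

     (define_insn "..."
       [(set (pc)
             (if_then_else (match_operator 0 "comparison_operator"
                             [(reg CC_REGNUM) (const_int 0)])
                           (label_ref (match_operand 1 "" ""))
                           (pc)))]
       "..."
       "...")  */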
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
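
/* Summary of the shapes accepted above, for an asm with I inputs and
   O outputs (illustrative, not part of this file):

     (asm_operands ...)                                   O = 0, returns I
     (set OUT (asm_operands ...))                         O = 1, returns I + 1
     (parallel [(set OUT (asm_operands ...)) ...
                (clobber ...) ...])                       returns I + O
     (parallel [(asm_operands ...) (clobber ...) ...])    O = 0, returns I  */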
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
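
/* Illustrative usage sketch (not part of this file): callers size their
   arrays with asm_noperands first, as check_asm_operands does above:

     int n = asm_noperands (body);
     rtx *ops = alloca (n * sizeof (rtx));
     const char **cons = alloca (n * sizeof (char *));
     const char *tmpl = decode_asm_operands (body, ops, NULL, cons, NULL);

   Output operands come first in OPS, followed by the inputs.  */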
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
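
/* Illustrative usage sketch (not part of this file): the result is a
   tri-state, so callers must distinguish three cases:

     int ok = asm_operand_ok (op, constraint);
     if (ok > 0)
       ;  operand satisfies the constraint
     else if (ok == 0)
       ;  operand is definitely invalid
     else
       ;  only a matching digit was seen: inconclusive  */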
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
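
/* Worked example (illustrative, not part of this file): for an SImode
   reference to (plus (reg R) (const_int 4)), mode_sz is 4, so the
   function substitutes into the constant term and tests the QImode
   address (plus (reg R) (const_int 7)), i.e. the displacement of the
   last byte that the SImode access would touch.  */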
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED
			  /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */)
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Like extract_insn, but save insn extracted and don't extract again, when
   called again for the same insn expecting that recog_data still contains the
   valid information.  This is used primarily by the gen_attr infrastructure,
   which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
1969 /* Analyze INSN and fill in recog_data. */
1971 void
1972 extract_insn (rtx insn)
1974 int i;
1975 int icode;
1976 int noperands;
1977 rtx body = PATTERN (insn);
1979 recog_data.insn = NULL;
1980 recog_data.n_operands = 0;
1981 recog_data.n_alternatives = 0;
1982 recog_data.n_dups = 0;
1983 which_alternative = -1;
1985 switch (GET_CODE (body))
1987 case USE:
1988 case CLOBBER:
1989 case ASM_INPUT:
1990 case ADDR_VEC:
1991 case ADDR_DIFF_VEC:
1992 return;
1994 case SET:
1995 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1996 goto asm_insn;
1997 else
1998 goto normal_insn;
1999 case PARALLEL:
2000 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2001 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2002 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2003 goto asm_insn;
2004 else
2005 goto normal_insn;
2006 case ASM_OPERANDS:
2007 asm_insn:
2008 recog_data.n_operands = noperands = asm_noperands (body);
2009 if (noperands >= 0)
2011 /* This insn is an `asm' with operands. */
2013 /* expand_asm_operands makes sure there aren't too many operands. */
2014 if (noperands > MAX_RECOG_OPERANDS)
2015 abort ();
2017 /* Now get the operand values and constraints out of the insn. */
2018 decode_asm_operands (body, recog_data.operand,
2019 recog_data.operand_loc,
2020 recog_data.constraints,
2021 recog_data.operand_mode);
2022 if (noperands > 0)
2024 const char *p = recog_data.constraints[0];
2025 recog_data.n_alternatives = 1;
2026 while (*p)
2027 recog_data.n_alternatives += (*p++ == ',');
2029 break;
2031 fatal_insn_not_found (insn);
2033 default:
2034 normal_insn:
2035 /* Ordinary insn: recognize it, get the operands via insn_extract
2036 and get the constraints. */
2038 icode = recog_memoized (insn);
2039 if (icode < 0)
2040 fatal_insn_not_found (insn);
2042 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2043 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2044 recog_data.n_dups = insn_data[icode].n_dups;
2046 insn_extract (insn);
2048 for (i = 0; i < noperands; i++)
2050 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2051 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2052 /* A VOIDmode match_operand gets its mode from the real operand.  */
2053 if (recog_data.operand_mode[i] == VOIDmode)
2054 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2057 for (i = 0; i < noperands; i++)
2058 recog_data.operand_type[i]
2059 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2060 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2061 : OP_IN);
2063 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2064 abort ();
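/* Isolated sketch (not from the original recog.c) of the
   alternative-counting rule applied to asm constraint strings above.
   Alternatives within one constraint string are comma-separated, so
   the count is one more than the number of commas.  */

static int
count_alternatives (const char *constraint)
{
  int n = 1;
  while (*constraint)
    n += (*constraint++ == ',');
  return n;
}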
2067 /* After calling extract_insn, you can use this function to extract some
2068 information from the constraint strings into a more usable form.
2069 The collected data is stored in recog_op_alt. */
2070 void
2071 preprocess_constraints (void)
2073 int i;
2075 for (i = 0; i < recog_data.n_operands; i++)
2076 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2077 * sizeof (struct operand_alternative)));
2079 for (i = 0; i < recog_data.n_operands; i++)
2081 int j;
2082 struct operand_alternative *op_alt;
2083 const char *p = recog_data.constraints[i];
2085 op_alt = recog_op_alt[i];
2087 for (j = 0; j < recog_data.n_alternatives; j++)
2089 op_alt[j].cl = NO_REGS;
2090 op_alt[j].constraint = p;
2091 op_alt[j].matches = -1;
2092 op_alt[j].matched = -1;
2094 if (*p == '\0' || *p == ',')
2096 op_alt[j].anything_ok = 1;
2097 continue;
2100 for (;;)
2102 char c = *p;
2103 if (c == '#')
2104 do
2105 c = *++p;
2106 while (c != ',' && c != '\0');
2107 if (c == ',' || c == '\0')
2109 p++;
2110 break;
2113 switch (c)
2115 case '=': case '+': case '*': case '%':
2116 case 'E': case 'F': case 'G': case 'H':
2117 case 's': case 'i': case 'n':
2118 case 'I': case 'J': case 'K': case 'L':
2119 case 'M': case 'N': case 'O': case 'P':
2120 /* These don't say anything we care about. */
2121 break;
2123 case '?':
2124 op_alt[j].reject += 6;
2125 break;
2126 case '!':
2127 op_alt[j].reject += 600;
2128 break;
2129 case '&':
2130 op_alt[j].earlyclobber = 1;
2131 break;
2133 case '0': case '1': case '2': case '3': case '4':
2134 case '5': case '6': case '7': case '8': case '9':
2136 char *end;
2137 op_alt[j].matches = strtoul (p, &end, 10);
2138 recog_op_alt[op_alt[j].matches][j].matched = i;
2139 p = end;
2141 continue;
2143 case 'm':
2144 op_alt[j].memory_ok = 1;
2145 break;
2146 case '<':
2147 op_alt[j].decmem_ok = 1;
2148 break;
2149 case '>':
2150 op_alt[j].incmem_ok = 1;
2151 break;
2152 case 'V':
2153 op_alt[j].nonoffmem_ok = 1;
2154 break;
2155 case 'o':
2156 op_alt[j].offmem_ok = 1;
2157 break;
2158 case 'X':
2159 op_alt[j].anything_ok = 1;
2160 break;
2162 case 'p':
2163 op_alt[j].is_address = 1;
2164 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2165 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2166 break;
2168 case 'g':
2169 case 'r':
2170 op_alt[j].cl =
2171 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2172 break;
2174 default:
2175 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2177 op_alt[j].memory_ok = 1;
2178 break;
2180 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2182 op_alt[j].is_address = 1;
2183 op_alt[j].cl
2184 = (reg_class_subunion
2185 [(int) op_alt[j].cl]
2186 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2187 break;
2190 op_alt[j].cl
2191 = (reg_class_subunion
2192 [(int) op_alt[j].cl]
2193 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2194 break;
2196 p += CONSTRAINT_LEN (c, p);
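/* Small usage sketch (not from the original recog.c).  After
   extract_insn and preprocess_constraints have run, the summarized
   per-alternative data can be queried directly; the helper below is
   hypothetical.  */

static int
operand_accepts_memory_p (int opno, int alt)
{
  /* 'm'-style constraints set memory_ok; 'X' sets anything_ok.  */
  return recog_op_alt[opno][alt].memory_ok
         || recog_op_alt[opno][alt].anything_ok;
}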
2202 /* Check the operands of an insn against the insn's operand constraints
2203 and return 1 if they are valid.
2204 The information about the insn's operands, constraints, operand modes
2205 etc. is obtained from the global variables set up by extract_insn.
2207 WHICH_ALTERNATIVE is set to a number which indicates which
2208 alternative of constraints was matched: 0 for the first alternative,
2209 1 for the next, etc.
2211 In addition, when two operands are required to match
2212 and it happens that the output operand is (reg) while the
2213 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2214 make the output operand look like the input.
2215 This is because the output operand is the one the template will print.
2217 This is used in final, just before printing the assembler code, and by
2218 the routines that determine an insn's attributes.
2220 If STRICT is positive, it means that we have been
2221 called after reload has been completed. In that case, we must
2222 do all checks strictly. If it is zero, it means that we have been called
2223 before reload has completed. In that case, we first try to see if we can
2224 find an alternative that matches strictly. If not, we try again, this
2225 time assuming that reload will fix up the insn. This provides a "best
2226 guess" for the alternative and is used to compute attributes of insns prior
2227 to reload. A negative value of STRICT is used for this internal call. */
2229 struct funny_match
2231 int this, other;
2234 int
2235 constrain_operands (int strict)
2237 const char *constraints[MAX_RECOG_OPERANDS];
2238 int matching_operands[MAX_RECOG_OPERANDS];
2239 int earlyclobber[MAX_RECOG_OPERANDS];
2240 int c;
2242 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2243 int funny_match_index;
2245 which_alternative = 0;
2246 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2247 return 1;
2249 for (c = 0; c < recog_data.n_operands; c++)
2251 constraints[c] = recog_data.constraints[c];
2252 matching_operands[c] = -1;
2255 do
2257 int opno;
2258 int lose = 0;
2259 funny_match_index = 0;
2261 for (opno = 0; opno < recog_data.n_operands; opno++)
2263 rtx op = recog_data.operand[opno];
2264 enum machine_mode mode = GET_MODE (op);
2265 const char *p = constraints[opno];
2266 int offset = 0;
2267 int win = 0;
2268 int val;
2269 int len;
2271 earlyclobber[opno] = 0;
2273 /* A unary operator may be accepted by the predicate, but it
2274 is irrelevant for matching constraints. */
2275 if (UNARY_P (op))
2276 op = XEXP (op, 0);
2278 if (GET_CODE (op) == SUBREG)
2280 if (REG_P (SUBREG_REG (op))
2281 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2282 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2283 GET_MODE (SUBREG_REG (op)),
2284 SUBREG_BYTE (op),
2285 GET_MODE (op));
2286 op = SUBREG_REG (op);
2289 /* An empty constraint or empty alternative
2290 allows anything which matched the pattern. */
2291 if (*p == 0 || *p == ',')
2292 win = 1;
2294 do
2295 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2297 case '\0':
2298 len = 0;
2299 break;
2300 case ',':
2301 c = '\0';
2302 break;
2304 case '?': case '!': case '*': case '%':
2305 case '=': case '+':
2306 break;
2308 case '#':
2309 /* Ignore rest of this alternative as far as
2310 constraint checking is concerned. */
2311 do
2312 p++;
2313 while (*p && *p != ',');
2314 len = 0;
2315 break;
2317 case '&':
2318 earlyclobber[opno] = 1;
2319 break;
2321 case '0': case '1': case '2': case '3': case '4':
2322 case '5': case '6': case '7': case '8': case '9':
2324 /* This operand must be the same as a previous one.
2325 This kind of constraint is used for instructions such
2326 as add when they take only two operands.
2328 Note that the lower-numbered operand is passed first.
2330 If we are not testing strictly, assume that this
2331 constraint will be satisfied. */
2333 char *end;
2334 int match;
2336 match = strtoul (p, &end, 10);
2337 p = end;
2339 if (strict < 0)
2340 val = 1;
2341 else
2343 rtx op1 = recog_data.operand[match];
2344 rtx op2 = recog_data.operand[opno];
2346 /* A unary operator may be accepted by the predicate,
2347 but it is irrelevant for matching constraints. */
2348 if (UNARY_P (op1))
2349 op1 = XEXP (op1, 0);
2350 if (UNARY_P (op2))
2351 op2 = XEXP (op2, 0);
2353 val = operands_match_p (op1, op2);
2356 matching_operands[opno] = match;
2357 matching_operands[match] = opno;
2359 if (val != 0)
2360 win = 1;
2362 /* If output is *x and input is *--x, arrange later
2363 to change the output to *--x as well, since the
2364 output op is the one that will be printed. */
2365 if (val == 2 && strict > 0)
2367 funny_match[funny_match_index].this = opno;
2368 funny_match[funny_match_index++].other = match;
2371 len = 0;
2372 break;
2374 case 'p':
2375 /* p is used for address_operands. When we are called by
2376 gen_reload, no one will have checked that the address is
2377 strictly valid, i.e., that all pseudos requiring hard regs
2378 have gotten them. */
2379 if (strict <= 0
2380 || (strict_memory_address_p (recog_data.operand_mode[opno],
2381 op)))
2382 win = 1;
2383 break;
2385 /* No need to check general_operand again;
2386 it was done in insn-recog.c. */
2387 case 'g':
2388 /* Anything goes unless it is a REG and really has a hard reg
2389 but the hard reg is not in the class GENERAL_REGS. */
2390 if (strict < 0
2391 || GENERAL_REGS == ALL_REGS
2392 || !REG_P (op)
2393 || (reload_in_progress
2394 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2395 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2396 win = 1;
2397 break;
2399 case 'X':
2400 /* This is used for a MATCH_SCRATCH in the cases when
2401 we don't actually need anything. So anything goes
2402 any time. */
2403 win = 1;
2404 break;
2406 case 'm':
2407 /* Memory operands must be valid, to the extent
2408 required by STRICT. */
2409 if (MEM_P (op))
2411 if (strict > 0
2412 && !strict_memory_address_p (GET_MODE (op),
2413 XEXP (op, 0)))
2414 break;
2415 if (strict == 0
2416 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2417 break;
2418 win = 1;
2420 /* Before reload, accept what reload can turn into mem. */
2421 else if (strict < 0 && CONSTANT_P (op))
2422 win = 1;
2423 /* During reload, accept a pseudo.  */
2424 else if (reload_in_progress && REG_P (op)
2425 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2426 win = 1;
2427 break;
2429 case '<':
2430 if (MEM_P (op)
2431 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2432 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2433 win = 1;
2434 break;
2436 case '>':
2437 if (MEM_P (op)
2438 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2439 || GET_CODE (XEXP (op, 0)) == POST_INC))
2440 win = 1;
2441 break;
2443 case 'E':
2444 case 'F':
2445 if (GET_CODE (op) == CONST_DOUBLE
2446 || (GET_CODE (op) == CONST_VECTOR
2447 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2448 win = 1;
2449 break;
2451 case 'G':
2452 case 'H':
2453 if (GET_CODE (op) == CONST_DOUBLE
2454 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2455 win = 1;
2456 break;
2458 case 's':
2459 if (GET_CODE (op) == CONST_INT
2460 || (GET_CODE (op) == CONST_DOUBLE
2461 && GET_MODE (op) == VOIDmode))
2462 break;
2463 case 'i':
2464 if (CONSTANT_P (op))
2465 win = 1;
2466 break;
2468 case 'n':
2469 if (GET_CODE (op) == CONST_INT
2470 || (GET_CODE (op) == CONST_DOUBLE
2471 && GET_MODE (op) == VOIDmode))
2472 win = 1;
2473 break;
2475 case 'I':
2476 case 'J':
2477 case 'K':
2478 case 'L':
2479 case 'M':
2480 case 'N':
2481 case 'O':
2482 case 'P':
2483 if (GET_CODE (op) == CONST_INT
2484 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2485 win = 1;
2486 break;
2488 case 'V':
2489 if (MEM_P (op)
2490 && ((strict > 0 && ! offsettable_memref_p (op))
2491 || (strict < 0
2492 && !(CONSTANT_P (op) || MEM_P (op)))
2493 || (reload_in_progress
2494 && !(REG_P (op)
2495 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2496 win = 1;
2497 break;
2499 case 'o':
2500 if ((strict > 0 && offsettable_memref_p (op))
2501 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2502 /* Before reload, accept what reload can handle. */
2503 || (strict < 0
2504 && (CONSTANT_P (op) || MEM_P (op)))
2505 /* During reload, accept a pseudo.  */
2506 || (reload_in_progress && REG_P (op)
2507 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2508 win = 1;
2509 break;
2511 default:
2513 enum reg_class cl;
2515 cl = (c == 'r'
2516 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2517 if (cl != NO_REGS)
2519 if (strict < 0
2520 || (strict == 0
2521 && REG_P (op)
2522 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2523 || (strict == 0 && GET_CODE (op) == SCRATCH)
2524 || (REG_P (op)
2525 && reg_fits_class_p (op, cl, offset, mode)))
2526 win = 1;
2528 #ifdef EXTRA_CONSTRAINT_STR
2529 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2530 win = 1;
2532 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2533 /* Every memory operand can be reloaded to fit. */
2534 && ((strict < 0 && MEM_P (op))
2535 /* Before reload, accept what reload can turn
2536 into mem. */
2537 || (strict < 0 && CONSTANT_P (op))
2538 /* During reload, accept a pseudo.  */
2539 || (reload_in_progress && REG_P (op)
2540 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2541 win = 1;
2542 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2543 /* Every address operand can be reloaded to fit. */
2544 && strict < 0)
2545 win = 1;
2546 #endif
2547 break;
2550 while (p += len, c);
2552 constraints[opno] = p;
2553 /* If this operand did not win somehow,
2554 this alternative loses. */
2555 if (! win)
2556 lose = 1;
2558 /* This alternative won; the operands are ok.
2559 Change whichever operands this alternative says to change. */
2560 if (! lose)
2562 int opno, eopno;
2564 /* See if any earlyclobber operand conflicts with some other
2565 operand. */
2567 if (strict > 0)
2568 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2569 /* Ignore earlyclobber operands now in memory,
2570 because we would often report failure when we have
2571 two memory operands, one of which was formerly a REG. */
2572 if (earlyclobber[eopno]
2573 && REG_P (recog_data.operand[eopno]))
2574 for (opno = 0; opno < recog_data.n_operands; opno++)
2575 if ((MEM_P (recog_data.operand[opno])
2576 || recog_data.operand_type[opno] != OP_OUT)
2577 && opno != eopno
2578 /* Ignore things like match_operator operands. */
2579 && *recog_data.constraints[opno] != 0
2580 && ! (matching_operands[opno] == eopno
2581 && operands_match_p (recog_data.operand[opno],
2582 recog_data.operand[eopno]))
2583 && ! safe_from_earlyclobber (recog_data.operand[opno],
2584 recog_data.operand[eopno]))
2585 lose = 1;
2587 if (! lose)
2589 while (--funny_match_index >= 0)
2591 recog_data.operand[funny_match[funny_match_index].other]
2592 = recog_data.operand[funny_match[funny_match_index].this];
2595 return 1;
2599 which_alternative++;
2601 while (which_alternative < recog_data.n_alternatives);
2603 which_alternative = -1;
2604 /* If we are about to reject this, but we are not to test strictly,
2605 try a very loose test. Only return failure if it fails also. */
2606 if (strict == 0)
2607 return constrain_operands (-1);
2608 else
2609 return 0;
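/* Hedged sketch (not from the original recog.c) of the STRICT
   convention documented before constrain_operands; the wrapper name is
   made up.  Callers after reload must check strictly, while earlier
   callers pass zero and rely on the internal retry with STRICT < 0.  */

static int
insn_constraints_ok (rtx insn)
{
  extract_insn (insn);
  return constrain_operands (reload_completed);
}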
2612 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2613 is a hard reg in class CL when its regno is offset by OFFSET
2614 and changed to mode MODE.
2615 If OPERAND occupies multiple hard regs, all of them must be in CL.  */
2617 int
2618 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2619 enum machine_mode mode)
2621 int regno = REGNO (operand);
2622 if (regno < FIRST_PSEUDO_REGISTER
2623 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2624 regno + offset))
2626 int sr;
2627 regno += offset;
2628 for (sr = hard_regno_nregs[regno][mode] - 1;
2629 sr > 0; sr--)
2630 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2631 regno + sr))
2632 break;
2633 return sr == 0;
2636 return 0;
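/* Illustrative check (not from the original recog.c).  A multi-word
   value starting at hard register 0 occupies hard_regno_nregs[0][DImode]
   consecutive registers, and reg_fits_class_p requires every one of
   them to be in CL; DImode and register 0 are arbitrary choices.  */

static int
dimode_fits_class_example (enum reg_class cl)
{
  rtx reg = gen_rtx_REG (DImode, 0);
  return reg_fits_class_p (reg, cl, 0, DImode);
}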
2639 /* Split single instruction. Helper function for split_all_insns and
2640 split_all_insns_noflow. Return last insn in the sequence if successful,
2641 or NULL if unsuccessful. */
2643 static rtx
2644 split_insn (rtx insn)
2646 /* Split insns here to get max fine-grain parallelism. */
2647 rtx first = PREV_INSN (insn);
2648 rtx last = try_split (PATTERN (insn), insn, 1);
2650 if (last == insn)
2651 return NULL_RTX;
2653 /* try_split returns the NOTE that INSN became. */
2654 SET_INSN_DELETED (insn);
2656 /* ??? Coddle to md files that generate subregs in post-reload
2657 splitters instead of computing the proper hard register. */
2658 if (reload_completed && first != last)
2660 first = NEXT_INSN (first);
2661 for (;;)
2663 if (INSN_P (first))
2664 cleanup_subreg_operands (first);
2665 if (first == last)
2666 break;
2667 first = NEXT_INSN (first);
2670 return last;
2673 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2675 void
2676 split_all_insns (int upd_life)
2678 sbitmap blocks;
2679 bool changed;
2680 basic_block bb;
2682 blocks = sbitmap_alloc (last_basic_block);
2683 sbitmap_zero (blocks);
2684 changed = false;
2686 FOR_EACH_BB_REVERSE (bb)
2688 rtx insn, next;
2689 bool finish = false;
2691 for (insn = BB_HEAD (bb); !finish ; insn = next)
2693 /* Can't use `next_real_insn', because that might go across
2694 CODE_LABELs and cross basic block boundaries.  */
2695 next = NEXT_INSN (insn);
2696 finish = (insn == BB_END (bb));
2697 if (INSN_P (insn))
2699 rtx set = single_set (insn);
2701 /* Don't split no-op move insns. These should silently
2702 disappear later in final. Splitting such insns would
2703 break the code that handles REG_NO_CONFLICT blocks. */
2704 if (set && set_noop_p (set))
2706 /* Nops get in the way while scheduling, so delete them
2707 now if register allocation has already been done. It
2708 is too risky to try to do this before register
2709 allocation, and there are unlikely to be very many
2710 nops then anyway.  */
2711 if (reload_completed)
2713 /* If the no-op set has a REG_UNUSED note, we need
2714 to update liveness information. */
2715 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2717 SET_BIT (blocks, bb->index);
2718 changed = true;
2720 /* ??? Is life info affected by deleting edges? */
2721 delete_insn_and_edges (insn);
2724 else
2726 rtx last = split_insn (insn);
2727 if (last)
2729 /* The split sequence may include a barrier, but the
2730 BB boundary we are interested in will be set to the
2731 previous one.  */
2733 while (BARRIER_P (last))
2734 last = PREV_INSN (last);
2735 SET_BIT (blocks, bb->index);
2736 changed = true;
2743 if (changed)
2745 int old_last_basic_block = last_basic_block;
2747 find_many_sub_basic_blocks (blocks);
2749 if (old_last_basic_block != last_basic_block && upd_life)
2750 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2753 if (changed && upd_life)
2754 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2755 PROP_DEATH_NOTES);
2757 #ifdef ENABLE_CHECKING
2758 verify_flow_info ();
2759 #endif
2761 sbitmap_free (blocks);
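/* Minimal sketch (not from the original recog.c) of the no-op test
   applied above.  A register moved to itself forms a SET that
   set_noop_p recognizes; SImode and register 5 are arbitrary.  */

static int
self_move_is_noop (void)
{
  rtx reg = gen_rtx_REG (SImode, 5);
  return set_noop_p (gen_rtx_SET (VOIDmode, reg, reg));  /* nonzero */
}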
2764 /* Same as split_all_insns, but do not expect CFG to be available.
2765 Used by machine dependent reorg passes. */
2767 void
2768 split_all_insns_noflow (void)
2770 rtx next, insn;
2772 for (insn = get_insns (); insn; insn = next)
2774 next = NEXT_INSN (insn);
2775 if (INSN_P (insn))
2777 /* Don't split no-op move insns. These should silently
2778 disappear later in final. Splitting such insns would
2779 break the code that handles REG_NO_CONFLICT blocks. */
2780 rtx set = single_set (insn);
2781 if (set && set_noop_p (set))
2783 /* Nops get in the way while scheduling, so delete them
2784 now if register allocation has already been done. It
2785 is too risky to try to do this before register
2786 allocation, and there are unlikely to be very many
2787 nops then anyway.
2789 ??? Should we use delete_insn when the CFG isn't valid? */
2790 if (reload_completed)
2791 delete_insn_and_edges (insn);
2793 else
2794 split_insn (insn);
2799 #ifdef HAVE_peephole2
2800 struct peep2_insn_data
2802 rtx insn;
2803 regset live_before;
2806 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2807 static int peep2_current;
2809 /* A non-insn marker indicating the last insn of the block.
2810 The live_before regset for this element is correct, indicating
2811 global_live_at_end for the block. */
2812 #define PEEP2_EOB pc_rtx
2814 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2815 does not exist. Used by the recognizer to find the next insn to match
2816 in a multi-insn pattern. */
2818 rtx
2819 peep2_next_insn (int n)
2821 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2822 abort ();
2824 n += peep2_current;
2825 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2826 n -= MAX_INSNS_PER_PEEP2 + 1;
2828 if (peep2_insn_data[n].insn == PEEP2_EOB)
2829 return NULL_RTX;
2830 return peep2_insn_data[n].insn;
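/* Isolated sketch (not from the original recog.c) of the
   circular-buffer index arithmetic shared by the peep2 routines here.
   Indices stay within [0, MAX_INSNS_PER_PEEP2], so one conditional
   subtraction performs the wrap after adding the current offset.  */

static int
peep2_wrap_index (int n)
{
  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}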
2833 /* Return true if REGNO is dead before the Nth non-note insn
2834 after `current'. */
2836 int
2837 peep2_regno_dead_p (int ofs, int regno)
2839 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2840 abort ();
2842 ofs += peep2_current;
2843 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2844 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2846 if (peep2_insn_data[ofs].insn == NULL_RTX)
2847 abort ();
2849 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2852 /* Similarly for a REG. */
2854 int
2855 peep2_reg_dead_p (int ofs, rtx reg)
2857 int regno, n;
2859 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2860 abort ();
2862 ofs += peep2_current;
2863 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2864 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2866 if (peep2_insn_data[ofs].insn == NULL_RTX)
2867 abort ();
2869 regno = REGNO (reg);
2870 n = hard_regno_nregs[regno][GET_MODE (reg)];
2871 while (--n >= 0)
2872 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2873 return 0;
2874 return 1;
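/* Hedged usage sketch (not from the original recog.c).  The condition
   of a define_peephole2 in a machine description is a C expression, so
   these predicates appear there directly; the wrapper below merely
   names one such test.  Offset 2 asks whether REG is dead before the
   insn following a two-insn match.  */

static int
dead_after_two_insn_match_p (rtx reg)
{
  return peep2_reg_dead_p (2, reg);
}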
2877 /* Try to find a hard register of mode MODE, matching the register class in
2878 CLASS_STR, which is available at the beginning of the insn in peep2 slot
2879 FROM and remains available until the end of the insn in slot TO.
2882 Registers that already have bits set in REG_SET will not be considered.
2884 If an appropriate register is available, it will be returned and the
2885 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2886 returned. */
2888 rtx
2889 peep2_find_free_register (int from, int to, const char *class_str,
2890 enum machine_mode mode, HARD_REG_SET *reg_set)
2892 static int search_ofs;
2893 enum reg_class cl;
2894 HARD_REG_SET live;
2895 int i;
2897 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2898 abort ();
2900 from += peep2_current;
2901 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2902 from -= MAX_INSNS_PER_PEEP2 + 1;
2903 to += peep2_current;
2904 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2905 to -= MAX_INSNS_PER_PEEP2 + 1;
2907 if (peep2_insn_data[from].insn == NULL_RTX)
2908 abort ();
2909 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2911 while (from != to)
2913 HARD_REG_SET this_live;
2915 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2916 from = 0;
2917 if (peep2_insn_data[from].insn == NULL_RTX)
2918 abort ();
2919 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2920 IOR_HARD_REG_SET (live, this_live);
2923 cl = (class_str[0] == 'r' ? GENERAL_REGS
2924 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2926 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2928 int raw_regno, regno, success, j;
2930 /* Distribute the free registers as much as possible. */
2931 raw_regno = search_ofs + i;
2932 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2933 raw_regno -= FIRST_PSEUDO_REGISTER;
2934 #ifdef REG_ALLOC_ORDER
2935 regno = reg_alloc_order[raw_regno];
2936 #else
2937 regno = raw_regno;
2938 #endif
2940 /* Don't allocate fixed registers. */
2941 if (fixed_regs[regno])
2942 continue;
2943 /* Make sure the register is of the right class. */
2944 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2945 continue;
2946 /* And can support the mode we need. */
2947 if (! HARD_REGNO_MODE_OK (regno, mode))
2948 continue;
2949 /* And that we don't create an extra save/restore. */
2950 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2951 continue;
2952 /* And we don't clobber traceback for noreturn functions. */
2953 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2954 && (! reload_completed || frame_pointer_needed))
2955 continue;
2957 success = 1;
2958 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2960 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2961 || TEST_HARD_REG_BIT (live, regno + j))
2963 success = 0;
2964 break;
2967 if (success)
2969 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2970 SET_HARD_REG_BIT (*reg_set, regno + j);
2972 /* Start the next search with the next register. */
2973 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2974 raw_regno = 0;
2975 search_ofs = raw_regno;
2977 return gen_rtx_REG (mode, regno);
2981 search_ofs = 0;
2982 return NULL_RTX;
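/* Usage sketch (not from the original recog.c).  A peephole2 generator
   asking for an SImode GENERAL_REGS scratch that stays free across
   slots 0 and 1 of the current match could write the call below;
   SCRATCH_SET accumulates registers already handed out, and the
   helper name is made up.  */

static rtx
find_simode_scratch (HARD_REG_SET *scratch_set)
{
  return peep2_find_free_register (0, 1, "r", SImode, scratch_set);
}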
2985 /* Perform the peephole2 optimization pass. */
2987 void
2988 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
2990 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
2991 rtx insn, prev;
2992 regset live;
2993 int i;
2994 basic_block bb;
2995 #ifdef HAVE_conditional_execution
2996 sbitmap blocks;
2997 bool changed;
2998 #endif
2999 bool do_cleanup_cfg = false;
3000 bool do_rebuild_jump_labels = false;
3002 /* Initialize the regsets we're going to use. */
3003 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3004 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3005 live = INITIALIZE_REG_SET (rs_heads[i]);
3007 #ifdef HAVE_conditional_execution
3008 blocks = sbitmap_alloc (last_basic_block);
3009 sbitmap_zero (blocks);
3010 changed = false;
3011 #else
3012 count_or_remove_death_notes (NULL, 1);
3013 #endif
3015 FOR_EACH_BB_REVERSE (bb)
3017 struct propagate_block_info *pbi;
3019 /* Indicate that all slots except the last hold invalid data.  */
3020 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3021 peep2_insn_data[i].insn = NULL_RTX;
3023 /* Indicate that the last slot contains live_after data. */
3024 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3025 peep2_current = MAX_INSNS_PER_PEEP2;
3027 /* Start up propagation. */
3028 COPY_REG_SET (live, bb->global_live_at_end);
3029 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3031 #ifdef HAVE_conditional_execution
3032 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3033 #else
3034 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3035 #endif
3037 for (insn = BB_END (bb); ; insn = prev)
3039 prev = PREV_INSN (insn);
3040 if (INSN_P (insn))
3042 rtx try, before_try, x;
3043 int match_len;
3044 rtx note;
3045 bool was_call = false;
3047 /* Record this insn. */
3048 if (--peep2_current < 0)
3049 peep2_current = MAX_INSNS_PER_PEEP2;
3050 peep2_insn_data[peep2_current].insn = insn;
3051 propagate_one_insn (pbi, insn);
3052 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3054 /* Match the peephole. */
3055 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3056 if (try != NULL)
3058 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3059 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3060 cfg-related call notes. */
3061 for (i = 0; i <= match_len; ++i)
3063 int j;
3064 rtx old_insn, new_insn, note;
3066 j = i + peep2_current;
3067 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3068 j -= MAX_INSNS_PER_PEEP2 + 1;
3069 old_insn = peep2_insn_data[j].insn;
3070 if (!CALL_P (old_insn))
3071 continue;
3072 was_call = true;
3074 new_insn = try;
3075 while (new_insn != NULL_RTX)
3077 if (CALL_P (new_insn))
3078 break;
3079 new_insn = NEXT_INSN (new_insn);
3082 if (new_insn == NULL_RTX)
3083 abort ();
3085 CALL_INSN_FUNCTION_USAGE (new_insn)
3086 = CALL_INSN_FUNCTION_USAGE (old_insn);
3088 for (note = REG_NOTES (old_insn);
3089 note;
3090 note = XEXP (note, 1))
3091 switch (REG_NOTE_KIND (note))
3093 case REG_NORETURN:
3094 case REG_SETJMP:
3095 case REG_ALWAYS_RETURN:
3096 REG_NOTES (new_insn)
3097 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3098 XEXP (note, 0),
3099 REG_NOTES (new_insn));
3100 default:
3101 /* Discard all other reg notes. */
3102 break;
3105 /* Croak if there is another call in the sequence. */
3106 while (++i <= match_len)
3108 j = i + peep2_current;
3109 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3110 j -= MAX_INSNS_PER_PEEP2 + 1;
3111 old_insn = peep2_insn_data[j].insn;
3112 if (CALL_P (old_insn))
3113 abort ();
3115 break;
3118 i = match_len + peep2_current;
3119 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3120 i -= MAX_INSNS_PER_PEEP2 + 1;
3122 note = find_reg_note (peep2_insn_data[i].insn,
3123 REG_EH_REGION, NULL_RTX);
3125 /* Replace the old sequence with the new. */
3126 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3127 INSN_LOCATOR (peep2_insn_data[i].insn));
3128 before_try = PREV_INSN (insn);
3129 delete_insn_chain (insn, peep2_insn_data[i].insn);
3131 /* Re-insert the EH_REGION notes. */
3132 if (note || (was_call && nonlocal_goto_handler_labels))
3134 edge eh_edge;
3136 for (eh_edge = bb->succ; eh_edge
3137 ; eh_edge = eh_edge->succ_next)
3138 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3139 break;
3141 for (x = try ; x != before_try ; x = PREV_INSN (x))
3142 if (CALL_P (x)
3143 || (flag_non_call_exceptions
3144 && may_trap_p (PATTERN (x))
3145 && !find_reg_note (x, REG_EH_REGION, NULL)))
3147 if (note)
3148 REG_NOTES (x)
3149 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3150 XEXP (note, 0),
3151 REG_NOTES (x));
3153 if (x != BB_END (bb) && eh_edge)
3155 edge nfte, nehe;
3156 int flags;
3158 nfte = split_block (bb, x);
3159 flags = (eh_edge->flags
3160 & (EDGE_EH | EDGE_ABNORMAL));
3161 if (CALL_P (x))
3162 flags |= EDGE_ABNORMAL_CALL;
3163 nehe = make_edge (nfte->src, eh_edge->dest,
3164 flags);
3166 nehe->probability = eh_edge->probability;
3167 nfte->probability
3168 = REG_BR_PROB_BASE - nehe->probability;
3170 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3171 #ifdef HAVE_conditional_execution
3172 SET_BIT (blocks, nfte->dest->index);
3173 changed = true;
3174 #endif
3175 bb = nfte->src;
3176 eh_edge = nehe;
3180 /* A possibly trapping insn may have been converted to a
3181 non-trapping one; zap any outgoing edges that are now dead.  */
3182 do_cleanup_cfg |= purge_dead_edges (bb);
3185 #ifdef HAVE_conditional_execution
3186 /* With conditional execution, we cannot back up the
3187 live information so easily, since the conditional
3188 death data structures are not so self-contained.
3189 So record that we've made a modification to this
3190 block and update life information at the end. */
3191 SET_BIT (blocks, bb->index);
3192 changed = true;
3194 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3195 peep2_insn_data[i].insn = NULL_RTX;
3196 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3197 #else
3198 /* Back up lifetime information past the end of the
3199 newly created sequence. */
3200 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3201 i = 0;
3202 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3204 /* Update life information for the new sequence. */
3205 x = try;
3206 do
3208 if (INSN_P (x))
3210 if (--i < 0)
3211 i = MAX_INSNS_PER_PEEP2;
3212 peep2_insn_data[i].insn = x;
3213 propagate_one_insn (pbi, x);
3214 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3216 x = PREV_INSN (x);
3218 while (x != prev);
3220 /* ??? Should verify that LIVE now matches what we
3221 had before the new sequence. */
3223 peep2_current = i;
3224 #endif
3226 /* If we generated a jump instruction, it won't have
3227 JUMP_LABEL set. Recompute after we're done. */
3228 for (x = try; x != before_try; x = PREV_INSN (x))
3229 if (JUMP_P (x))
3231 do_rebuild_jump_labels = true;
3232 break;
3237 if (insn == BB_HEAD (bb))
3238 break;
3241 free_propagate_block_info (pbi);
3244 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3245 FREE_REG_SET (peep2_insn_data[i].live_before);
3246 FREE_REG_SET (live);
3248 if (do_rebuild_jump_labels)
3249 rebuild_jump_labels (get_insns ());
3251 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3252 we've changed global life since exception handlers are no longer
3253 reachable. */
3254 if (do_cleanup_cfg)
3256 cleanup_cfg (0);
3257 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3259 #ifdef HAVE_conditional_execution
3260 else
3262 count_or_remove_death_notes (blocks, 1);
3263 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3265 sbitmap_free (blocks);
3266 #endif
3268 #endif /* HAVE_peephole2 */
3270 /* Common predicates for use with define_bypass. */
3272 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3273 data, not the address operand(s), of the store.  IN_INSN must be a
3274 single_set.  OUT_INSN must be either a single_set or a PARALLEL with
3275 SETs inside.  */
3277 int
3278 store_data_bypass_p (rtx out_insn, rtx in_insn)
3280 rtx out_set, in_set;
3282 in_set = single_set (in_insn);
3283 if (! in_set)
3284 abort ();
3286 if (!MEM_P (SET_DEST (in_set)))
3287 return false;
3289 out_set = single_set (out_insn);
3290 if (out_set)
3292 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3293 return false;
3295 else
3297 rtx out_pat;
3298 int i;
3300 out_pat = PATTERN (out_insn);
3301 if (GET_CODE (out_pat) != PARALLEL)
3302 abort ();
3304 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3306 rtx exp = XVECEXP (out_pat, 0, i);
3308 if (GET_CODE (exp) == CLOBBER)
3309 continue;
3311 if (GET_CODE (exp) != SET)
3312 abort ();
3314 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3315 return false;
3319 return true;
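/* Illustrative harness (not from the original recog.c), assuming
   start_sequence/emit_insn may be used to build throwaway insns here.
   The producer sets the register that supplies the store data, not the
   address register, so store_data_bypass_p returns true; all register
   numbers are arbitrary.  */

static int
store_data_bypass_example (void)
{
  rtx data = gen_rtx_REG (SImode, 1);
  rtx mem = gen_rtx_MEM (SImode, gen_rtx_REG (Pmode, 2));
  rtx out_insn, in_insn;
  int result;

  start_sequence ();
  out_insn = emit_insn (gen_rtx_SET (VOIDmode, data, const0_rtx));
  in_insn = emit_insn (gen_rtx_SET (VOIDmode, mem, data));
  result = store_data_bypass_p (out_insn, in_insn);  /* true */
  end_sequence ();
  return result;
}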
3322 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3323 condition, and not in the THEN or ELSE branch.  OUT_INSN may be a single or
3324 multiple set; IN_INSN should be a single_set for the result to be meaningful,
3325 but for convenience of insn categorization it may be any JUMP or CALL insn.  */
3327 int
3328 if_test_bypass_p (rtx out_insn, rtx in_insn)
3330 rtx out_set, in_set;
3332 in_set = single_set (in_insn);
3333 if (! in_set)
3335 if (JUMP_P (in_insn) || CALL_P (in_insn))
3336 return false;
3337 abort ();
3340 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3341 return false;
3342 in_set = SET_SRC (in_set);
3344 out_set = single_set (out_insn);
3345 if (out_set)
3347 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3348 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3349 return false;
3351 else
3353 rtx out_pat;
3354 int i;
3356 out_pat = PATTERN (out_insn);
3357 if (GET_CODE (out_pat) != PARALLEL)
3358 abort ();
3360 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3362 rtx exp = XVECEXP (out_pat, 0, i);
3364 if (GET_CODE (exp) == CLOBBER)
3365 continue;
3367 if (GET_CODE (exp) != SET)
3368 abort ();
3370 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3371 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3372 return false;
3376 return true;
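/* Companion harness to the store_data_bypass_p sketch above, likewise
   illustrative only and not from the original recog.c.  The producer
   sets the register used in the IF_THEN_ELSE condition but in neither
   arm, which is exactly the shape if_test_bypass_p accepts; all
   register numbers are arbitrary.  */

static int
if_test_bypass_example (void)
{
  rtx cc = gen_rtx_REG (SImode, 1);
  rtx dst = gen_rtx_REG (SImode, 2);
  rtx a = gen_rtx_REG (SImode, 3);
  rtx b = gen_rtx_REG (SImode, 4);
  rtx out_insn, in_insn;
  int result;

  start_sequence ();
  out_insn = emit_insn (gen_rtx_SET (VOIDmode, cc, const1_rtx));
  in_insn = emit_insn (gen_rtx_SET (VOIDmode, dst,
                                    gen_rtx_IF_THEN_ELSE (SImode,
                                                          gen_rtx_NE (SImode, cc, const0_rtx),
                                                          a, b)));
  result = if_test_bypass_p (out_insn, in_insn);  /* true */
  end_sequence ();
  return result;
}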