/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
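
/* Illustrative example: on a target whose stack grows downward, a push
   of a SImode register R is typically represented in RTL as

       (set (mem:SI (pre_dec:SI (reg:SI SP))) (reg:SI R))

   which is why STACK_PUSH_CODE defaults to PRE_DEC there, and the
   matching pop address uses POST_INC.  R and SP are placeholder names
   used only for illustration.  */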
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;
struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
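
/* Illustrative usage sketch: a pass that must change several locations
   atomically queues each one with IN_GROUP set and then validates the
   whole group at once, e.g.

       validate_change (insn, &XEXP (addr, 0), base_reg, 1);
       validate_change (insn, &XEXP (addr, 1), offset, 1);
       if (!apply_change_group ())
         ... all queued changes were backed out; INSN is untouched ...

   ADDR, BASE_REG and OFFSET above are placeholder names for this sketch,
   not code in this file.  */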
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

static int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change and
   verified with verify_changes.  Update the BB_DIRTY flags of the affected
   blocks, and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  basic_block bb;

  for (i = 0; i < num_changes; i++)
    if (changes[i].object
        && INSN_P (changes[i].object)
        && (bb = BLOCK_FOR_INSN (changes[i].object)))
      bb->flags |= BB_DIRTY;

  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
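
/* Illustrative usage sketch: a combine-style pass might call, e.g.,
   validate_replace_rtx (old_reg, new_src, insn) to substitute NEW_SRC for
   every use of OLD_REG in INSN, keeping the result only if the rewritten
   insn still matches some pattern.  OLD_REG and NEW_SRC are placeholder
   names used only for illustration.  */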
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
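
/* For illustration: when PUSH_ROUNDING pads the push, the address form
   accepted above is, on a downward-growing stack,

       (pre_modify (reg SP) (plus (reg SP) (const_int -ROUNDED_SIZE)))

   where ROUNDED_SIZE stands for the padded size computed above; SP and
   ROUNDED_SIZE are placeholder names used only in this sketch.  */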
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
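
/* For example, if *P is (plus (reg R) (const_int 8)), the returned
   location points at the (const_int 8) term; R is a placeholder register
   used only for illustration.  */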
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */)
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
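
/* For example, (post_inc (reg SP)) is mode-dependent: the register is
   advanced by the GET_MODE_SIZE of the enclosing MEM, so using the same
   address in QImode and in SImode has different side effects.  SP here is
   a placeholder register used only for illustration.  */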
/* Like extract_insn, but save the insn extracted and don't extract again,
   when called again for the same insn expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure that often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
1972 /* Analyze INSN and fill in recog_data. */
1974 void
1975 extract_insn (rtx insn)
1977 int i;
1978 int icode;
1979 int noperands;
1980 rtx body = PATTERN (insn);
1982 recog_data.insn = NULL;
1983 recog_data.n_operands = 0;
1984 recog_data.n_alternatives = 0;
1985 recog_data.n_dups = 0;
1986 which_alternative = -1;
1988 switch (GET_CODE (body))
1990 case USE:
1991 case CLOBBER:
1992 case ASM_INPUT:
1993 case ADDR_VEC:
1994 case ADDR_DIFF_VEC:
1995 return;
1997 case SET:
1998 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1999 goto asm_insn;
2000 else
2001 goto normal_insn;
2002 case PARALLEL:
2003 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2004 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2005 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2006 goto asm_insn;
2007 else
2008 goto normal_insn;
2009 case ASM_OPERANDS:
2010 asm_insn:
2011 recog_data.n_operands = noperands = asm_noperands (body);
2012 if (noperands >= 0)
2014 /* This insn is an `asm' with operands. */
2016 /* expand_asm_operands makes sure there aren't too many operands. */
2017 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2019 /* Now get the operand values and constraints out of the insn. */
2020 decode_asm_operands (body, recog_data.operand,
2021 recog_data.operand_loc,
2022 recog_data.constraints,
2023 recog_data.operand_mode);
2024 if (noperands > 0)
2026 const char *p = recog_data.constraints[0];
2027 recog_data.n_alternatives = 1;
2028 while (*p)
2029 recog_data.n_alternatives += (*p++ == ',');
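/* E.g. the constraint string "=r,m" contains one comma and so
   describes two alternatives.  */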
2031 break;
2033 fatal_insn_not_found (insn);
2035 default:
2036 normal_insn:
2037 /* Ordinary insn: recognize it, get the operands via insn_extract
2038 and get the constraints. */
2040 icode = recog_memoized (insn);
2041 if (icode < 0)
2042 fatal_insn_not_found (insn);
2044 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2045 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2046 recog_data.n_dups = insn_data[icode].n_dups;
2048 insn_extract (insn);
2050 for (i = 0; i < noperands; i++)
2052 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2053 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2054 /* A VOIDmode match_operand gets its mode from its real operand.  */
2055 if (recog_data.operand_mode[i] == VOIDmode)
2056 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2059 for (i = 0; i < noperands; i++)
2060 recog_data.operand_type[i]
2061 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2062 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2063 : OP_IN);
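/* For example, a constraint of "=r" yields OP_OUT, "+r" yields
   OP_INOUT, and a plain "r" yields OP_IN.  */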
2065 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2068 /* After calling extract_insn, you can use this function to extract some
2069 information from the constraint strings into a more usable form.
2070 The collected data is stored in recog_op_alt. */
2071 void
2072 preprocess_constraints (void)
2074 int i;
2076 for (i = 0; i < recog_data.n_operands; i++)
2077 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2078 * sizeof (struct operand_alternative)));
2080 for (i = 0; i < recog_data.n_operands; i++)
2082 int j;
2083 struct operand_alternative *op_alt;
2084 const char *p = recog_data.constraints[i];
2086 op_alt = recog_op_alt[i];
2088 for (j = 0; j < recog_data.n_alternatives; j++)
2090 op_alt[j].cl = NO_REGS;
2091 op_alt[j].constraint = p;
2092 op_alt[j].matches = -1;
2093 op_alt[j].matched = -1;
2095 if (*p == '\0' || *p == ',')
2097 op_alt[j].anything_ok = 1;
2098 continue;
2101 for (;;)
2103 char c = *p;
2104 if (c == '#')
2106 c = *++p;
2107 while (c != ',' && c != '\0');
2108 if (c == ',' || c == '\0')
2110 p++;
2111 break;
2114 switch (c)
2116 case '=': case '+': case '*': case '%':
2117 case 'E': case 'F': case 'G': case 'H':
2118 case 's': case 'i': case 'n':
2119 case 'I': case 'J': case 'K': case 'L':
2120 case 'M': case 'N': case 'O': case 'P':
2121 /* These don't say anything we care about. */
2122 break;
2124 case '?':
2125 op_alt[j].reject += 6;
2126 break;
2127 case '!':
2128 op_alt[j].reject += 600;
2129 break;
2130 case '&':
2131 op_alt[j].earlyclobber = 1;
2132 break;
2134 case '0': case '1': case '2': case '3': case '4':
2135 case '5': case '6': case '7': case '8': case '9':
2137 char *end;
2138 op_alt[j].matches = strtoul (p, &end, 10);
2139 recog_op_alt[op_alt[j].matches][j].matched = i;
2140 p = end;
2142 continue;
2144 case 'm':
2145 op_alt[j].memory_ok = 1;
2146 break;
2147 case '<':
2148 op_alt[j].decmem_ok = 1;
2149 break;
2150 case '>':
2151 op_alt[j].incmem_ok = 1;
2152 break;
2153 case 'V':
2154 op_alt[j].nonoffmem_ok = 1;
2155 break;
2156 case 'o':
2157 op_alt[j].offmem_ok = 1;
2158 break;
2159 case 'X':
2160 op_alt[j].anything_ok = 1;
2161 break;
2163 case 'p':
2164 op_alt[j].is_address = 1;
2165 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2166 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2167 break;
2169 case 'g':
2170 case 'r':
2171 op_alt[j].cl =
2172 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2173 break;
2175 default:
2176 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2178 op_alt[j].memory_ok = 1;
2179 break;
2181 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2183 op_alt[j].is_address = 1;
2184 op_alt[j].cl
2185 = (reg_class_subunion
2186 [(int) op_alt[j].cl]
2187 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2188 break;
2191 op_alt[j].cl
2192 = (reg_class_subunion
2193 [(int) op_alt[j].cl]
2194 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2195 break;
2197 p += CONSTRAINT_LEN (c, p);
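/* For example (illustrative), for an operand whose constraint string
   is "r,m", the loop above leaves recog_op_alt[i][0].cl == GENERAL_REGS
   and recog_op_alt[i][1].memory_ok == 1.  */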
2203 /* Check the operands of an insn against the insn's operand constraints
2204 and return 1 if they are valid.
2205 The information about the insn's operands, constraints, operand modes
2206 etc. is obtained from the global variables set up by extract_insn.
2208 WHICH_ALTERNATIVE is set to a number which indicates which
2209 alternative of constraints was matched: 0 for the first alternative,
2210 1 for the next, etc.
2212 In addition, when two operands are required to match
2213 and it happens that the output operand is (reg) while the
2214 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2215 make the output operand look like the input.
2216 This is because the output operand is the one the template will print.
2218 This is used in final, just before printing the assembler code, and by
2219 the routines that determine an insn's attributes.
2221 If STRICT is positive, it means that we have been called after reload
2222 has been completed.  In that case, we must do all checks strictly.
2223 If it is zero, it means that we have been called before reload has
2224 completed.  In that case, we first try to see if we can find an
2225 alternative that matches strictly.  If not, we try again, this time
2226 assuming that reload will fix up the insn.  This provides a "best
2227 guess" for the alternative and is used to compute attributes of insns
2228 prior to reload.  A negative value of STRICT is used for this internal call.  */
2230 struct funny_match
2232 int this, other;
2236 constrain_operands (int strict)
2238 const char *constraints[MAX_RECOG_OPERANDS];
2239 int matching_operands[MAX_RECOG_OPERANDS];
2240 int earlyclobber[MAX_RECOG_OPERANDS];
2241 int c;
2243 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2244 int funny_match_index;
2246 which_alternative = 0;
2247 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2248 return 1;
2250 for (c = 0; c < recog_data.n_operands; c++)
2252 constraints[c] = recog_data.constraints[c];
2253 matching_operands[c] = -1;
2258 int seen_earlyclobber_at = -1;
2259 int opno;
2260 int lose = 0;
2261 funny_match_index = 0;
2263 for (opno = 0; opno < recog_data.n_operands; opno++)
2265 rtx op = recog_data.operand[opno];
2266 enum machine_mode mode = GET_MODE (op);
2267 const char *p = constraints[opno];
2268 int offset = 0;
2269 int win = 0;
2270 int val;
2271 int len;
2273 earlyclobber[opno] = 0;
2275 /* A unary operator may be accepted by the predicate, but it
2276 is irrelevant for matching constraints. */
2277 if (UNARY_P (op))
2278 op = XEXP (op, 0);
2280 if (GET_CODE (op) == SUBREG)
2282 if (REG_P (SUBREG_REG (op))
2283 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2284 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2285 GET_MODE (SUBREG_REG (op)),
2286 SUBREG_BYTE (op),
2287 GET_MODE (op));
2288 op = SUBREG_REG (op);
2291 /* An empty constraint or empty alternative
2292 allows anything which matched the pattern. */
2293 if (*p == 0 || *p == ',')
2294 win = 1;
2297 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2299 case '\0':
2300 len = 0;
2301 break;
2302 case ',':
2303 c = '\0';
2304 break;
2306 case '?': case '!': case '*': case '%':
2307 case '=': case '+':
2308 break;
2310 case '#':
2311 /* Ignore rest of this alternative as far as
2312 constraint checking is concerned. */
2314 p++;
2315 while (*p && *p != ',');
2316 len = 0;
2317 break;
2319 case '&':
2320 earlyclobber[opno] = 1;
2321 if (seen_earlyclobber_at < 0)
2322 seen_earlyclobber_at = opno;
2323 break;
2325 case '0': case '1': case '2': case '3': case '4':
2326 case '5': case '6': case '7': case '8': case '9':
2328 /* This operand must be the same as a previous one.
2329 This kind of constraint is used for instructions such
2330 as add when they take only two operands.
2332 Note that the lower-numbered operand is passed first.
2334 If we are not testing strictly, assume that this
2335 constraint will be satisfied. */
2337 char *end;
2338 int match;
2340 match = strtoul (p, &end, 10);
2341 p = end;
2343 if (strict < 0)
2344 val = 1;
2345 else
2347 rtx op1 = recog_data.operand[match];
2348 rtx op2 = recog_data.operand[opno];
2350 /* A unary operator may be accepted by the predicate,
2351 but it is irrelevant for matching constraints. */
2352 if (UNARY_P (op1))
2353 op1 = XEXP (op1, 0);
2354 if (UNARY_P (op2))
2355 op2 = XEXP (op2, 0);
2357 val = operands_match_p (op1, op2);
2360 matching_operands[opno] = match;
2361 matching_operands[match] = opno;
2363 if (val != 0)
2364 win = 1;
2366 /* If output is *x and input is *--x, arrange later
2367 to change the output to *--x as well, since the
2368 output op is the one that will be printed. */
2369 if (val == 2 && strict > 0)
2371 funny_match[funny_match_index].this = opno;
2372 funny_match[funny_match_index++].other = match;
2375 len = 0;
2376 break;
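/* E.g. for the matching-constraint case above (illustrative): in a
   two-address add pattern whose operand 2 has the constraint "0", a
   strict test requires
   operands_match_p (recog_data.operand[0], recog_data.operand[2])
   to hold.  */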
2378 case 'p':
2379 /* p is used for address_operands. When we are called by
2380 gen_reload, no one will have checked that the address is
2381 strictly valid, i.e., that all pseudos requiring hard regs
2382 have gotten them. */
2383 if (strict <= 0
2384 || (strict_memory_address_p (recog_data.operand_mode[opno],
2385 op)))
2386 win = 1;
2387 break;
2389 /* No need to check general_operand again;
2390 it was done in insn-recog.c. */
2391 case 'g':
2392 /* Anything goes unless it is a REG and really has a hard reg
2393 but the hard reg is not in the class GENERAL_REGS. */
2394 if (strict < 0
2395 || GENERAL_REGS == ALL_REGS
2396 || !REG_P (op)
2397 || (reload_in_progress
2398 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2399 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2400 win = 1;
2401 break;
2403 case 'X':
2404 /* This is used for a MATCH_SCRATCH in the cases when
2405 we don't actually need anything. So anything goes
2406 any time. */
2407 win = 1;
2408 break;
2410 case 'm':
2411 /* Memory operands must be valid, to the extent
2412 required by STRICT. */
2413 if (MEM_P (op))
2415 if (strict > 0
2416 && !strict_memory_address_p (GET_MODE (op),
2417 XEXP (op, 0)))
2418 break;
2419 if (strict == 0
2420 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2421 break;
2422 win = 1;
2424 /* Before reload, accept what reload can turn into mem. */
2425 else if (strict < 0 && CONSTANT_P (op))
2426 win = 1;
2427 /* During reload, accept a pseudo.  */
2428 else if (reload_in_progress && REG_P (op)
2429 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2430 win = 1;
2431 break;
2433 case '<':
2434 if (MEM_P (op)
2435 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2436 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2437 win = 1;
2438 break;
2440 case '>':
2441 if (MEM_P (op)
2442 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2443 || GET_CODE (XEXP (op, 0)) == POST_INC))
2444 win = 1;
2445 break;
2447 case 'E':
2448 case 'F':
2449 if (GET_CODE (op) == CONST_DOUBLE
2450 || (GET_CODE (op) == CONST_VECTOR
2451 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2452 win = 1;
2453 break;
2455 case 'G':
2456 case 'H':
2457 if (GET_CODE (op) == CONST_DOUBLE
2458 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2459 win = 1;
2460 break;
2462 case 's':
2463 if (GET_CODE (op) == CONST_INT
2464 || (GET_CODE (op) == CONST_DOUBLE
2465 && GET_MODE (op) == VOIDmode))
2466 break;
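/* Fall through: an `s' operand that is not an explicit integer
   is checked with the `i' test below.  */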
2467 case 'i':
2468 if (CONSTANT_P (op))
2469 win = 1;
2470 break;
2472 case 'n':
2473 if (GET_CODE (op) == CONST_INT
2474 || (GET_CODE (op) == CONST_DOUBLE
2475 && GET_MODE (op) == VOIDmode))
2476 win = 1;
2477 break;
2479 case 'I':
2480 case 'J':
2481 case 'K':
2482 case 'L':
2483 case 'M':
2484 case 'N':
2485 case 'O':
2486 case 'P':
2487 if (GET_CODE (op) == CONST_INT
2488 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2489 win = 1;
2490 break;
2492 case 'V':
2493 if (MEM_P (op)
2494 && ((strict > 0 && ! offsettable_memref_p (op))
2495 || (strict < 0
2496 && !(CONSTANT_P (op) || MEM_P (op)))
2497 || (reload_in_progress
2498 && !(REG_P (op)
2499 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2500 win = 1;
2501 break;
2503 case 'o':
2504 if ((strict > 0 && offsettable_memref_p (op))
2505 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2506 /* Before reload, accept what reload can handle. */
2507 || (strict < 0
2508 && (CONSTANT_P (op) || MEM_P (op)))
2510 /* During reload, accept a pseudo.  */
2510 || (reload_in_progress && REG_P (op)
2511 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2512 win = 1;
2513 break;
2515 default:
2517 enum reg_class cl;
2519 cl = (c == 'r'
2520 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2521 if (cl != NO_REGS)
2523 if (strict < 0
2524 || (strict == 0
2525 && REG_P (op)
2526 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2527 || (strict == 0 && GET_CODE (op) == SCRATCH)
2528 || (REG_P (op)
2529 && reg_fits_class_p (op, cl, offset, mode)))
2530 win = 1;
2532 #ifdef EXTRA_CONSTRAINT_STR
2533 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2534 win = 1;
2536 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2537 /* Every memory operand can be reloaded to fit. */
2538 && ((strict < 0 && MEM_P (op))
2539 /* Before reload, accept what reload can turn
2540 into mem. */
2541 || (strict < 0 && CONSTANT_P (op))
2542 /* During reload, accept a pseudo.  */
2543 || (reload_in_progress && REG_P (op)
2544 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2545 win = 1;
2546 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2547 /* Every address operand can be reloaded to fit. */
2548 && strict < 0)
2549 win = 1;
2550 #endif
2551 break;
2554 while (p += len, c);
2556 constraints[opno] = p;
2557 /* If this operand did not win somehow,
2558 this alternative loses. */
2559 if (! win)
2560 lose = 1;
2562 /* This alternative won; the operands are ok.
2563 Change whichever operands this alternative says to change. */
2564 if (! lose)
2566 int opno, eopno;
2568 /* See if any earlyclobber operand conflicts with some other
2569 operand. */
2571 if (strict > 0 && seen_earlyclobber_at >= 0)
2572 for (eopno = seen_earlyclobber_at;
2573 eopno < recog_data.n_operands;
2574 eopno++)
2575 /* Ignore earlyclobber operands now in memory,
2576 because we would often report failure when we have
2577 two memory operands, one of which was formerly a REG. */
2578 if (earlyclobber[eopno]
2579 && REG_P (recog_data.operand[eopno]))
2580 for (opno = 0; opno < recog_data.n_operands; opno++)
2581 if ((MEM_P (recog_data.operand[opno])
2582 || recog_data.operand_type[opno] != OP_OUT)
2583 && opno != eopno
2584 /* Ignore things like match_operator operands. */
2585 && *recog_data.constraints[opno] != 0
2586 && ! (matching_operands[opno] == eopno
2587 && operands_match_p (recog_data.operand[opno],
2588 recog_data.operand[eopno]))
2589 && ! safe_from_earlyclobber (recog_data.operand[opno],
2590 recog_data.operand[eopno]))
2591 lose = 1;
2593 if (! lose)
2595 while (--funny_match_index >= 0)
2597 recog_data.operand[funny_match[funny_match_index].other]
2598 = recog_data.operand[funny_match[funny_match_index].this];
2601 return 1;
2605 which_alternative++;
2607 while (which_alternative < recog_data.n_alternatives);
2609 which_alternative = -1;
2610 /* If we are about to reject this, but we are not to test strictly,
2611 try a very loose test. Only return failure if it fails also. */
2612 if (strict == 0)
2613 return constrain_operands (-1);
2614 else
2615 return 0;
2618 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2619 is a hard reg in class CL when its regno is offset by OFFSET
2620 and changed to mode MODE.
2621 If OPERAND occupies multiple hard regs, all of them must be in CL.  */
2624 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2625 enum machine_mode mode)
2627 int regno = REGNO (operand);
2628 if (regno < FIRST_PSEUDO_REGISTER
2629 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2630 regno + offset))
2632 int sr;
2633 regno += offset;
2634 for (sr = hard_regno_nregs[regno][mode] - 1;
2635 sr > 0; sr--)
2636 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2637 regno + sr))
2638 break;
2639 return sr == 0;
2642 return 0;
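/* For example (illustrative), on a 32-bit target where a DImode value
   in hard register 3 also occupies hard register 4, the result is 1
   only if both registers 3 and 4 are in CL.  */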
2645 /* Split a single instruction.  Helper function for split_all_insns and
2646 split_all_insns_noflow.  Return the last insn in the sequence if
2647 successful, or NULL if unsuccessful.  */
2649 static rtx
2650 split_insn (rtx insn)
2652 /* Split insns here to get max fine-grain parallelism. */
2653 rtx first = PREV_INSN (insn);
2654 rtx last = try_split (PATTERN (insn), insn, 1);
2656 if (last == insn)
2657 return NULL_RTX;
2659 /* try_split returns the NOTE that INSN became. */
2660 SET_INSN_DELETED (insn);
2662 /* ??? Coddle to md files that generate subregs in post-reload
2663 splitters instead of computing the proper hard register. */
2664 if (reload_completed && first != last)
2666 first = NEXT_INSN (first);
2667 for (;;)
2669 if (INSN_P (first))
2670 cleanup_subreg_operands (first);
2671 if (first == last)
2672 break;
2673 first = NEXT_INSN (first);
2676 return last;
2679 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2681 void
2682 split_all_insns (int upd_life)
2684 sbitmap blocks;
2685 bool changed;
2686 basic_block bb;
2688 blocks = sbitmap_alloc (last_basic_block);
2689 sbitmap_zero (blocks);
2690 changed = false;
2692 FOR_EACH_BB_REVERSE (bb)
2694 rtx insn, next;
2695 bool finish = false;
2697 for (insn = BB_HEAD (bb); !finish ; insn = next)
2699 /* Can't use `next_real_insn' because that might go across
2700 CODE_LABELs and short-circuit basic blocks.  */
2701 next = NEXT_INSN (insn);
2702 finish = (insn == BB_END (bb));
2703 if (INSN_P (insn))
2705 rtx set = single_set (insn);
2707 /* Don't split no-op move insns. These should silently
2708 disappear later in final. Splitting such insns would
2709 break the code that handles REG_NO_CONFLICT blocks. */
2710 if (set && set_noop_p (set))
2712 /* Nops get in the way while scheduling, so delete them
2713 now if register allocation has already been done. It
2714 is too risky to try to do this before register
2715 allocation, and there are unlikely to be very many
2716 nops then anyway.  */
2717 if (reload_completed)
2719 /* If the no-op set has a REG_UNUSED note, we need
2720 to update liveness information. */
2721 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2723 SET_BIT (blocks, bb->index);
2724 changed = true;
2726 /* ??? Is life info affected by deleting edges? */
2727 delete_insn_and_edges (insn);
2730 else
2732 rtx last = split_insn (insn);
2733 if (last)
2735 /* The split sequence may include a barrier, but the
2736 BB boundary we are interested in will be set to the
2737 previous insn.  */
2739 while (BARRIER_P (last))
2740 last = PREV_INSN (last);
2741 SET_BIT (blocks, bb->index);
2742 changed = true;
2749 if (changed)
2751 int old_last_basic_block = last_basic_block;
2753 find_many_sub_basic_blocks (blocks);
2755 if (old_last_basic_block != last_basic_block && upd_life)
2756 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2759 if (changed && upd_life)
2760 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2761 PROP_DEATH_NOTES);
2763 #ifdef ENABLE_CHECKING
2764 verify_flow_info ();
2765 #endif
2767 sbitmap_free (blocks);
2770 /* Same as split_all_insns, but do not expect CFG to be available.
2771 Used by machine dependent reorg passes. */
2773 void
2774 split_all_insns_noflow (void)
2776 rtx next, insn;
2778 for (insn = get_insns (); insn; insn = next)
2780 next = NEXT_INSN (insn);
2781 if (INSN_P (insn))
2783 /* Don't split no-op move insns. These should silently
2784 disappear later in final. Splitting such insns would
2785 break the code that handles REG_NO_CONFLICT blocks. */
2786 rtx set = single_set (insn);
2787 if (set && set_noop_p (set))
2789 /* Nops get in the way while scheduling, so delete them
2790 now if register allocation has already been done. It
2791 is too risky to try to do this before register
2792 allocation, and there are unlikely to be very many
2793 nops then anyway.
2795 ??? Should we use delete_insn when the CFG isn't valid? */
2796 if (reload_completed)
2797 delete_insn_and_edges (insn);
2799 else
2800 split_insn (insn);
2805 #ifdef HAVE_peephole2
2806 struct peep2_insn_data
2808 rtx insn;
2809 regset live_before;
2812 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2813 static int peep2_current;
2815 /* A non-insn marker indicating the last insn of the block.
2816 The live_before regset for this element is correct, indicating
2817 global_live_at_end for the block. */
2818 #define PEEP2_EOB pc_rtx
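/* peep2_insn_data is used as a circular buffer of
   MAX_INSNS_PER_PEEP2 + 1 slots; an offset N relative to peep2_current
   is mapped to a slot index with wrap-around, as in this sketch:

     i = peep2_current + n;
     if (i >= MAX_INSNS_PER_PEEP2 + 1)
       i -= MAX_INSNS_PER_PEEP2 + 1;  */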
2820 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2821 does not exist. Used by the recognizer to find the next insn to match
2822 in a multi-insn pattern. */
2825 peep2_next_insn (int n)
2827 gcc_assert (n < MAX_INSNS_PER_PEEP2 + 1);
2829 n += peep2_current;
2830 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2831 n -= MAX_INSNS_PER_PEEP2 + 1;
2833 if (peep2_insn_data[n].insn == PEEP2_EOB)
2834 return NULL_RTX;
2835 return peep2_insn_data[n].insn;
2838 /* Return true if REGNO is dead before the Nth non-note insn
2839 after `current'. */
2842 peep2_regno_dead_p (int ofs, int regno)
2844 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2846 ofs += peep2_current;
2847 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2848 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2850 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2852 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2855 /* Similarly for a REG. */
2858 peep2_reg_dead_p (int ofs, rtx reg)
2860 int regno, n;
2862 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2864 ofs += peep2_current;
2865 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2866 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2868 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2870 regno = REGNO (reg);
2871 n = hard_regno_nregs[regno][GET_MODE (reg)];
2872 while (--n >= 0)
2873 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2874 return 0;
2875 return 1;
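/* E.g. for a DImode REG spanning two hard registers, the value is
   considered dead only if neither constituent hard register is live
   before the selected insn.  */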
2878 /* Try to find a hard register of mode MODE, matching the register class in
2879 CLASS_STR, which is available from the beginning of the insn at peep2
2880 offset FROM and remains available until the end of the insn at peep2
2881 offset TO; the offsets are interpreted relative to the current insn,
2882 as in peep2_next_insn.
2883 Registers that already have bits set in REG_SET will not be considered.
2885 If an appropriate register is available, it will be returned and the
2886 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2887 returned. */
2890 peep2_find_free_register (int from, int to, const char *class_str,
2891 enum machine_mode mode, HARD_REG_SET *reg_set)
2893 static int search_ofs;
2894 enum reg_class cl;
2895 HARD_REG_SET live;
2896 int i;
2898 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2899 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2901 from += peep2_current;
2902 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2903 from -= MAX_INSNS_PER_PEEP2 + 1;
2904 to += peep2_current;
2905 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2906 to -= MAX_INSNS_PER_PEEP2 + 1;
2908 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2909 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2911 while (from != to)
2913 HARD_REG_SET this_live;
2915 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2916 from = 0;
2917 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2918 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2919 IOR_HARD_REG_SET (live, this_live);
2922 cl = (class_str[0] == 'r' ? GENERAL_REGS
2923 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2925 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2927 int raw_regno, regno, success, j;
2929 /* Distribute the free registers as much as possible. */
2930 raw_regno = search_ofs + i;
2931 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2932 raw_regno -= FIRST_PSEUDO_REGISTER;
2933 #ifdef REG_ALLOC_ORDER
2934 regno = reg_alloc_order[raw_regno];
2935 #else
2936 regno = raw_regno;
2937 #endif
2939 /* Don't allocate fixed registers. */
2940 if (fixed_regs[regno])
2941 continue;
2942 /* Make sure the register is of the right class. */
2943 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2944 continue;
2945 /* And can support the mode we need. */
2946 if (! HARD_REGNO_MODE_OK (regno, mode))
2947 continue;
2948 /* And that we don't create an extra save/restore. */
2949 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2950 continue;
2951 /* And we don't clobber traceback for noreturn functions. */
2952 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2953 && (! reload_completed || frame_pointer_needed))
2954 continue;
2956 success = 1;
2957 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2959 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2960 || TEST_HARD_REG_BIT (live, regno + j))
2962 success = 0;
2963 break;
2966 if (success)
2968 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2969 SET_HARD_REG_BIT (*reg_set, regno + j);
2971 /* Start the next search with the next register. */
2972 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2973 raw_regno = 0;
2974 search_ofs = raw_regno;
2976 return gen_rtx_REG (mode, regno);
2980 search_ofs = 0;
2981 return NULL_RTX;
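/* Illustrative (hypothetical) context: a match_scratch operand in a
   define_peephole2 pattern, e.g.

     (define_peephole2
       [(match_scratch:SI 2 "r")
        ...]
       "..."
       [...])

   is implemented by having the generated recognizer call
   peep2_find_free_register to allocate the scratch register.  */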
2984 /* Perform the peephole2 optimization pass. */
2986 void
2987 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
2989 rtx insn, prev;
2990 regset live;
2991 int i;
2992 basic_block bb;
2993 #ifdef HAVE_conditional_execution
2994 sbitmap blocks;
2995 bool changed;
2996 #endif
2997 bool do_cleanup_cfg = false;
2998 bool do_global_life_update = false;
2999 bool do_rebuild_jump_labels = false;
3001 /* Initialize the regsets we're going to use. */
3002 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3003 peep2_insn_data[i].live_before = ALLOC_REG_SET (&reg_obstack);
3004 live = ALLOC_REG_SET (&reg_obstack);
3006 #ifdef HAVE_conditional_execution
3007 blocks = sbitmap_alloc (last_basic_block);
3008 sbitmap_zero (blocks);
3009 changed = false;
3010 #else
3011 count_or_remove_death_notes (NULL, 1);
3012 #endif
3014 FOR_EACH_BB_REVERSE (bb)
3016 struct propagate_block_info *pbi;
3017 reg_set_iterator rsi;
3018 unsigned int j;
3020 /* Indicate that all slots except the last hold invalid data.  */
3021 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3022 peep2_insn_data[i].insn = NULL_RTX;
3024 /* Indicate that the last slot contains live_after data. */
3025 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3026 peep2_current = MAX_INSNS_PER_PEEP2;
3028 /* Start up propagation. */
3029 COPY_REG_SET (live, bb->global_live_at_end);
3030 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3032 #ifdef HAVE_conditional_execution
3033 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3034 #else
3035 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3036 #endif
3038 for (insn = BB_END (bb); ; insn = prev)
3040 prev = PREV_INSN (insn);
3041 if (INSN_P (insn))
3043 rtx try, before_try, x;
3044 int match_len;
3045 rtx note;
3046 bool was_call = false;
3048 /* Record this insn. */
3049 if (--peep2_current < 0)
3050 peep2_current = MAX_INSNS_PER_PEEP2;
3051 peep2_insn_data[peep2_current].insn = insn;
3052 propagate_one_insn (pbi, insn);
3053 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3055 /* Match the peephole. */
3056 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3057 if (try != NULL)
3059 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3060 in the new sequence and copy our CALL_INSN_FUNCTION_USAGE
3061 and other cfg-related call notes to it.  */
3062 for (i = 0; i <= match_len; ++i)
3064 int j;
3065 rtx old_insn, new_insn, note;
3067 j = i + peep2_current;
3068 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3069 j -= MAX_INSNS_PER_PEEP2 + 1;
3070 old_insn = peep2_insn_data[j].insn;
3071 if (!CALL_P (old_insn))
3072 continue;
3073 was_call = true;
3075 new_insn = try;
3076 while (new_insn != NULL_RTX)
3078 if (CALL_P (new_insn))
3079 break;
3080 new_insn = NEXT_INSN (new_insn);
3083 gcc_assert (new_insn != NULL_RTX);
3085 CALL_INSN_FUNCTION_USAGE (new_insn)
3086 = CALL_INSN_FUNCTION_USAGE (old_insn);
3088 for (note = REG_NOTES (old_insn);
3089 note;
3090 note = XEXP (note, 1))
3091 switch (REG_NOTE_KIND (note))
3093 case REG_NORETURN:
3094 case REG_SETJMP:
3095 REG_NOTES (new_insn)
3096 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3097 XEXP (note, 0),
3098 REG_NOTES (new_insn));
3099 default:
3100 /* Discard all other reg notes. */
3101 break;
3104 /* Croak if there is another call in the sequence. */
3105 while (++i <= match_len)
3107 j = i + peep2_current;
3108 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3109 j -= MAX_INSNS_PER_PEEP2 + 1;
3110 old_insn = peep2_insn_data[j].insn;
3111 gcc_assert (!CALL_P (old_insn));
3113 break;
3116 i = match_len + peep2_current;
3117 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3118 i -= MAX_INSNS_PER_PEEP2 + 1;
3120 note = find_reg_note (peep2_insn_data[i].insn,
3121 REG_EH_REGION, NULL_RTX);
3123 /* Replace the old sequence with the new. */
3124 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3125 INSN_LOCATOR (peep2_insn_data[i].insn));
3126 before_try = PREV_INSN (insn);
3127 delete_insn_chain (insn, peep2_insn_data[i].insn);
3129 /* Re-insert the EH_REGION notes. */
3130 if (note || (was_call && nonlocal_goto_handler_labels))
3132 edge eh_edge;
3133 edge_iterator ei;
3135 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3136 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3137 break;
3139 for (x = try ; x != before_try ; x = PREV_INSN (x))
3140 if (CALL_P (x)
3141 || (flag_non_call_exceptions
3142 && may_trap_p (PATTERN (x))
3143 && !find_reg_note (x, REG_EH_REGION, NULL)))
3145 if (note)
3146 REG_NOTES (x)
3147 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3148 XEXP (note, 0),
3149 REG_NOTES (x));
3151 if (x != BB_END (bb) && eh_edge)
3153 edge nfte, nehe;
3154 int flags;
3156 nfte = split_block (bb, x);
3157 flags = (eh_edge->flags
3158 & (EDGE_EH | EDGE_ABNORMAL));
3159 if (CALL_P (x))
3160 flags |= EDGE_ABNORMAL_CALL;
3161 nehe = make_edge (nfte->src, eh_edge->dest,
3162 flags);
3164 nehe->probability = eh_edge->probability;
3165 nfte->probability
3166 = REG_BR_PROB_BASE - nehe->probability;
3168 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3169 #ifdef HAVE_conditional_execution
3170 SET_BIT (blocks, nfte->dest->index);
3171 changed = true;
3172 #endif
3173 bb = nfte->src;
3174 eh_edge = nehe;
3178 /* A possibly trapping insn may have been converted into a
3179 non-trapping one; zap any now-dummy outgoing edges.  */
3180 do_cleanup_cfg |= purge_dead_edges (bb);
3183 #ifdef HAVE_conditional_execution
3184 /* With conditional execution, we cannot back up the
3185 live information so easily, since the conditional
3186 death data structures are not so self-contained.
3187 So record that we've made a modification to this
3188 block and update life information at the end. */
3189 SET_BIT (blocks, bb->index);
3190 changed = true;
3192 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3193 peep2_insn_data[i].insn = NULL_RTX;
3194 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3195 #else
3196 /* Back up lifetime information past the end of the
3197 newly created sequence. */
3198 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3199 i = 0;
3200 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3202 /* Update life information for the new sequence. */
3203 x = try;
3206 if (INSN_P (x))
3208 if (--i < 0)
3209 i = MAX_INSNS_PER_PEEP2;
3210 peep2_insn_data[i].insn = x;
3211 propagate_one_insn (pbi, x);
3212 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3214 x = PREV_INSN (x);
3216 while (x != prev);
3218 /* ??? Should verify that LIVE now matches what we
3219 had before the new sequence. */
3221 peep2_current = i;
3222 #endif
3224 /* If we generated a jump instruction, it won't have
3225 JUMP_LABEL set. Recompute after we're done. */
3226 for (x = try; x != before_try; x = PREV_INSN (x))
3227 if (JUMP_P (x))
3229 do_rebuild_jump_labels = true;
3230 break;
3235 if (insn == BB_HEAD (bb))
3236 break;
3239 /* Some peepholes can decide they don't need one or more of their
3240 inputs.  If this happens, a local life update is not enough.  */
3241 EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->global_live_at_start, live,
3242 0, j, rsi)
3244 do_global_life_update = true;
3245 break;
3248 free_propagate_block_info (pbi);
3251 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3252 FREE_REG_SET (peep2_insn_data[i].live_before);
3253 FREE_REG_SET (live);
3255 if (do_rebuild_jump_labels)
3256 rebuild_jump_labels (get_insns ());
3258 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3259 we've changed global life since exception handlers are no longer
3260 reachable. */
3261 if (do_cleanup_cfg)
3263 cleanup_cfg (0);
3264 do_global_life_update = true;
3266 if (do_global_life_update)
3267 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3268 #ifdef HAVE_conditional_execution
3269 else
3271 count_or_remove_death_notes (blocks, 1);
3272 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3274 sbitmap_free (blocks);
3275 #endif
3277 #endif /* HAVE_peephole2 */
3279 /* Common predicates for use with define_bypass. */
3281 /* True if the dependency between OUT_INSN and IN_INSN is on the data
3282 being stored, not on the address operand(s) of the store.  IN_INSN
3283 must be single_set.  OUT_INSN must be either a single_set or a
3284 PARALLEL with SETs inside.  */
3287 store_data_bypass_p (rtx out_insn, rtx in_insn)
3289 rtx out_set, in_set;
3291 in_set = single_set (in_insn);
3292 gcc_assert (in_set);
3294 if (!MEM_P (SET_DEST (in_set)))
3295 return false;
3297 out_set = single_set (out_insn);
3298 if (out_set)
3300 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3301 return false;
3303 else
3305 rtx out_pat;
3306 int i;
3308 out_pat = PATTERN (out_insn);
3309 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3311 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3313 rtx exp = XVECEXP (out_pat, 0, i);
3315 if (GET_CODE (exp) == CLOBBER)
3316 continue;
3318 gcc_assert (GET_CODE (exp) == SET);
3320 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3321 return false;
3325 return true;
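/* For example (illustrative), a machine description could use this
   predicate in a define_bypass so that a shorter latency applies when
   a store depends on the producer only for the data being stored:

     (define_bypass 1 "producer_insn" "store_insn" "store_data_bypass_p")

   where "producer_insn" and "store_insn" stand for hypothetical insn
   reservation names.  */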
3328 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3329 condition, and not in the THEN or ELSE branch.  OUT_INSN may be a single
3330 or a multiple set; IN_INSN should be single_set for the result to be exact,
3331 but for convenience of insn categorization it may be any JUMP or CALL insn.  */
3334 if_test_bypass_p (rtx out_insn, rtx in_insn)
3336 rtx out_set, in_set;
3338 in_set = single_set (in_insn);
3339 if (! in_set)
3341 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3342 return false;
3345 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3346 return false;
3347 in_set = SET_SRC (in_set);
3349 out_set = single_set (out_insn);
3350 if (out_set)
3352 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3353 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3354 return false;
3356 else
3358 rtx out_pat;
3359 int i;
3361 out_pat = PATTERN (out_insn);
3362 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3364 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3366 rtx exp = XVECEXP (out_pat, 0, i);
3368 if (GET_CODE (exp) == CLOBBER)
3369 continue;
3371 gcc_assert (GET_CODE (exp) == SET);
3373 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3374 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3375 return false;
3379 return true;
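/* For example (illustrative), if OUT_INSN is

     (set (reg:SI 1) (plus:SI (reg:SI 5) (const_int 1)))

   and IN_INSN is the conditional move

     (set (reg:SI 2) (if_then_else:SI (eq (reg:SI 1) (const_int 0))
                                      (reg:SI 3) (reg:SI 4)))

   then if_test_bypass_p returns true: reg 1 is used only in the test,
   not in the THEN or ELSE arm.  */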