/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (rtx insn)
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
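
/* For illustration only: a hypothetical caller making a single,
   immediately validated change might write

       rtx *loc = &SET_SRC (PATTERN (insn));

       if (validate_change (insn, loc, const0_rtx, 0))
         ;   (INSN was re-recognized with the change in place)
       else
         ;   (the change was rejected and automatically undone)

   since with IN_GROUP == 0 validate_change finishes by calling
   apply_change_group itself.  */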

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group (void)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
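
/* For illustration only: a hypothetical grouped use of the routines
   above might look like

       int n = num_validated_changes ();

       validate_change (insn, &XEXP (src, 0), new_op0, 1);
       validate_change (insn, &XEXP (src, 1), new_op1, 1);
       if (! apply_change_group ())
         ;   (both changes were backed out automatically)

   where insn, src, new_op0 and new_op1 are placeholders.  Before calling
   apply_change_group, a caller can also retract the queued changes
   itself with cancel_changes (n).  */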

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
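
/* For illustration only: replacing every use of one rtx with another in
   a single insn, with automatic re-recognition, might look like

       if (validate_replace_rtx (from_reg, to_reg, insn))
         ;   (INSN still matches some pattern with TO in place)

   where from_reg, to_reg and insn are placeholders.  The _group
   variants above only queue the changes; the caller must then run
   apply_change_group itself.  */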

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}

/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
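
/* For illustration only: a hypothetical pass that has just seen INSN set
   REG might ask whether the value is consumed exactly once:

       rtx use_insn;
       rtx *usep = find_single_use (reg, insn, &use_insn);

       if (usep)
         ;   (*usep is the innermost expression using REG, in use_insn)

   where reg and insn are placeholders; as documented above, this relies
   on LOG_LINKS and REG_DEAD notes being available.  */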

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
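
/* For illustration only, under the usual target assumptions: with MODE
   == SImode, general_operand accepts e.g. (reg:SI 100), a (mem:SI ...)
   whose address is legitimate, and (const_int 5) (a CONST_INT has
   VOIDmode and so is checked with trunc_int_for_mode), while it rejects
   any register or memory operand whose own mode differs from SImode.  */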

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
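
/* For illustration only: on a target where the stack grows downward and
   PUSH_ROUNDING does not pad SImode, STACK_PUSH_CODE is PRE_DEC (see the
   macros at the top of this file), so push_operand accepts a memory
   reference of the shape

       (mem:SI (pre_dec:SI (reg:SI sp)))

   where sp stands for the stack pointer; padded pushes instead use the
   PRE_MODIFY form checked above.  */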

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
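
/* For illustration only: for an asm statement with two outputs and one
   input, BODY is a PARALLEL of two SETs (plus any clobbers), so the
   PARALLEL case above returns 1 + 2 == 3; for a bare (asm_operands ...)
   with a single input it returns 1.  */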

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
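
/* For illustration only: checking a hypothetical operand against the
   two-alternative constraint string "rm" might look like

       if (asm_operand_ok (op, "rm") > 0)
         ;   (OP fit either the register or the memory alternative)

   remembering that a 0 result means no alternative matched and a
   negative result is inconclusive (e.g. for matching constraints).  */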

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
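
/* For illustration only: with MODE == SImode (mode_sz == 4) and
   Y == (plus (reg) (const_int 8)), the constant-term path above
   temporarily rewrites the constant to (const_int 11), i.e. adds
   mode_sz - 1, and asks whether the adjusted address is still valid
   in QImode before restoring Y.  */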

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */)
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}

/* Like extract_insn, but save the insn extracted and don't extract again
   when called repeatedly for the same insn, on the expectation that
   recog_data still contains valid information.  This is used primarily by
   the gen_attr infrastructure, which often extracts the same insn again
   and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
1964 /* Analyze INSN and fill in recog_data. */
1966 void
1967 extract_insn (rtx insn)
1969 int i;
1970 int icode;
1971 int noperands;
1972 rtx body = PATTERN (insn);
1974 recog_data.insn = NULL;
1975 recog_data.n_operands = 0;
1976 recog_data.n_alternatives = 0;
1977 recog_data.n_dups = 0;
1978 which_alternative = -1;
1980 switch (GET_CODE (body))
1982 case USE:
1983 case CLOBBER:
1984 case ASM_INPUT:
1985 case ADDR_VEC:
1986 case ADDR_DIFF_VEC:
1987 return;
1989 case SET:
1990 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1991 goto asm_insn;
1992 else
1993 goto normal_insn;
1994 case PARALLEL:
1995 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
1996 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1997 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1998 goto asm_insn;
1999 else
2000 goto normal_insn;
2001 case ASM_OPERANDS:
2002 asm_insn:
2003 recog_data.n_operands = noperands = asm_noperands (body);
2004 if (noperands >= 0)
2006 /* This insn is an `asm' with operands. */
2008 /* expand_asm_operands makes sure there aren't too many operands. */
2009 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2011 /* Now get the operand values and constraints out of the insn. */
2012 decode_asm_operands (body, recog_data.operand,
2013 recog_data.operand_loc,
2014 recog_data.constraints,
2015 recog_data.operand_mode);
2016 if (noperands > 0)
2018 const char *p = recog_data.constraints[0];
2019 recog_data.n_alternatives = 1;
2020 while (*p)
2021 recog_data.n_alternatives += (*p++ == ',');
2023 break;
2025 fatal_insn_not_found (insn);
2027 default:
2028 normal_insn:
2029 /* Ordinary insn: recognize it, get the operands via insn_extract
2030 and get the constraints. */
2032 icode = recog_memoized (insn);
2033 if (icode < 0)
2034 fatal_insn_not_found (insn);
2036 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2037 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2038 recog_data.n_dups = insn_data[icode].n_dups;
2040 insn_extract (insn);
2042 for (i = 0; i < noperands; i++)
2044 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2045 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2046 /* VOIDmode match_operands get their modes from their real operands. */
2047 if (recog_data.operand_mode[i] == VOIDmode)
2048 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2051 for (i = 0; i < noperands; i++)
2052 recog_data.operand_type[i]
2053 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2054 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2055 : OP_IN);
2057 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
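/* Sketch, not part of the original file: after extract_insn, the global
   recog_data can be read directly.  DUMP is a hypothetical FILE *.  */
#if 0
  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    fprintf (dump, "operand %d: constraint \"%s\", mode %s\n",
	     i, recog_data.constraints[i],
	     GET_MODE_NAME (recog_data.operand_mode[i]));
#endif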
2060 /* After calling extract_insn, you can use this function to extract some
2061 information from the constraint strings into a more usable form.
2062 The collected data is stored in recog_op_alt. */
2063 void
2064 preprocess_constraints (void)
2066 int i;
2068 for (i = 0; i < recog_data.n_operands; i++)
2069 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2070 * sizeof (struct operand_alternative)));
2072 for (i = 0; i < recog_data.n_operands; i++)
2074 int j;
2075 struct operand_alternative *op_alt;
2076 const char *p = recog_data.constraints[i];
2078 op_alt = recog_op_alt[i];
2080 for (j = 0; j < recog_data.n_alternatives; j++)
2082 op_alt[j].cl = NO_REGS;
2083 op_alt[j].constraint = p;
2084 op_alt[j].matches = -1;
2085 op_alt[j].matched = -1;
2087 if (*p == '\0' || *p == ',')
2089 op_alt[j].anything_ok = 1;
2090 continue;
2093 for (;;)
2095 char c = *p;
2096 if (c == '#')
2097 do
2098 c = *++p;
2099 while (c != ',' && c != '\0');
2100 if (c == ',' || c == '\0')
2102 p++;
2103 break;
2106 switch (c)
2108 case '=': case '+': case '*': case '%':
2109 case 'E': case 'F': case 'G': case 'H':
2110 case 's': case 'i': case 'n':
2111 case 'I': case 'J': case 'K': case 'L':
2112 case 'M': case 'N': case 'O': case 'P':
2113 /* These don't say anything we care about. */
2114 break;
2116 case '?':
2117 op_alt[j].reject += 6;
2118 break;
2119 case '!':
2120 op_alt[j].reject += 600;
2121 break;
2122 case '&':
2123 op_alt[j].earlyclobber = 1;
2124 break;
2126 case '0': case '1': case '2': case '3': case '4':
2127 case '5': case '6': case '7': case '8': case '9':
2129 char *end;
2130 op_alt[j].matches = strtoul (p, &end, 10);
2131 recog_op_alt[op_alt[j].matches][j].matched = i;
2132 p = end;
2134 continue;
2136 case 'm':
2137 op_alt[j].memory_ok = 1;
2138 break;
2139 case '<':
2140 op_alt[j].decmem_ok = 1;
2141 break;
2142 case '>':
2143 op_alt[j].incmem_ok = 1;
2144 break;
2145 case 'V':
2146 op_alt[j].nonoffmem_ok = 1;
2147 break;
2148 case 'o':
2149 op_alt[j].offmem_ok = 1;
2150 break;
2151 case 'X':
2152 op_alt[j].anything_ok = 1;
2153 break;
2155 case 'p':
2156 op_alt[j].is_address = 1;
2157 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2158 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2159 break;
2161 case 'g':
2162 case 'r':
2163 op_alt[j].cl =
2164 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2165 break;
2167 default:
2168 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2170 op_alt[j].memory_ok = 1;
2171 break;
2173 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2175 op_alt[j].is_address = 1;
2176 op_alt[j].cl
2177 = (reg_class_subunion
2178 [(int) op_alt[j].cl]
2179 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2180 break;
2183 op_alt[j].cl
2184 = (reg_class_subunion
2185 [(int) op_alt[j].cl]
2186 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2187 break;
2189 p += CONSTRAINT_LEN (c, p);
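/* Sketch, not part of the original file: once preprocess_constraints has
   run, per-alternative queries are plain struct reads instead of repeated
   constraint-string parsing.  OPNO, J and the helper are hypothetical.  */
#if 0
  preprocess_constraints ();
  if (recog_op_alt[opno][j].memory_ok || recog_op_alt[opno][j].anything_ok)
    allow_memory_for_operand (opno);
#endif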
2195 /* Check the operands of an insn against the insn's operand constraints
2196 and return 1 if they are valid.
2197 The information about the insn's operands, constraints, operand modes
2198 etc. is obtained from the global variables set up by extract_insn.
2200 WHICH_ALTERNATIVE is set to a number which indicates which
2201 alternative of constraints was matched: 0 for the first alternative,
2202 1 for the next, etc.
2204 In addition, when two operands are required to match
2205 and it happens that the output operand is (reg) while the
2206 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2207 make the output operand look like the input.
2208 This is because the output operand is the one the template will print.
2210 This is used in final, just before printing the assembler code and by
2211 the routines that determine an insn's attribute.
2213 If STRICT is a positive value, it means that we have been
2214 called after reload has been completed. In that case, we must
2215 do all checks strictly. If it is zero, it means that we have been called
2216 before reload has completed. In that case, we first try to see if we can
2217 find an alternative that matches strictly. If not, we try again, this
2218 time assuming that reload will fix up the insn. This provides a "best
2219 guess" for the alternative and is used to compute attributes of insns prior
2220 to reload. A negative value of STRICT is used for this internal call. */
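/* Sketch, not part of the original file, of the STRICT protocol described
   above: post-reload callers check strictly, pre-reload callers pass zero
   and rely on the internal retry with -1.  INSN is hypothetical.  */
#if 0
  if (! constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
#endif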
2222 struct funny_match
2224 int this, other;
2227 int
2228 constrain_operands (int strict)
2230 const char *constraints[MAX_RECOG_OPERANDS];
2231 int matching_operands[MAX_RECOG_OPERANDS];
2232 int earlyclobber[MAX_RECOG_OPERANDS];
2233 int c;
2235 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2236 int funny_match_index;
2238 which_alternative = 0;
2239 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2240 return 1;
2242 for (c = 0; c < recog_data.n_operands; c++)
2244 constraints[c] = recog_data.constraints[c];
2245 matching_operands[c] = -1;
2248 do
2250 int opno;
2251 int lose = 0;
2252 funny_match_index = 0;
2254 for (opno = 0; opno < recog_data.n_operands; opno++)
2256 rtx op = recog_data.operand[opno];
2257 enum machine_mode mode = GET_MODE (op);
2258 const char *p = constraints[opno];
2259 int offset = 0;
2260 int win = 0;
2261 int val;
2262 int len;
2264 earlyclobber[opno] = 0;
2266 /* A unary operator may be accepted by the predicate, but it
2267 is irrelevant for matching constraints. */
2268 if (UNARY_P (op))
2269 op = XEXP (op, 0);
2271 if (GET_CODE (op) == SUBREG)
2273 if (REG_P (SUBREG_REG (op))
2274 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2275 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2276 GET_MODE (SUBREG_REG (op)),
2277 SUBREG_BYTE (op),
2278 GET_MODE (op));
2279 op = SUBREG_REG (op);
2282 /* An empty constraint or empty alternative
2283 allows anything which matched the pattern. */
2284 if (*p == 0 || *p == ',')
2285 win = 1;
2288 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2290 case '\0':
2291 len = 0;
2292 break;
2293 case ',':
2294 c = '\0';
2295 break;
2297 case '?': case '!': case '*': case '%':
2298 case '=': case '+':
2299 break;
2301 case '#':
2302 /* Ignore rest of this alternative as far as
2303 constraint checking is concerned. */
2304 do
2305 p++;
2306 while (*p && *p != ',');
2307 len = 0;
2308 break;
2310 case '&':
2311 earlyclobber[opno] = 1;
2312 break;
2314 case '0': case '1': case '2': case '3': case '4':
2315 case '5': case '6': case '7': case '8': case '9':
2317 /* This operand must be the same as a previous one.
2318 This kind of constraint is used for instructions such
2319 as add when they take only two operands.
2321 Note that the lower-numbered operand is passed first.
2323 If we are not testing strictly, assume that this
2324 constraint will be satisfied. */
2326 char *end;
2327 int match;
2329 match = strtoul (p, &end, 10);
2330 p = end;
2332 if (strict < 0)
2333 val = 1;
2334 else
2336 rtx op1 = recog_data.operand[match];
2337 rtx op2 = recog_data.operand[opno];
2339 /* A unary operator may be accepted by the predicate,
2340 but it is irrelevant for matching constraints. */
2341 if (UNARY_P (op1))
2342 op1 = XEXP (op1, 0);
2343 if (UNARY_P (op2))
2344 op2 = XEXP (op2, 0);
2346 val = operands_match_p (op1, op2);
2349 matching_operands[opno] = match;
2350 matching_operands[match] = opno;
2352 if (val != 0)
2353 win = 1;
2355 /* If output is *x and input is *--x, arrange later
2356 to change the output to *--x as well, since the
2357 output op is the one that will be printed. */
2358 if (val == 2 && strict > 0)
2360 funny_match[funny_match_index].this = opno;
2361 funny_match[funny_match_index++].other = match;
2364 len = 0;
2365 break;
2367 case 'p':
2368 /* p is used for address_operands. When we are called by
2369 gen_reload, no one will have checked that the address is
2370 strictly valid, i.e., that all pseudos requiring hard regs
2371 have gotten them. */
2372 if (strict <= 0
2373 || (strict_memory_address_p (recog_data.operand_mode[opno],
2374 op)))
2375 win = 1;
2376 break;
2378 /* No need to check general_operand again;
2379 it was done in insn-recog.c. */
2380 case 'g':
2381 /* Anything goes unless it is a REG and really has a hard reg
2382 but the hard reg is not in the class GENERAL_REGS. */
2383 if (strict < 0
2384 || GENERAL_REGS == ALL_REGS
2385 || !REG_P (op)
2386 || (reload_in_progress
2387 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2388 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2389 win = 1;
2390 break;
2392 case 'X':
2393 /* This is used for a MATCH_SCRATCH in the cases when
2394 we don't actually need anything. So anything goes
2395 any time. */
2396 win = 1;
2397 break;
2399 case 'm':
2400 /* Memory operands must be valid, to the extent
2401 required by STRICT. */
2402 if (MEM_P (op))
2404 if (strict > 0
2405 && !strict_memory_address_p (GET_MODE (op),
2406 XEXP (op, 0)))
2407 break;
2408 if (strict == 0
2409 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2410 break;
2411 win = 1;
2413 /* Before reload, accept what reload can turn into mem. */
2414 else if (strict < 0 && CONSTANT_P (op))
2415 win = 1;
2416 /* During reload, accept a pseudo */
2417 else if (reload_in_progress && REG_P (op)
2418 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2419 win = 1;
2420 break;
2422 case '<':
2423 if (MEM_P (op)
2424 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2425 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2426 win = 1;
2427 break;
2429 case '>':
2430 if (MEM_P (op)
2431 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2432 || GET_CODE (XEXP (op, 0)) == POST_INC))
2433 win = 1;
2434 break;
2436 case 'E':
2437 case 'F':
2438 if (GET_CODE (op) == CONST_DOUBLE
2439 || (GET_CODE (op) == CONST_VECTOR
2440 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2441 win = 1;
2442 break;
2444 case 'G':
2445 case 'H':
2446 if (GET_CODE (op) == CONST_DOUBLE
2447 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2448 win = 1;
2449 break;
2451 case 's':
2452 if (GET_CODE (op) == CONST_INT
2453 || (GET_CODE (op) == CONST_DOUBLE
2454 && GET_MODE (op) == VOIDmode))
2455 break;
2456 case 'i':
2457 if (CONSTANT_P (op))
2458 win = 1;
2459 break;
2461 case 'n':
2462 if (GET_CODE (op) == CONST_INT
2463 || (GET_CODE (op) == CONST_DOUBLE
2464 && GET_MODE (op) == VOIDmode))
2465 win = 1;
2466 break;
2468 case 'I':
2469 case 'J':
2470 case 'K':
2471 case 'L':
2472 case 'M':
2473 case 'N':
2474 case 'O':
2475 case 'P':
2476 if (GET_CODE (op) == CONST_INT
2477 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2478 win = 1;
2479 break;
2481 case 'V':
2482 if (MEM_P (op)
2483 && ((strict > 0 && ! offsettable_memref_p (op))
2484 || (strict < 0
2485 && !(CONSTANT_P (op) || MEM_P (op)))
2486 || (reload_in_progress
2487 && !(REG_P (op)
2488 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2489 win = 1;
2490 break;
2492 case 'o':
2493 if ((strict > 0 && offsettable_memref_p (op))
2494 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2495 /* Before reload, accept what reload can handle. */
2496 || (strict < 0
2497 && (CONSTANT_P (op) || MEM_P (op)))
2498 /* During reload, accept a pseudo */
2499 || (reload_in_progress && REG_P (op)
2500 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2501 win = 1;
2502 break;
2504 default:
2506 enum reg_class cl;
2508 cl = (c == 'r'
2509 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2510 if (cl != NO_REGS)
2512 if (strict < 0
2513 || (strict == 0
2514 && REG_P (op)
2515 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2516 || (strict == 0 && GET_CODE (op) == SCRATCH)
2517 || (REG_P (op)
2518 && reg_fits_class_p (op, cl, offset, mode)))
2519 win = 1;
2521 #ifdef EXTRA_CONSTRAINT_STR
2522 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2523 win = 1;
2525 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2526 /* Every memory operand can be reloaded to fit. */
2527 && ((strict < 0 && MEM_P (op))
2528 /* Before reload, accept what reload can turn
2529 into mem. */
2530 || (strict < 0 && CONSTANT_P (op))
2531 /* During reload, accept a pseudo */
2532 || (reload_in_progress && REG_P (op)
2533 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2534 win = 1;
2535 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2536 /* Every address operand can be reloaded to fit. */
2537 && strict < 0)
2538 win = 1;
2539 #endif
2540 break;
2543 while (p += len, c);
2545 constraints[opno] = p;
2546 /* If this operand did not win somehow,
2547 this alternative loses. */
2548 if (! win)
2549 lose = 1;
2551 /* This alternative won; the operands are ok.
2552 Change whichever operands this alternative says to change. */
2553 if (! lose)
2555 int opno, eopno;
2557 /* See if any earlyclobber operand conflicts with some other
2558 operand. */
2560 if (strict > 0)
2561 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2562 /* Ignore earlyclobber operands now in memory,
2563 because we would often report failure when we have
2564 two memory operands, one of which was formerly a REG. */
2565 if (earlyclobber[eopno]
2566 && REG_P (recog_data.operand[eopno]))
2567 for (opno = 0; opno < recog_data.n_operands; opno++)
2568 if ((MEM_P (recog_data.operand[opno])
2569 || recog_data.operand_type[opno] != OP_OUT)
2570 && opno != eopno
2571 /* Ignore things like match_operator operands. */
2572 && *recog_data.constraints[opno] != 0
2573 && ! (matching_operands[opno] == eopno
2574 && operands_match_p (recog_data.operand[opno],
2575 recog_data.operand[eopno]))
2576 && ! safe_from_earlyclobber (recog_data.operand[opno],
2577 recog_data.operand[eopno]))
2578 lose = 1;
2580 if (! lose)
2582 while (--funny_match_index >= 0)
2584 recog_data.operand[funny_match[funny_match_index].other]
2585 = recog_data.operand[funny_match[funny_match_index].this];
2588 return 1;
2592 which_alternative++;
2594 while (which_alternative < recog_data.n_alternatives);
2596 which_alternative = -1;
2597 /* If we are about to reject this, but we are not to test strictly,
2598 try a very loose test. Only return failure if it fails also. */
2599 if (strict == 0)
2600 return constrain_operands (-1);
2601 else
2602 return 0;
2605 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2606 is a hard reg in class CL when its regno is offset by OFFSET
2607 and changed to mode MODE.
2608 If OPERAND occupies multiple hard regs, all of them must be in CL. */
2610 int
2611 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2612 enum machine_mode mode)
2614 int regno = REGNO (operand);
2615 if (regno < FIRST_PSEUDO_REGISTER
2616 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2617 regno + offset))
2619 int sr;
2620 regno += offset;
2621 for (sr = hard_regno_nregs[regno][mode] - 1;
2622 sr > 0; sr--)
2623 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2624 regno + sr))
2625 break;
2626 return sr == 0;
2629 return 0;
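/* Sketch, not part of the original file: the typical constraint-checking
   use, accepting a hard register operand only if every hard reg it
   occupies is in class CL.  OP, CL and MODE are hypothetical locals.  */
#if 0
  if (REG_P (op) && reg_fits_class_p (op, cl, 0, mode))
    win = 1;
#endif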
2632 /* Split a single instruction. Helper function for split_all_insns and
2633 split_all_insns_noflow. Return the last insn in the sequence if
2634 successful, or NULL if unsuccessful. */
2636 static rtx
2637 split_insn (rtx insn)
2639 /* Split insns here to get max fine-grain parallelism. */
2640 rtx first = PREV_INSN (insn);
2641 rtx last = try_split (PATTERN (insn), insn, 1);
2643 if (last == insn)
2644 return NULL_RTX;
2646 /* try_split returns the NOTE that INSN became. */
2647 SET_INSN_DELETED (insn);
2649 /* ??? Coddle to md files that generate subregs in post-reload
2650 splitters instead of computing the proper hard register. */
2651 if (reload_completed && first != last)
2653 first = NEXT_INSN (first);
2654 for (;;)
2656 if (INSN_P (first))
2657 cleanup_subreg_operands (first);
2658 if (first == last)
2659 break;
2660 first = NEXT_INSN (first);
2663 return last;
2666 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2668 void
2669 split_all_insns (int upd_life)
2671 sbitmap blocks;
2672 bool changed;
2673 basic_block bb;
2675 blocks = sbitmap_alloc (last_basic_block);
2676 sbitmap_zero (blocks);
2677 changed = false;
2679 FOR_EACH_BB_REVERSE (bb)
2681 rtx insn, next;
2682 bool finish = false;
2684 for (insn = BB_HEAD (bb); !finish ; insn = next)
2686 /* Can't use `next_real_insn' because that might go across
2687 CODE_LABELS and short-out basic blocks. */
2688 next = NEXT_INSN (insn);
2689 finish = (insn == BB_END (bb));
2690 if (INSN_P (insn))
2692 rtx set = single_set (insn);
2694 /* Don't split no-op move insns. These should silently
2695 disappear later in final. Splitting such insns would
2696 break the code that handles REG_NO_CONFLICT blocks. */
2697 if (set && set_noop_p (set))
2699 /* Nops get in the way while scheduling, so delete them
2700 now if register allocation has already been done. It
2701 is too risky to try to do this before register
2702 allocation, and there are unlikely to be very many
2703 nops then anyway. */
2704 if (reload_completed)
2706 /* If the no-op set has a REG_UNUSED note, we need
2707 to update liveness information. */
2708 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2710 SET_BIT (blocks, bb->index);
2711 changed = true;
2713 /* ??? Is life info affected by deleting edges? */
2714 delete_insn_and_edges (insn);
2717 else
2719 rtx last = split_insn (insn);
2720 if (last)
2722 /* The split sequence may include a barrier, but the
2723 BB boundary we are interested in will be set to the
2724 previous one. */
2726 while (BARRIER_P (last))
2727 last = PREV_INSN (last);
2728 SET_BIT (blocks, bb->index);
2729 changed = true;
2736 if (changed)
2738 int old_last_basic_block = last_basic_block;
2740 find_many_sub_basic_blocks (blocks);
2742 if (old_last_basic_block != last_basic_block && upd_life)
2743 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2746 if (changed && upd_life)
2747 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2748 PROP_DEATH_NOTES);
2750 #ifdef ENABLE_CHECKING
2751 verify_flow_info ();
2752 #endif
2754 sbitmap_free (blocks);
2757 /* Same as split_all_insns, but do not expect CFG to be available.
2758 Used by machine dependent reorg passes. */
2760 void
2761 split_all_insns_noflow (void)
2763 rtx next, insn;
2765 for (insn = get_insns (); insn; insn = next)
2767 next = NEXT_INSN (insn);
2768 if (INSN_P (insn))
2770 /* Don't split no-op move insns. These should silently
2771 disappear later in final. Splitting such insns would
2772 break the code that handles REG_NO_CONFLICT blocks. */
2773 rtx set = single_set (insn);
2774 if (set && set_noop_p (set))
2776 /* Nops get in the way while scheduling, so delete them
2777 now if register allocation has already been done. It
2778 is too risky to try to do this before register
2779 allocation, and there are unlikely to be very many
2780 nops then anyway.
2782 ??? Should we use delete_insn when the CFG isn't valid? */
2783 if (reload_completed)
2784 delete_insn_and_edges (insn);
2786 else
2787 split_insn (insn);
2792 #ifdef HAVE_peephole2
2793 struct peep2_insn_data
2795 rtx insn;
2796 regset live_before;
2799 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2800 static int peep2_current;
2802 /* A non-insn marker indicating the last insn of the block.
2803 The live_before regset for this element is correct, indicating
2804 global_live_at_end for the block. */
2805 #define PEEP2_EOB pc_rtx
2807 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2808 does not exist. Used by the recognizer to find the next insn to match
2809 in a multi-insn pattern. */
2811 rtx
2812 peep2_next_insn (int n)
2814 gcc_assert (n < MAX_INSNS_PER_PEEP2 + 1);
2816 n += peep2_current;
2817 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2818 n -= MAX_INSNS_PER_PEEP2 + 1;
2820 if (peep2_insn_data[n].insn == PEEP2_EOB)
2821 return NULL_RTX;
2822 return peep2_insn_data[n].insn;
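/* Sketch, not part of the original file: offsets wrap around the circular
   buffer, so a matcher can look one insn past the current match point.  */
#if 0
  rtx next = peep2_next_insn (1);
  if (next == NULL_RTX)
    return 0;	/* ran off the end of the basic block */
#endif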
2825 /* Return true if REGNO is dead before the Nth non-note insn
2826 after `current'. */
2828 int
2829 peep2_regno_dead_p (int ofs, int regno)
2831 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2833 ofs += peep2_current;
2834 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2835 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2837 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2839 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2842 /* Similarly for a REG. */
2844 int
2845 peep2_reg_dead_p (int ofs, rtx reg)
2847 int regno, n;
2849 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2851 ofs += peep2_current;
2852 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2853 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2855 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2857 regno = REGNO (reg);
2858 n = hard_regno_nregs[regno][GET_MODE (reg)];
2859 while (--n >= 0)
2860 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2861 return 0;
2862 return 1;
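/* Sketch, not part of the original file: a define_peephole2 condition
   typically uses these predicates to require that a value dies inside the
   matched window; the helper name is hypothetical.  */
#if 0
  if (peep2_reg_dead_p (2, operands[0]))	/* dead after the 2nd insn */
    reuse_operand_as_scratch (operands[0]);
#endif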
2865 /* Try to find a hard register of mode MODE, matching the register class in
2866 CLASS_STR, which is available at the beginning of the insn at peephole
2867 offset FROM and remains available until the end of the insn at offset TO
2868 (offsets are interpreted as for peep2_next_insn). If FROM equals TO,
2869 the only condition is that the register must be available before that insn.
2870 Registers that already have bits set in REG_SET will not be considered.
2872 If an appropriate register is available, it will be returned and the
2873 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2874 returned. */
2876 rtx
2877 peep2_find_free_register (int from, int to, const char *class_str,
2878 enum machine_mode mode, HARD_REG_SET *reg_set)
2880 static int search_ofs;
2881 enum reg_class cl;
2882 HARD_REG_SET live;
2883 int i;
2885 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2886 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2888 from += peep2_current;
2889 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2890 from -= MAX_INSNS_PER_PEEP2 + 1;
2891 to += peep2_current;
2892 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2893 to -= MAX_INSNS_PER_PEEP2 + 1;
2895 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2896 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2898 while (from != to)
2900 HARD_REG_SET this_live;
2902 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2903 from = 0;
2904 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2905 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2906 IOR_HARD_REG_SET (live, this_live);
2909 cl = (class_str[0] == 'r' ? GENERAL_REGS
2910 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2912 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2914 int raw_regno, regno, success, j;
2916 /* Distribute the free registers as much as possible. */
2917 raw_regno = search_ofs + i;
2918 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2919 raw_regno -= FIRST_PSEUDO_REGISTER;
2920 #ifdef REG_ALLOC_ORDER
2921 regno = reg_alloc_order[raw_regno];
2922 #else
2923 regno = raw_regno;
2924 #endif
2926 /* Don't allocate fixed registers. */
2927 if (fixed_regs[regno])
2928 continue;
2929 /* Make sure the register is of the right class. */
2930 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2931 continue;
2932 /* And can support the mode we need. */
2933 if (! HARD_REGNO_MODE_OK (regno, mode))
2934 continue;
2935 /* And that we don't create an extra save/restore. */
2936 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2937 continue;
2938 /* And we don't clobber traceback for noreturn functions. */
2939 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2940 && (! reload_completed || frame_pointer_needed))
2941 continue;
2943 success = 1;
2944 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2946 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2947 || TEST_HARD_REG_BIT (live, regno + j))
2949 success = 0;
2950 break;
2953 if (success)
2955 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2956 SET_HARD_REG_BIT (*reg_set, regno + j);
2958 /* Start the next search with the next register. */
2959 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2960 raw_regno = 0;
2961 search_ofs = raw_regno;
2963 return gen_rtx_REG (mode, regno);
2967 search_ofs = 0;
2968 return NULL_RTX;
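/* Sketch, not part of the original file: requesting a general-purpose
   SImode scratch that stays free across the whole matched window.
   MATCH_LEN is hypothetical; the set starts empty so nothing is excluded.  */
#if 0
  HARD_REG_SET used;
  rtx scratch;

  CLEAR_HARD_REG_SET (used);
  scratch = peep2_find_free_register (0, match_len, "r", SImode, &used);
  if (scratch == NULL_RTX)
    return 0;	/* no free register; let the peephole fail */
#endif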
2971 /* Perform the peephole2 optimization pass. */
2973 void
2974 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
2976 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
2977 rtx insn, prev;
2978 regset live;
2979 int i;
2980 basic_block bb;
2981 #ifdef HAVE_conditional_execution
2982 sbitmap blocks;
2983 bool changed;
2984 #endif
2985 bool do_cleanup_cfg = false;
2986 bool do_rebuild_jump_labels = false;
2988 /* Initialize the regsets we're going to use. */
2989 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
2990 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
2991 live = INITIALIZE_REG_SET (rs_heads[i]);
2993 #ifdef HAVE_conditional_execution
2994 blocks = sbitmap_alloc (last_basic_block);
2995 sbitmap_zero (blocks);
2996 changed = false;
2997 #else
2998 count_or_remove_death_notes (NULL, 1);
2999 #endif
3001 FOR_EACH_BB_REVERSE (bb)
3003 struct propagate_block_info *pbi;
3005 /* Indicate that all slots except the last hold invalid data. */
3006 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3007 peep2_insn_data[i].insn = NULL_RTX;
3009 /* Indicate that the last slot contains live_after data. */
3010 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3011 peep2_current = MAX_INSNS_PER_PEEP2;
3013 /* Start up propagation. */
3014 COPY_REG_SET (live, bb->global_live_at_end);
3015 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3017 #ifdef HAVE_conditional_execution
3018 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3019 #else
3020 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3021 #endif
3023 for (insn = BB_END (bb); ; insn = prev)
3025 prev = PREV_INSN (insn);
3026 if (INSN_P (insn))
3028 rtx try, before_try, x;
3029 int match_len;
3030 rtx note;
3031 bool was_call = false;
3033 /* Record this insn. */
3034 if (--peep2_current < 0)
3035 peep2_current = MAX_INSNS_PER_PEEP2;
3036 peep2_insn_data[peep2_current].insn = insn;
3037 propagate_one_insn (pbi, insn);
3038 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3040 /* Match the peephole. */
3041 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3042 if (try != NULL)
3044 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3045 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3046 cfg-related call notes. */
3047 for (i = 0; i <= match_len; ++i)
3049 int j;
3050 rtx old_insn, new_insn, note;
3052 j = i + peep2_current;
3053 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3054 j -= MAX_INSNS_PER_PEEP2 + 1;
3055 old_insn = peep2_insn_data[j].insn;
3056 if (!CALL_P (old_insn))
3057 continue;
3058 was_call = true;
3060 new_insn = try;
3061 while (new_insn != NULL_RTX)
3063 if (CALL_P (new_insn))
3064 break;
3065 new_insn = NEXT_INSN (new_insn);
3068 gcc_assert (new_insn != NULL_RTX);
3070 CALL_INSN_FUNCTION_USAGE (new_insn)
3071 = CALL_INSN_FUNCTION_USAGE (old_insn);
3073 for (note = REG_NOTES (old_insn);
3074 note;
3075 note = XEXP (note, 1))
3076 switch (REG_NOTE_KIND (note))
3078 case REG_NORETURN:
3079 case REG_SETJMP:
3080 case REG_ALWAYS_RETURN:
3081 REG_NOTES (new_insn)
3082 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3083 XEXP (note, 0),
3084 REG_NOTES (new_insn));
3085 default:
3086 /* Discard all other reg notes. */
3087 break;
3090 /* Croak if there is another call in the sequence. */
3091 while (++i <= match_len)
3093 j = i + peep2_current;
3094 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3095 j -= MAX_INSNS_PER_PEEP2 + 1;
3096 old_insn = peep2_insn_data[j].insn;
3097 gcc_assert (!CALL_P (old_insn));
3099 break;
3102 i = match_len + peep2_current;
3103 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3104 i -= MAX_INSNS_PER_PEEP2 + 1;
3106 note = find_reg_note (peep2_insn_data[i].insn,
3107 REG_EH_REGION, NULL_RTX);
3109 /* Replace the old sequence with the new. */
3110 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3111 INSN_LOCATOR (peep2_insn_data[i].insn));
3112 before_try = PREV_INSN (insn);
3113 delete_insn_chain (insn, peep2_insn_data[i].insn);
3115 /* Re-insert the EH_REGION notes. */
3116 if (note || (was_call && nonlocal_goto_handler_labels))
3118 edge eh_edge;
3119 edge_iterator ei;
3121 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3122 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3123 break;
3125 for (x = try ; x != before_try ; x = PREV_INSN (x))
3126 if (CALL_P (x)
3127 || (flag_non_call_exceptions
3128 && may_trap_p (PATTERN (x))
3129 && !find_reg_note (x, REG_EH_REGION, NULL)))
3131 if (note)
3132 REG_NOTES (x)
3133 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3134 XEXP (note, 0),
3135 REG_NOTES (x));
3137 if (x != BB_END (bb) && eh_edge)
3139 edge nfte, nehe;
3140 int flags;
3142 nfte = split_block (bb, x);
3143 flags = (eh_edge->flags
3144 & (EDGE_EH | EDGE_ABNORMAL));
3145 if (CALL_P (x))
3146 flags |= EDGE_ABNORMAL_CALL;
3147 nehe = make_edge (nfte->src, eh_edge->dest,
3148 flags);
3150 nehe->probability = eh_edge->probability;
3151 nfte->probability
3152 = REG_BR_PROB_BASE - nehe->probability;
3154 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3155 #ifdef HAVE_conditional_execution
3156 SET_BIT (blocks, nfte->dest->index);
3157 changed = true;
3158 #endif
3159 bb = nfte->src;
3160 eh_edge = nehe;
3164 /* The replacement may have turned a possibly trapping insn into a
3165 non-trapping one; zap any outgoing edges that are now dead. */
3166 do_cleanup_cfg |= purge_dead_edges (bb);
3169 #ifdef HAVE_conditional_execution
3170 /* With conditional execution, we cannot back up the
3171 live information so easily, since the conditional
3172 death data structures are not so self-contained.
3173 So record that we've made a modification to this
3174 block and update life information at the end. */
3175 SET_BIT (blocks, bb->index);
3176 changed = true;
3178 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3179 peep2_insn_data[i].insn = NULL_RTX;
3180 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3181 #else
3182 /* Back up lifetime information past the end of the
3183 newly created sequence. */
3184 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3185 i = 0;
3186 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3188 /* Update life information for the new sequence. */
3189 x = try;
3190 do
3192 if (INSN_P (x))
3194 if (--i < 0)
3195 i = MAX_INSNS_PER_PEEP2;
3196 peep2_insn_data[i].insn = x;
3197 propagate_one_insn (pbi, x);
3198 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3200 x = PREV_INSN (x);
3202 while (x != prev);
3204 /* ??? Should verify that LIVE now matches what we
3205 had before the new sequence. */
3207 peep2_current = i;
3208 #endif
3210 /* If we generated a jump instruction, it won't have
3211 JUMP_LABEL set. Recompute after we're done. */
3212 for (x = try; x != before_try; x = PREV_INSN (x))
3213 if (JUMP_P (x))
3215 do_rebuild_jump_labels = true;
3216 break;
3221 if (insn == BB_HEAD (bb))
3222 break;
3225 free_propagate_block_info (pbi);
3228 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3229 FREE_REG_SET (peep2_insn_data[i].live_before);
3230 FREE_REG_SET (live);
3232 if (do_rebuild_jump_labels)
3233 rebuild_jump_labels (get_insns ());
3235 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3236 we've changed global life since exception handlers are no longer
3237 reachable. */
3238 if (do_cleanup_cfg)
3240 cleanup_cfg (0);
3241 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3243 #ifdef HAVE_conditional_execution
3244 else
3246 count_or_remove_death_notes (blocks, 1);
3247 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3249 sbitmap_free (blocks);
3250 #endif
3252 #endif /* HAVE_peephole2 */
3254 /* Common predicates for use with define_bypass. */
3256 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3257 data, not the address operand(s), of the store. IN_INSN must be a
3258 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3259 SETs inside. */
3261 int
3262 store_data_bypass_p (rtx out_insn, rtx in_insn)
3264 rtx out_set, in_set;
3266 in_set = single_set (in_insn);
3267 gcc_assert (in_set);
3269 if (!MEM_P (SET_DEST (in_set)))
3270 return false;
3272 out_set = single_set (out_insn);
3273 if (out_set)
3275 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3276 return false;
3278 else
3280 rtx out_pat;
3281 int i;
3283 out_pat = PATTERN (out_insn);
3284 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3286 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3288 rtx exp = XVECEXP (out_pat, 0, i);
3290 if (GET_CODE (exp) == CLOBBER)
3291 continue;
3293 gcc_assert (GET_CODE (exp) == SET);
3295 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3296 return false;
3300 return true;
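/* Sketch, not part of the original file: a backend's define_bypass guard
   can delegate straight to this predicate.  The function name is
   hypothetical and would be quoted in the md file's (define_bypass).  */
#if 0
static int
my_store_bypass_guard (rtx out_insn, rtx in_insn)
{
  return store_data_bypass_p (out_insn, in_insn);
}
#endif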
3303 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3304 condition, and not the THEN or ELSE branch. OUT_INSN may be a single or
3305 multiple set; IN_INSN should be a single_set for the result to be meaningful,
3306 but for convenience of insn categorization it may be any JUMP or CALL insn. */
3308 int
3309 if_test_bypass_p (rtx out_insn, rtx in_insn)
3311 rtx out_set, in_set;
3313 in_set = single_set (in_insn);
3314 if (! in_set)
3316 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3317 return false;
3320 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3321 return false;
3322 in_set = SET_SRC (in_set);
3324 out_set = single_set (out_insn);
3325 if (out_set)
3327 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3328 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3329 return false;
3331 else
3333 rtx out_pat;
3334 int i;
3336 out_pat = PATTERN (out_insn);
3337 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3339 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3341 rtx exp = XVECEXP (out_pat, 0, i);
3343 if (GET_CODE (exp) == CLOBBER)
3344 continue;
3346 gcc_assert (GET_CODE (exp) == SET);
3348 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3349 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3350 return false;
3354 return true;
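/* Sketch, not part of the original file: the companion guard for
   dependences feeding a conditional move or branch, again with a
   hypothetical name quoted from (define_bypass) in an md file.  */
#if 0
static int
my_if_test_bypass_guard (rtx out_insn, rtx in_insn)
{
  return if_test_bypass_p (out_insn, in_insn);
}
#endif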