/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
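
/* For illustration (expository note, not load-bearing): on a 32-bit
   STACK_GROWS_DOWNWARD target the default codes above mean that a
   one-word push is addressed as

       (mem:SI (pre_dec:SI (reg sp)))

   and the matching pop as

       (mem:SI (post_inc:SI (reg sp)))

   push_operand and pop_operand below check for exactly these address
   codes.  */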
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument,
   defaulting UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new, bool in_group)
{
  return validate_change_1 (object, loc, new, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument,
   defaulting UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new, bool in_group)
{
  return validate_change_1 (object, loc, new, in_group, true);
}
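
/* A minimal usage sketch of the single-change mode (hypothetical helper,
   for exposition; assumes PATTERN (INSN) is a single SET).  With
   IN_GROUP == 0 the edit is kept only if the modified insn is still
   recognizable; otherwise it is quietly undone.  */

static bool ATTRIBUTE_UNUSED
example_replace_set_src (rtx insn, rtx new_src)
{
  return validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0);
}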
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
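
/* A minimal usage sketch of the group protocol (hypothetical helper, for
   exposition; assumes no other changes are pending).  Both edits are
   queued with IN_GROUP == 1 and then accepted or rolled back together.  */

static bool ATTRIBUTE_UNUSED
example_change_group (rtx insn, rtx *loc1, rtx new1, rtx *loc2, rtx new2)
{
  validate_change (insn, loc1, new1, 1);
  validate_change (insn, loc2, new2, 1);
  return apply_change_group () != 0;
}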
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already-replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
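
/* A minimal usage sketch (hypothetical helper, for exposition):
   substitute one pseudo for another throughout INSN.  Because the
   substitution is applied as a single change group, either every
   occurrence is replaced and INSN still matches a pattern, or nothing
   is changed at all.  */

static bool ATTRIBUTE_UNUSED
example_substitute_reg (rtx insn, rtx old_reg, rtx new_reg)
{
  return validate_replace_rtx (old_reg, new_reg, insn) != 0;
}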
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try simplifying INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
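
/* For illustration: general_operand is the usual predicate in machine
   descriptions.  A pattern such as (sketch only; real patterns vary)

       (define_insn "..."
         [(set (match_operand:SI 0 "general_operand" "=g")
               (match_operand:SI 1 "general_operand" "g"))]
         "..."
         "...")

   accepts any SImode register, valid memory reference, or legitimate
   constant in operand 1 at recognition time.  */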
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
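
/* For illustration: when PUSH_ROUNDING widens the slot -- say a QImode
   push on a hypothetical target that rounds pushes up to 4 bytes with a
   downward-growing stack -- the address must spell out the rounded
   adjustment with PRE_MODIFY:

       (mem:QI (pre_modify:SI (reg sp)
                              (plus:SI (reg sp) (const_int -4))))

   which is exactly the shape checked above.  */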
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
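
/* For illustration: comparison_operator with MATCH_OPERATOR lets one
   pattern cover all conditional branches (sketch in the style of cc0
   targets; real patterns vary):

       (set (pc)
            (if_then_else (match_operator 0 "comparison_operator"
                            [(cc0) (const_int 0)])
                          (label_ref (match_operand 1 "" ""))
                          (pc)))

   Such a pattern matches EQ, NE, LT, LE, GT, GE and friends alike.  */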
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original
                 insns then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
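
/* For illustration, the shapes distinguished above correspond to
   source-level asms roughly as follows:

       asm ("..." : : "r" (x));                  (asm_operands ...)    -> 1
       asm ("..." : "=r" (y) : "r" (x));         (set ...
                                                   (asm_operands ...)) -> 2
       asm ("..." : "=r" (y) : : "cc");          (parallel [(set ...)
                                                   (clobber ...)])     -> 1

   i.e. the result counts outputs plus inputs, never clobbers.  */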
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.
         Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;             /* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }

  if (loc)
    {
#ifdef USE_MAPPED_LOCATION
      *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
#else
      loc->file = ASM_OPERANDS_SOURCE_FILE (asmop);
      loc->line = ASM_OPERANDS_SOURCE_LINE (asmop);
#endif
    }

  return ASM_OPERANDS_TEMPLATE (asmop);
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
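
/* Worked example (for illustration): checking a 4-byte SImode reference
   whose address is (plus (reg) (const_int 20)).  The constant term is
   temporarily rewritten to 20 + 4 - 1 = 23 and (plus (reg) (const_int 23))
   is tested as a QImode address; if that is still legitimate, every byte
   of the 4-byte object is reachable, so the address is offsettable.  */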
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}

/* Like extract_insn, but save the insn extracted and don't extract again,
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}

/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many
             operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode, NULL);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands get their mode from their real
             operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
}
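
/* A minimal usage sketch (hypothetical helper, for exposition): a pass
   that wants an insn's operands rather than its raw pattern.  */

static void ATTRIBUTE_UNUSED
example_walk_operands (rtx insn)
{
  int i;

  extract_insn (insn);  /* Fill in recog_data for INSN.  */
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] != OP_IN)
      {
        /* recog_data.operand[i] is written by INSN; its location within
           the pattern is recog_data.operand_loc[i].  */
      }
}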
2017 /* After calling extract_insn, you can use this function to extract some
2018 information from the constraint strings into a more usable form.
2019 The collected data is stored in recog_op_alt. */
2020 void
2021 preprocess_constraints (void)
2023 int i;
2025 for (i = 0; i < recog_data.n_operands; i++)
2026 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2027 * sizeof (struct operand_alternative)));
2029 for (i = 0; i < recog_data.n_operands; i++)
2031 int j;
2032 struct operand_alternative *op_alt;
2033 const char *p = recog_data.constraints[i];
2035 op_alt = recog_op_alt[i];
2037 for (j = 0; j < recog_data.n_alternatives; j++)
2039 op_alt[j].cl = NO_REGS;
2040 op_alt[j].constraint = p;
2041 op_alt[j].matches = -1;
2042 op_alt[j].matched = -1;
2044 if (*p == '\0' || *p == ',')
2046 op_alt[j].anything_ok = 1;
2047 continue;
2050 for (;;)
2052 char c = *p;
2053 if (c == '#')
2054 do
2055 c = *++p;
2056 while (c != ',' && c != '\0');
2057 if (c == ',' || c == '\0')
2059 p++;
2060 break;
2063 switch (c)
2065 case '=': case '+': case '*': case '%':
2066 case 'E': case 'F': case 'G': case 'H':
2067 case 's': case 'i': case 'n':
2068 case 'I': case 'J': case 'K': case 'L':
2069 case 'M': case 'N': case 'O': case 'P':
2070 /* These don't say anything we care about. */
2071 break;
2073 case '?':
2074 op_alt[j].reject += 6;
2075 break;
2076 case '!':
2077 op_alt[j].reject += 600;
2078 break;
2079 case '&':
2080 op_alt[j].earlyclobber = 1;
2081 break;
2083 case '0': case '1': case '2': case '3': case '4':
2084 case '5': case '6': case '7': case '8': case '9':
2086 char *end;
2087 op_alt[j].matches = strtoul (p, &end, 10);
2088 recog_op_alt[op_alt[j].matches][j].matched = i;
2089 p = end;
2091 continue;
2093 case 'm':
2094 op_alt[j].memory_ok = 1;
2095 break;
2096 case '<':
2097 op_alt[j].decmem_ok = 1;
2098 break;
2099 case '>':
2100 op_alt[j].incmem_ok = 1;
2101 break;
2102 case 'V':
2103 op_alt[j].nonoffmem_ok = 1;
2104 break;
2105 case 'o':
2106 op_alt[j].offmem_ok = 1;
2107 break;
2108 case 'X':
2109 op_alt[j].anything_ok = 1;
2110 break;
2112 case 'p':
2113 op_alt[j].is_address = 1;
2114 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2115 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2116 break;
2118 case 'g':
2119 case 'r':
2120 op_alt[j].cl =
2121 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2122 break;
2124 default:
2125 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2127 op_alt[j].memory_ok = 1;
2128 break;
2130 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2132 op_alt[j].is_address = 1;
2133 op_alt[j].cl
2134 = (reg_class_subunion
2135 [(int) op_alt[j].cl]
2136 [(int) base_reg_class (VOIDmode, ADDRESS,
2137 SCRATCH)]);
2138 break;
2141 op_alt[j].cl
2142 = (reg_class_subunion
2143 [(int) op_alt[j].cl]
2144 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2145 break;
2147 p += CONSTRAINT_LEN (c, p);
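/* A minimal sketch (illustrative only, not part of this file): scanning
   the recog_op_alt matrix that preprocess_constraints fills in, in the
   style of passes such as regrename.  */
#if 0
static void
example_scan_op_alt (rtx insn)
{
  int op, alt;
  extract_insn (insn);
  preprocess_constraints ();
  for (op = 0; op < recog_data.n_operands; op++)
    for (alt = 0; alt < recog_data.n_alternatives; alt++)
      if (recog_op_alt[op][alt].cl != NO_REGS
          && ! recog_op_alt[op][alt].earlyclobber)
        ;  /* Operand OP may use class `cl' under alternative ALT.  */
}
#endif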
2153 /* Check the operands of an insn against the insn's operand constraints
2154 and return 1 if they are valid.
2155 The information about the insn's operands, constraints, operand modes
2156 etc. is obtained from the global variables set up by extract_insn.
2158 WHICH_ALTERNATIVE is set to a number which indicates which
2159 alternative of constraints was matched: 0 for the first alternative,
2160 1 for the next, etc.
2162 In addition, when two operands are required to match
2163 and it happens that the output operand is (reg) while the
2164 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2165 make the output operand look like the input.
2166 This is because the output operand is the one the template will print.
2168 This is used in final, just before printing the assembler code and by
2169 the routines that determine an insn's attribute.
2171 If STRICT is positive, it means that we have been
2172 called after reload has been completed. In that case, we must
2173 do all checks strictly. If it is zero, it means that we have been called
2174 before reload has completed. In that case, we first try to see if we can
2175 find an alternative that matches strictly. If not, we try again, this
2176 time assuming that reload will fix up the insn. This provides a "best
2177 guess" for the alternative and is used to compute attributes of insns prior
2178 to reload. A negative value of STRICT is used for this internal call. */
2180 struct funny_match
2182 int this, other;
2185 int
2186 constrain_operands (int strict)
2188 const char *constraints[MAX_RECOG_OPERANDS];
2189 int matching_operands[MAX_RECOG_OPERANDS];
2190 int earlyclobber[MAX_RECOG_OPERANDS];
2191 int c;
2193 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2194 int funny_match_index;
2196 which_alternative = 0;
2197 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2198 return 1;
2200 for (c = 0; c < recog_data.n_operands; c++)
2202 constraints[c] = recog_data.constraints[c];
2203 matching_operands[c] = -1;
2206 do
2208 int seen_earlyclobber_at = -1;
2209 int opno;
2210 int lose = 0;
2211 funny_match_index = 0;
2213 for (opno = 0; opno < recog_data.n_operands; opno++)
2215 rtx op = recog_data.operand[opno];
2216 enum machine_mode mode = GET_MODE (op);
2217 const char *p = constraints[opno];
2218 int offset = 0;
2219 int win = 0;
2220 int val;
2221 int len;
2223 earlyclobber[opno] = 0;
2225 /* A unary operator may be accepted by the predicate, but it
2226 is irrelevant for matching constraints. */
2227 if (UNARY_P (op))
2228 op = XEXP (op, 0);
2230 if (GET_CODE (op) == SUBREG)
2232 if (REG_P (SUBREG_REG (op))
2233 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2234 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2235 GET_MODE (SUBREG_REG (op)),
2236 SUBREG_BYTE (op),
2237 GET_MODE (op));
2238 op = SUBREG_REG (op);
2241 /* An empty constraint or empty alternative
2242 allows anything which matched the pattern. */
2243 if (*p == 0 || *p == ',')
2244 win = 1;
2245 else
2246 do
2247 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2249 case '\0':
2250 len = 0;
2251 break;
2252 case ',':
2253 c = '\0';
2254 break;
2256 case '?': case '!': case '*': case '%':
2257 case '=': case '+':
2258 break;
2260 case '#':
2261 /* Ignore rest of this alternative as far as
2262 constraint checking is concerned. */
2263 do
2264 p++;
2265 while (*p && *p != ',');
2266 len = 0;
2267 break;
2269 case '&':
2270 earlyclobber[opno] = 1;
2271 if (seen_earlyclobber_at < 0)
2272 seen_earlyclobber_at = opno;
2273 break;
2275 case '0': case '1': case '2': case '3': case '4':
2276 case '5': case '6': case '7': case '8': case '9':
2278 /* This operand must be the same as a previous one.
2279 This kind of constraint is used for instructions such
2280 as add when they take only two operands.
2282 Note that the lower-numbered operand is passed first.
2284 If we are not testing strictly, assume that this
2285 constraint will be satisfied. */
2287 char *end;
2288 int match;
2290 match = strtoul (p, &end, 10);
2291 p = end;
2293 if (strict < 0)
2294 val = 1;
2295 else
2297 rtx op1 = recog_data.operand[match];
2298 rtx op2 = recog_data.operand[opno];
2300 /* A unary operator may be accepted by the predicate,
2301 but it is irrelevant for matching constraints. */
2302 if (UNARY_P (op1))
2303 op1 = XEXP (op1, 0);
2304 if (UNARY_P (op2))
2305 op2 = XEXP (op2, 0);
2307 val = operands_match_p (op1, op2);
2310 matching_operands[opno] = match;
2311 matching_operands[match] = opno;
2313 if (val != 0)
2314 win = 1;
2316 /* If output is *x and input is *--x, arrange later
2317 to change the output to *--x as well, since the
2318 output op is the one that will be printed. */
2319 if (val == 2 && strict > 0)
2321 funny_match[funny_match_index].this = opno;
2322 funny_match[funny_match_index++].other = match;
2325 len = 0;
2326 break;
2328 case 'p':
2329 /* p is used for address_operands. When we are called by
2330 gen_reload, no one will have checked that the address is
2331 strictly valid, i.e., that all pseudos requiring hard regs
2332 have gotten them. */
2333 if (strict <= 0
2334 || (strict_memory_address_p (recog_data.operand_mode[opno],
2335 op)))
2336 win = 1;
2337 break;
2339 /* No need to check general_operand again;
2340 it was done in insn-recog.c. Well, except that reload
2341 doesn't check the validity of its replacements, but
2342 that should only matter when there's a bug. */
2343 case 'g':
2344 /* Anything goes unless it is a REG and really has a hard reg
2345 but the hard reg is not in the class GENERAL_REGS. */
2346 if (REG_P (op))
2348 if (strict < 0
2349 || GENERAL_REGS == ALL_REGS
2350 || (reload_in_progress
2351 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2352 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2353 win = 1;
2355 else if (strict < 0 || general_operand (op, mode))
2356 win = 1;
2357 break;
2359 case 'X':
2360 /* This is used for a MATCH_SCRATCH in the cases when
2361 we don't actually need anything. So anything goes
2362 any time. */
2363 win = 1;
2364 break;
2366 case 'm':
2367 /* Memory operands must be valid, to the extent
2368 required by STRICT. */
2369 if (MEM_P (op))
2371 if (strict > 0
2372 && !strict_memory_address_p (GET_MODE (op),
2373 XEXP (op, 0)))
2374 break;
2375 if (strict == 0
2376 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2377 break;
2378 win = 1;
2380 /* Before reload, accept what reload can turn into mem. */
2381 else if (strict < 0 && CONSTANT_P (op))
2382 win = 1;
2383 /* During reload, accept a pseudo. */
2384 else if (reload_in_progress && REG_P (op)
2385 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2386 win = 1;
2387 break;
2389 case '<':
2390 if (MEM_P (op)
2391 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2392 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2393 win = 1;
2394 break;
2396 case '>':
2397 if (MEM_P (op)
2398 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2399 || GET_CODE (XEXP (op, 0)) == POST_INC))
2400 win = 1;
2401 break;
2403 case 'E':
2404 case 'F':
2405 if (GET_CODE (op) == CONST_DOUBLE
2406 || (GET_CODE (op) == CONST_VECTOR
2407 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2408 win = 1;
2409 break;
2411 case 'G':
2412 case 'H':
2413 if (GET_CODE (op) == CONST_DOUBLE
2414 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2415 win = 1;
2416 break;
2418 case 's':
2419 if (GET_CODE (op) == CONST_INT
2420 || (GET_CODE (op) == CONST_DOUBLE
2421 && GET_MODE (op) == VOIDmode))
2422 break;
2423 case 'i':
2424 if (CONSTANT_P (op))
2425 win = 1;
2426 break;
2428 case 'n':
2429 if (GET_CODE (op) == CONST_INT
2430 || (GET_CODE (op) == CONST_DOUBLE
2431 && GET_MODE (op) == VOIDmode))
2432 win = 1;
2433 break;
2435 case 'I':
2436 case 'J':
2437 case 'K':
2438 case 'L':
2439 case 'M':
2440 case 'N':
2441 case 'O':
2442 case 'P':
2443 if (GET_CODE (op) == CONST_INT
2444 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2445 win = 1;
2446 break;
2448 case 'V':
2449 if (MEM_P (op)
2450 && ((strict > 0 && ! offsettable_memref_p (op))
2451 || (strict < 0
2452 && !(CONSTANT_P (op) || MEM_P (op)))
2453 || (reload_in_progress
2454 && !(REG_P (op)
2455 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2456 win = 1;
2457 break;
2459 case 'o':
2460 if ((strict > 0 && offsettable_memref_p (op))
2461 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2462 /* Before reload, accept what reload can handle. */
2463 || (strict < 0
2464 && (CONSTANT_P (op) || MEM_P (op)))
2465 /* During reload, accept a pseudo. */
2466 || (reload_in_progress && REG_P (op)
2467 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2468 win = 1;
2469 break;
2471 default:
2473 enum reg_class cl;
2475 cl = (c == 'r'
2476 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2477 if (cl != NO_REGS)
2479 if (strict < 0
2480 || (strict == 0
2481 && REG_P (op)
2482 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2483 || (strict == 0 && GET_CODE (op) == SCRATCH)
2484 || (REG_P (op)
2485 && reg_fits_class_p (op, cl, offset, mode)))
2486 win = 1;
2488 #ifdef EXTRA_CONSTRAINT_STR
2489 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2490 win = 1;
2492 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2493 /* Every memory operand can be reloaded to fit. */
2494 && ((strict < 0 && MEM_P (op))
2495 /* Before reload, accept what reload can turn
2496 into mem. */
2497 || (strict < 0 && CONSTANT_P (op))
2498 /* During reload, accept a pseudo. */
2499 || (reload_in_progress && REG_P (op)
2500 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2501 win = 1;
2502 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2503 /* Every address operand can be reloaded to fit. */
2504 && strict < 0)
2505 win = 1;
2506 #endif
2507 break;
2510 while (p += len, c);
2512 constraints[opno] = p;
2513 /* If this operand did not win somehow,
2514 this alternative loses. */
2515 if (! win)
2516 lose = 1;
2518 /* This alternative won; the operands are ok.
2519 Change whichever operands this alternative says to change. */
2520 if (! lose)
2522 int opno, eopno;
2524 /* See if any earlyclobber operand conflicts with some other
2525 operand. */
2527 if (strict > 0 && seen_earlyclobber_at >= 0)
2528 for (eopno = seen_earlyclobber_at;
2529 eopno < recog_data.n_operands;
2530 eopno++)
2531 /* Ignore earlyclobber operands now in memory,
2532 because we would often report failure when we have
2533 two memory operands, one of which was formerly a REG. */
2534 if (earlyclobber[eopno]
2535 && REG_P (recog_data.operand[eopno]))
2536 for (opno = 0; opno < recog_data.n_operands; opno++)
2537 if ((MEM_P (recog_data.operand[opno])
2538 || recog_data.operand_type[opno] != OP_OUT)
2539 && opno != eopno
2540 /* Ignore things like match_operator operands. */
2541 && *recog_data.constraints[opno] != 0
2542 && ! (matching_operands[opno] == eopno
2543 && operands_match_p (recog_data.operand[opno],
2544 recog_data.operand[eopno]))
2545 && ! safe_from_earlyclobber (recog_data.operand[opno],
2546 recog_data.operand[eopno]))
2547 lose = 1;
2549 if (! lose)
2551 while (--funny_match_index >= 0)
2553 recog_data.operand[funny_match[funny_match_index].other]
2554 = recog_data.operand[funny_match[funny_match_index].this];
2557 return 1;
2561 which_alternative++;
2563 while (which_alternative < recog_data.n_alternatives);
2565 which_alternative = -1;
2566 /* If we are about to reject this, but we are not to test strictly,
2567 try a very loose test. Only return failure if it fails also. */
2568 if (strict == 0)
2569 return constrain_operands (-1);
2570 else
2571 return 0;
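/* A minimal sketch (illustrative only, not part of this file) of the
   STRICT regimes described above, as a caller would select them.  */
#if 0
static int
example_validate (rtx insn)
{
  extract_insn (insn);
  /* Before reload this passes 0, so a failed strict match is retried
     internally with -1 ("assume reload will fix it up"); after reload
     it passes 1 and every check must hold exactly.  */
  return constrain_operands (reload_completed);
}
#endif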
2574 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2575 is a hard reg in class CL when its regno is offset by OFFSET
2576 and changed to mode MODE.
2577 If OPERAND occupies multiple hard regs, all of them must be in CL. */
2579 int
2580 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2581 enum machine_mode mode)
2583 int regno = REGNO (operand);
2585 if (cl == NO_REGS)
2586 return 0;
2588 return (regno < FIRST_PSEUDO_REGISTER
2589 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2590 mode, regno + offset));
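/* For example (illustrative only), a caller that has verified OP is a REG
   can ask whether it fits GENERAL_REGS unchanged:

     reg_fits_class_p (op, GENERAL_REGS, 0, GET_MODE (op))

   A nonzero OFFSET arises only when a SUBREG shifted the hard register
   number, as computed in constrain_operands above.  */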
2593 /* Split single instruction. Helper function for split_all_insns and
2594 split_all_insns_noflow. Return last insn in the sequence if successful,
2595 or NULL if unsuccessful. */
2597 static rtx
2598 split_insn (rtx insn)
2600 /* Split insns here to get max fine-grain parallelism. */
2601 rtx first = PREV_INSN (insn);
2602 rtx last = try_split (PATTERN (insn), insn, 1);
2604 if (last == insn)
2605 return NULL_RTX;
2607 /* try_split returns the NOTE that INSN became. */
2608 SET_INSN_DELETED (insn);
2610 /* ??? Cater to md files that generate subregs in post-reload
2611 splitters instead of computing the proper hard register. */
2612 if (reload_completed && first != last)
2614 first = NEXT_INSN (first);
2615 for (;;)
2617 if (INSN_P (first))
2618 cleanup_subreg_operands (first);
2619 if (first == last)
2620 break;
2621 first = NEXT_INSN (first);
2624 return last;
2627 /* Split all insns in the function. */
2629 void
2630 split_all_insns (void)
2632 sbitmap blocks;
2633 bool changed;
2634 basic_block bb;
2636 blocks = sbitmap_alloc (last_basic_block);
2637 sbitmap_zero (blocks);
2638 changed = false;
2640 FOR_EACH_BB_REVERSE (bb)
2642 rtx insn, next;
2643 bool finish = false;
2645 for (insn = BB_HEAD (bb); !finish ; insn = next)
2647 /* Can't use `next_real_insn' because that might go across
2648 CODE_LABELS and short-out basic blocks. */
2649 next = NEXT_INSN (insn);
2650 finish = (insn == BB_END (bb));
2651 if (INSN_P (insn))
2653 rtx set = single_set (insn);
2655 /* Don't split no-op move insns. These should silently
2656 disappear later in final. Splitting such insns would
2657 break the code that handles REG_NO_CONFLICT blocks. */
2658 if (set && set_noop_p (set))
2660 /* Nops get in the way while scheduling, so delete them
2661 now if register allocation has already been done. It
2662 is too risky to try to do this before register
2663 allocation, and there are unlikely to be very many
2664 nops then anyway. */
2665 if (reload_completed)
2666 delete_insn_and_edges (insn);
2668 else
2670 rtx last = split_insn (insn);
2671 if (last)
2673 /* The split sequence may include a barrier, but the
2674 BB boundary we are interested in will be set to the
2675 previous one. */
2677 while (BARRIER_P (last))
2678 last = PREV_INSN (last);
2679 SET_BIT (blocks, bb->index);
2680 changed = true;
2687 if (changed)
2688 find_many_sub_basic_blocks (blocks);
2690 #ifdef ENABLE_CHECKING
2691 verify_flow_info ();
2692 #endif
2694 sbitmap_free (blocks);
2697 /* Same as split_all_insns, but do not expect CFG to be available.
2698 Used by machine dependent reorg passes. */
2700 unsigned int
2701 split_all_insns_noflow (void)
2703 rtx next, insn;
2705 for (insn = get_insns (); insn; insn = next)
2707 next = NEXT_INSN (insn);
2708 if (INSN_P (insn))
2710 /* Don't split no-op move insns. These should silently
2711 disappear later in final. Splitting such insns would
2712 break the code that handles REG_NO_CONFLICT blocks. */
2713 rtx set = single_set (insn);
2714 if (set && set_noop_p (set))
2716 /* Nops get in the way while scheduling, so delete them
2717 now if register allocation has already been done. It
2718 is too risky to try to do this before register
2719 allocation, and there are unlikely to be very many
2720 nops then anyway.
2722 ??? Should we use delete_insn when the CFG isn't valid? */
2723 if (reload_completed)
2724 delete_insn_and_edges (insn);
2726 else
2727 split_insn (insn);
2730 return 0;
2733 #ifdef HAVE_peephole2
2734 struct peep2_insn_data
2736 rtx insn;
2737 regset live_before;
2740 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2741 static int peep2_current;
2742 /* The number of instructions available to match a peep2. */
2743 int peep2_current_count;
2745 /* A non-insn marker indicating the last insn of the block.
2746 The live_before regset for this element is correct, indicating
2747 DF_LIVE_OUT for the block. */
2748 #define PEEP2_EOB pc_rtx
2750 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2751 does not exist. Used by the recognizer to find the next insn to match
2752 in a multi-insn pattern. */
2754 rtx
2755 peep2_next_insn (int n)
2757 gcc_assert (n <= peep2_current_count);
2759 n += peep2_current;
2760 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2761 n -= MAX_INSNS_PER_PEEP2 + 1;
2763 return peep2_insn_data[n].insn;
2766 /* Return true if REGNO is dead before the Nth non-note insn
2767 after `current'. */
2769 int
2770 peep2_regno_dead_p (int ofs, int regno)
2772 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2774 ofs += peep2_current;
2775 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2776 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2778 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2780 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2783 /* Similarly for a REG. */
2785 int
2786 peep2_reg_dead_p (int ofs, rtx reg)
2788 int regno, n;
2790 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2792 ofs += peep2_current;
2793 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2794 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2796 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2798 regno = REGNO (reg);
2799 n = hard_regno_nregs[regno][GET_MODE (reg)];
2800 while (--n >= 0)
2801 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2802 return 0;
2803 return 1;
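/* A minimal sketch (illustrative only, not part of this file) of how code
   generated from a define_peephole2 uses the window accessors above.  */
#if 0
static rtx
example_peep2_guard (rtx curr_insn)
{
  rtx next = peep2_next_insn (1);
  if (next == NULL_RTX)
    return NULL_RTX;
  /* Assumes CURR_INSN's pattern is a bare SET; only combine when its
     destination is dead after the two-insn window.  */
  if (! peep2_reg_dead_p (2, SET_DEST (PATTERN (curr_insn))))
    return NULL_RTX;
  return next;
}
#endif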
2806 /* Try to find a hard register of mode MODE, matching the register class in
2807 CLASS_STR, which is available from the start of the insn at window offset
2808 FROM and remains available until the end of the insn at window offset TO.
2809 FROM and TO are interpreted relative to the current peephole2 position,
2810 just as for peep2_next_insn.
2811 Registers that already have bits set in REG_SET will not be considered.
2813 If an appropriate register is available, it will be returned and the
2814 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2815 returned. */
2817 rtx
2818 peep2_find_free_register (int from, int to, const char *class_str,
2819 enum machine_mode mode, HARD_REG_SET *reg_set)
2821 static int search_ofs;
2822 enum reg_class cl;
2823 HARD_REG_SET live;
2824 int i;
2826 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2827 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2829 from += peep2_current;
2830 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2831 from -= MAX_INSNS_PER_PEEP2 + 1;
2832 to += peep2_current;
2833 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2834 to -= MAX_INSNS_PER_PEEP2 + 1;
2836 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2837 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2839 while (from != to)
2841 HARD_REG_SET this_live;
2843 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2844 from = 0;
2845 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2846 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2847 IOR_HARD_REG_SET (live, this_live);
2850 cl = (class_str[0] == 'r' ? GENERAL_REGS
2851 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2853 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2855 int raw_regno, regno, success, j;
2857 /* Distribute the free registers as much as possible. */
2858 raw_regno = search_ofs + i;
2859 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2860 raw_regno -= FIRST_PSEUDO_REGISTER;
2861 #ifdef REG_ALLOC_ORDER
2862 regno = reg_alloc_order[raw_regno];
2863 #else
2864 regno = raw_regno;
2865 #endif
2867 /* Don't allocate fixed registers. */
2868 if (fixed_regs[regno])
2869 continue;
2870 /* Make sure the register is of the right class. */
2871 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2872 continue;
2873 /* And can support the mode we need. */
2874 if (! HARD_REGNO_MODE_OK (regno, mode))
2875 continue;
2876 /* And that we don't create an extra save/restore. */
2877 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
2878 continue;
2879 /* And we don't clobber traceback for noreturn functions. */
2880 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2881 && (! reload_completed || frame_pointer_needed))
2882 continue;
2884 success = 1;
2885 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2887 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2888 || TEST_HARD_REG_BIT (live, regno + j))
2890 success = 0;
2891 break;
2894 if (success)
2896 add_to_hard_reg_set (reg_set, mode, regno);
2898 /* Start the next search with the next register. */
2899 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2900 raw_regno = 0;
2901 search_ofs = raw_regno;
2903 return gen_rtx_REG (mode, regno);
2907 search_ofs = 0;
2908 return NULL_RTX;
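/* A minimal sketch (illustrative only, not part of this file): requesting
   a scratch general register that stays free across a two-insn window, as
   a define_peephole2 condition might.  */
#if 0
  {
    HARD_REG_SET used;
    rtx scratch;
    CLEAR_HARD_REG_SET (used);
    /* A fresh REG rtx in class "r" of mode SImode, or NULL_RTX.  */
    scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
  }
#endif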
2911 /* Perform the peephole2 optimization pass. */
2913 static void
2914 peephole2_optimize (void)
2916 rtx insn, prev;
2917 bitmap live;
2918 int i;
2919 basic_block bb;
2920 bool do_cleanup_cfg = false;
2921 bool do_rebuild_jump_labels = false;
2923 df_set_flags (DF_LR_RUN_DCE);
2924 df_analyze ();
2926 /* Initialize the regsets we're going to use. */
2927 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
2928 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
2929 live = BITMAP_ALLOC (&reg_obstack);
2931 FOR_EACH_BB_REVERSE (bb)
2933 /* Indicate that all slots except the last hold invalid data. */
2934 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
2935 peep2_insn_data[i].insn = NULL_RTX;
2936 peep2_current_count = 0;
2938 /* Indicate that the last slot contains live_after data. */
2939 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
2940 peep2_current = MAX_INSNS_PER_PEEP2;
2942 /* Start up propagation. */
2943 bitmap_copy (live, DF_LR_OUT (bb));
2944 df_simulate_artificial_refs_at_end (bb, live);
2945 bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
2947 for (insn = BB_END (bb); ; insn = prev)
2949 prev = PREV_INSN (insn);
2950 if (INSN_P (insn))
2952 rtx try, before_try, x;
2953 int match_len;
2954 rtx note;
2955 bool was_call = false;
2957 /* Record this insn. */
2958 if (--peep2_current < 0)
2959 peep2_current = MAX_INSNS_PER_PEEP2;
2960 if (peep2_current_count < MAX_INSNS_PER_PEEP2
2961 && peep2_insn_data[peep2_current].insn == NULL_RTX)
2962 peep2_current_count++;
2963 peep2_insn_data[peep2_current].insn = insn;
2964 df_simulate_one_insn_backwards (bb, insn, live);
2965 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
2967 if (RTX_FRAME_RELATED_P (insn))
2969 /* If an insn has RTX_FRAME_RELATED_P set, peephole
2970 substitution would lose the
2971 REG_FRAME_RELATED_EXPR that is attached. */
2972 peep2_current_count = 0;
2973 try = NULL;
2975 else
2976 /* Match the peephole. */
2977 try = peephole2_insns (PATTERN (insn), insn, &match_len);
2979 if (try != NULL)
2981 /* If we are splitting a CALL_INSN, look for the CALL_INSN
2982 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
2983 cfg-related call notes. */
2984 for (i = 0; i <= match_len; ++i)
2986 int j;
2987 rtx old_insn, new_insn, note;
2989 j = i + peep2_current;
2990 if (j >= MAX_INSNS_PER_PEEP2 + 1)
2991 j -= MAX_INSNS_PER_PEEP2 + 1;
2992 old_insn = peep2_insn_data[j].insn;
2993 if (!CALL_P (old_insn))
2994 continue;
2995 was_call = true;
2997 new_insn = try;
2998 while (new_insn != NULL_RTX)
3000 if (CALL_P (new_insn))
3001 break;
3002 new_insn = NEXT_INSN (new_insn);
3005 gcc_assert (new_insn != NULL_RTX);
3007 CALL_INSN_FUNCTION_USAGE (new_insn)
3008 = CALL_INSN_FUNCTION_USAGE (old_insn);
3010 for (note = REG_NOTES (old_insn);
3011 note;
3012 note = XEXP (note, 1))
3013 switch (REG_NOTE_KIND (note))
3015 case REG_NORETURN:
3016 case REG_SETJMP:
3017 REG_NOTES (new_insn)
3018 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3019 XEXP (note, 0),
3020 REG_NOTES (new_insn));
3021 default:
3022 /* Discard all other reg notes. */
3023 break;
3026 /* Croak if there is another call in the sequence. */
3027 while (++i <= match_len)
3029 j = i + peep2_current;
3030 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3031 j -= MAX_INSNS_PER_PEEP2 + 1;
3032 old_insn = peep2_insn_data[j].insn;
3033 gcc_assert (!CALL_P (old_insn));
3035 break;
3038 i = match_len + peep2_current;
3039 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3040 i -= MAX_INSNS_PER_PEEP2 + 1;
3042 note = find_reg_note (peep2_insn_data[i].insn,
3043 REG_EH_REGION, NULL_RTX);
3045 /* Replace the old sequence with the new. */
3046 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3047 INSN_LOCATOR (peep2_insn_data[i].insn));
3048 before_try = PREV_INSN (insn);
3049 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3051 /* Re-insert the EH_REGION notes. */
3052 if (note || (was_call && nonlocal_goto_handler_labels))
3054 edge eh_edge;
3055 edge_iterator ei;
3057 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3058 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3059 break;
3061 for (x = try ; x != before_try ; x = PREV_INSN (x))
3062 if (CALL_P (x)
3063 || (flag_non_call_exceptions
3064 && may_trap_p (PATTERN (x))
3065 && !find_reg_note (x, REG_EH_REGION, NULL)))
3067 if (note)
3068 REG_NOTES (x)
3069 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3070 XEXP (note, 0),
3071 REG_NOTES (x));
3073 if (x != BB_END (bb) && eh_edge)
3075 edge nfte, nehe;
3076 int flags;
3078 nfte = split_block (bb, x);
3079 flags = (eh_edge->flags
3080 & (EDGE_EH | EDGE_ABNORMAL));
3081 if (CALL_P (x))
3082 flags |= EDGE_ABNORMAL_CALL;
3083 nehe = make_edge (nfte->src, eh_edge->dest,
3084 flags);
3086 nehe->probability = eh_edge->probability;
3087 nfte->probability
3088 = REG_BR_PROB_BASE - nehe->probability;
3090 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3091 bb = nfte->src;
3092 eh_edge = nehe;
3096 /* A possibly trapping insn may have been turned into a
3097 non-trapping one; zap any dummy outgoing edges. */
3098 do_cleanup_cfg |= purge_dead_edges (bb);
3101 #ifdef HAVE_conditional_execution
3102 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3103 peep2_insn_data[i].insn = NULL_RTX;
3104 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3105 peep2_current_count = 0;
3106 #else
3107 /* Back up lifetime information past the end of the
3108 newly created sequence. */
3109 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3110 i = 0;
3111 bitmap_copy (live, peep2_insn_data[i].live_before);
3113 /* Update life information for the new sequence. */
3114 x = try;
3115 do
3117 if (INSN_P (x))
3119 if (--i < 0)
3120 i = MAX_INSNS_PER_PEEP2;
3121 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3122 && peep2_insn_data[i].insn == NULL_RTX)
3123 peep2_current_count++;
3124 peep2_insn_data[i].insn = x;
3125 df_insn_rescan (x);
3126 df_simulate_one_insn_backwards (bb, x, live);
3127 bitmap_copy (peep2_insn_data[i].live_before, live);
3129 x = PREV_INSN (x);
3131 while (x != prev);
3133 peep2_current = i;
3134 #endif
3136 /* If we generated a jump instruction, it won't have
3137 JUMP_LABEL set. Recompute after we're done. */
3138 for (x = try; x != before_try; x = PREV_INSN (x))
3139 if (JUMP_P (x))
3141 do_rebuild_jump_labels = true;
3142 break;
3147 if (insn == BB_HEAD (bb))
3148 break;
3152 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3153 BITMAP_FREE (peep2_insn_data[i].live_before);
3154 BITMAP_FREE (live);
3155 if (do_rebuild_jump_labels)
3156 rebuild_jump_labels (get_insns ());
3158 #endif /* HAVE_peephole2 */
3160 /* Common predicates for use with define_bypass. */
3162 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3163 data, not the address operand(s), of the store. IN_INSN and OUT_INSN
3164 must be either a single_set or a PARALLEL with SETs inside. */
3166 int
3167 store_data_bypass_p (rtx out_insn, rtx in_insn)
3169 rtx out_set, in_set;
3170 rtx out_pat, in_pat;
3171 rtx out_exp, in_exp;
3172 int i, j;
3174 in_set = single_set (in_insn);
3175 if (in_set)
3177 if (!MEM_P (SET_DEST (in_set)))
3178 return false;
3180 out_set = single_set (out_insn);
3181 if (out_set)
3183 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3184 return false;
3186 else
3188 out_pat = PATTERN (out_insn);
3190 if (GET_CODE (out_pat) != PARALLEL)
3191 return false;
3193 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3195 out_exp = XVECEXP (out_pat, 0, i);
3197 if (GET_CODE (out_exp) == CLOBBER)
3198 continue;
3200 gcc_assert (GET_CODE (out_exp) == SET);
3202 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3203 return false;
3207 else
3209 in_pat = PATTERN (in_insn);
3210 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3212 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3214 in_exp = XVECEXP (in_pat, 0, i);
3216 if (GET_CODE (in_exp) == CLOBBER)
3217 continue;
3219 gcc_assert (GET_CODE (in_exp) == SET);
3221 if (!MEM_P (SET_DEST (in_exp)))
3222 return false;
3224 out_set = single_set (out_insn);
3225 if (out_set)
3227 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3228 return false;
3230 else
3232 out_pat = PATTERN (out_insn);
3233 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3235 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3237 out_exp = XVECEXP (out_pat, 0, j);
3239 if (GET_CODE (out_exp) == CLOBBER)
3240 continue;
3242 gcc_assert (GET_CODE (out_exp) == SET);
3244 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3245 return false;
3251 return true;
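/* For example (illustrative only), a machine description names this
   predicate as the guard of a define_bypass; assuming reservations called
   "alu" and "store" exist:

     (define_bypass 1 "alu" "store" "store_data_bypass_p")

   gives the shorter latency only when the store depends on the ALU result
   as its stored data rather than as part of its address.  */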
3254 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3255 condition, and not the THEN or ELSE branch. OUT_INSN may be either a
3256 single set or multiple sets; IN_INSN should be a single_set for the result
3257 to be meaningful, but for convenience of insn categorization it may be any
3258 JUMP or CALL insn. */
3259 int
3260 if_test_bypass_p (rtx out_insn, rtx in_insn)
3262 rtx out_set, in_set;
3264 in_set = single_set (in_insn);
3265 if (! in_set)
3267 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3268 return false;
3271 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3272 return false;
3273 in_set = SET_SRC (in_set);
3275 out_set = single_set (out_insn);
3276 if (out_set)
3278 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3279 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3280 return false;
3282 else
3284 rtx out_pat;
3285 int i;
3287 out_pat = PATTERN (out_insn);
3288 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3290 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3292 rtx exp = XVECEXP (out_pat, 0, i);
3294 if (GET_CODE (exp) == CLOBBER)
3295 continue;
3297 gcc_assert (GET_CODE (exp) == SET);
3299 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3300 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3301 return false;
3305 return true;
3308 static bool
3309 gate_handle_peephole2 (void)
3311 return (optimize > 0 && flag_peephole2);
3314 static unsigned int
3315 rest_of_handle_peephole2 (void)
3317 #ifdef HAVE_peephole2
3318 peephole2_optimize ();
3319 #endif
3320 return 0;
3323 struct tree_opt_pass pass_peephole2 =
3325 "peephole2", /* name */
3326 gate_handle_peephole2, /* gate */
3327 rest_of_handle_peephole2, /* execute */
3328 NULL, /* sub */
3329 NULL, /* next */
3330 0, /* static_pass_number */
3331 TV_PEEPHOLE2, /* tv_id */
3332 0, /* properties_required */
3333 0, /* properties_provided */
3334 0, /* properties_destroyed */
3335 0, /* todo_flags_start */
3336 TODO_df_finish |
3337 TODO_dump_func, /* todo_flags_finish */
3338 'z' /* letter */
3341 static unsigned int
3342 rest_of_handle_split_all_insns (void)
3344 split_all_insns ();
3345 return 0;
3348 struct tree_opt_pass pass_split_all_insns =
3350 "split1", /* name */
3351 NULL, /* gate */
3352 rest_of_handle_split_all_insns, /* execute */
3353 NULL, /* sub */
3354 NULL, /* next */
3355 0, /* static_pass_number */
3356 0, /* tv_id */
3357 0, /* properties_required */
3358 0, /* properties_provided */
3359 0, /* properties_destroyed */
3360 0, /* todo_flags_start */
3361 TODO_dump_func, /* todo_flags_finish */
3362 0 /* letter */
3365 static unsigned int
3366 rest_of_handle_split_after_reload (void)
3368 /* If optimizing, then go ahead and split insns now. */
3369 #ifndef STACK_REGS
3370 if (optimize > 0)
3371 #endif
3372 split_all_insns ();
3373 return 0;
3376 struct tree_opt_pass pass_split_after_reload =
3378 "split2", /* name */
3379 NULL, /* gate */
3380 rest_of_handle_split_after_reload, /* execute */
3381 NULL, /* sub */
3382 NULL, /* next */
3383 0, /* static_pass_number */
3384 0, /* tv_id */
3385 0, /* properties_required */
3386 0, /* properties_provided */
3387 0, /* properties_destroyed */
3388 0, /* todo_flags_start */
3389 TODO_dump_func, /* todo_flags_finish */
3390 0 /* letter */
3393 static bool
3394 gate_handle_split_before_regstack (void)
3396 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3397 /* If flow2 creates new instructions which need splitting
3398 and scheduling after reload is not done, they might not be
3399 split until final which doesn't allow splitting
3400 if HAVE_ATTR_length. */
3401 # ifdef INSN_SCHEDULING
3402 return (optimize && !flag_schedule_insns_after_reload);
3403 # else
3404 return (optimize);
3405 # endif
3406 #else
3407 return 0;
3408 #endif
3411 static unsigned int
3412 rest_of_handle_split_before_regstack (void)
3414 split_all_insns ();
3415 return 0;
3418 struct tree_opt_pass pass_split_before_regstack =
3420 "split3", /* name */
3421 gate_handle_split_before_regstack, /* gate */
3422 rest_of_handle_split_before_regstack, /* execute */
3423 NULL, /* sub */
3424 NULL, /* next */
3425 0, /* static_pass_number */
3426 0, /* tv_id */
3427 0, /* properties_required */
3428 0, /* properties_provided */
3429 0, /* properties_destroyed */
3430 0, /* todo_flags_start */
3431 TODO_dump_func, /* todo_flags_finish */
3432 0 /* letter */
3435 static bool
3436 gate_handle_split_before_sched2 (void)
3438 #ifdef INSN_SCHEDULING
3439 return optimize > 0 && flag_schedule_insns_after_reload;
3440 #else
3441 return 0;
3442 #endif
3445 static unsigned int
3446 rest_of_handle_split_before_sched2 (void)
3448 #ifdef INSN_SCHEDULING
3449 split_all_insns ();
3450 #endif
3451 return 0;
3454 struct tree_opt_pass pass_split_before_sched2 =
3456 "split4", /* name */
3457 gate_handle_split_before_sched2, /* gate */
3458 rest_of_handle_split_before_sched2, /* execute */
3459 NULL, /* sub */
3460 NULL, /* next */
3461 0, /* static_pass_number */
3462 0, /* tv_id */
3463 0, /* properties_required */
3464 0, /* properties_provided */
3465 0, /* properties_destroyed */
3466 0, /* todo_flags_start */
3467 TODO_verify_flow |
3468 TODO_dump_func, /* todo_flags_finish */
3469 0 /* letter */
3472 /* The placement of the splitting that we do for shorten_branches
3473 depends on whether regstack is used by the target or not. */
3474 static bool
3475 gate_do_final_split (void)
3477 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3478 return 1;
3479 #else
3480 return 0;
3481 #endif
3484 struct tree_opt_pass pass_split_for_shorten_branches =
3486 "split5", /* name */
3487 gate_do_final_split, /* gate */
3488 split_all_insns_noflow, /* execute */
3489 NULL, /* sub */
3490 NULL, /* next */
3491 0, /* static_pass_number */
3492 0, /* tv_id */
3493 0, /* properties_required */
3494 0, /* properties_provided */
3495 0, /* properties_destroyed */
3496 0, /* todo_flags_start */
3497 TODO_dump_func, /* todo_flags_finish */
3498 0 /* letter */