/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
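
/* Usage sketch (illustrative only, not part of the original file): an
   rtl-generation pass runs under init_recog_no_volatile () so volatile
   operands are not handed to the optabs.c/expmed.c helpers, while passes
   such as regclass.c that must recognize every valid insn first call

     init_recog ();

   to make volatile operands acceptable again.  */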

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
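
/* Usage sketch for the change-group API above (hypothetical caller,
   illustrative only): queue several edits against one insn, then let
   apply_change_group either keep all of them or roll all of them back:

     rtx tem = XEXP (x, 0);
     validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
     validate_change (insn, &XEXP (x, 1), tem, 1);
     if (! apply_change_group ())
       return;   -- both edits were undone, INSN_CODE restored

   Passing 0 for IN_GROUP instead validates each change immediately.  */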

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
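
/* Usage sketch of the lower-level entry points above (hypothetical
   caller, illustrative only): a pass can checkpoint the group with
   num_validated_changes and retract only its own edits on failure:

     int n = num_validated_changes ();
     ... queue changes with validate_change (..., 1) ...
     if (! verify_changes (n))
       cancel_changes (n);   -- keep earlier changes, drop ours

   When everything verifies, confirm_change_group must still run so the
   changed insns are rescanned for df.  */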

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new_rtx;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
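
/* Usage sketch (hypothetical caller, illustrative only): replace every
   use of one pseudo by another inside INSN and keep the result only if
   INSN can still be recognized:

     if (validate_replace_rtx (from_reg, to_reg, insn))
       ... insn now uses to_reg and has been re-recognized ...

   validate_replace_rtx_group below does the same substitution but
   leaves the changes queued, so edits to several insns can be
   committed or cancelled together.  */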

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
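
/* Examples (illustrative): with MODE == SImode, general_operand accepts
   (reg:SI 65), (const_int 42), and (mem:SI (reg:SI 3)) when the address
   is legitimate; it rejects a volatile MEM unless volatile_ok is set,
   and, on scheduling targets before reload, a paradoxical
   (subreg:SI (mem:QI ...)).  */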

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
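
/* Example (illustrative): on a STACK_GROWS_DOWNWARD target whose
   PUSH_ROUNDING does not pad SImode, a valid SImode push operand is

     (mem:SI (pre_dec:SI (reg:SI sp)))

   whereas a rounded push must use (pre_modify (reg sp)
   (plus (reg sp) (const_int -rounded_size))).  */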

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
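
/* Examples (illustrative): for asm ("..." : "=r" (x) : "r" (y)) the
   body is (set (reg x) (asm_operands ...)) and asm_noperands returns 2.
   For a PARALLEL body the result counts one operand per output SET plus
   every ASM_OPERANDS input; trailing CLOBBERs are not counted.  */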

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;		/* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
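
/* Usage sketch (hypothetical caller, illustrative only), following the
   same protocol check_asm_operands uses above:

     int n = asm_noperands (body);
     rtx *ops = XALLOCAVEC (rtx, n);
     const char **cons = XALLOCAVEC (const char *, n);
     decode_asm_operands (body, ops, NULL, cons, NULL, NULL);

   On return the outputs come first in OPS, followed by the inputs.  */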

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P is such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
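
/* Example (illustrative): if *P is (plus:SI (reg:SI 3) (const_int 4)),
   find_constant_term_loc returns &XEXP (*p, 1), the location holding
   (const_int 4); for a lone (reg:SI 3) it returns a null pointer.  */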

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}

/* Like extract_insn, but save the insn extracted and don't extract again,
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
1920 /* Analyze INSN and fill in recog_data. */
1922 void
1923 extract_insn (rtx insn)
1925 int i;
1926 int icode;
1927 int noperands;
1928 rtx body = PATTERN (insn);
1930 recog_data.n_operands = 0;
1931 recog_data.n_alternatives = 0;
1932 recog_data.n_dups = 0;
1934 switch (GET_CODE (body))
1936 case USE:
1937 case CLOBBER:
1938 case ASM_INPUT:
1939 case ADDR_VEC:
1940 case ADDR_DIFF_VEC:
1941 return;
1943 case SET:
1944 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1945 goto asm_insn;
1946 else
1947 goto normal_insn;
1948 case PARALLEL:
1949 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
1950 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1951 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1952 goto asm_insn;
1953 else
1954 goto normal_insn;
1955 case ASM_OPERANDS:
1956 asm_insn:
1957 recog_data.n_operands = noperands = asm_noperands (body);
1958 if (noperands >= 0)
1960 /* This insn is an `asm' with operands. */
1962 /* expand_asm_operands makes sure there aren't too many operands. */
1963 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
1965 /* Now get the operand values and constraints out of the insn. */
1966 decode_asm_operands (body, recog_data.operand,
1967 recog_data.operand_loc,
1968 recog_data.constraints,
1969 recog_data.operand_mode, NULL);
1970 if (noperands > 0)
1972 const char *p = recog_data.constraints[0];
1973 recog_data.n_alternatives = 1;
1974 while (*p)
1975 recog_data.n_alternatives += (*p++ == ',');
1977 break;
1979 fatal_insn_not_found (insn);
1981 default:
1982 normal_insn:
1983 /* Ordinary insn: recognize it, get the operands via insn_extract
1984 and get the constraints. */
1986 icode = recog_memoized (insn);
1987 if (icode < 0)
1988 fatal_insn_not_found (insn);
1990 recog_data.n_operands = noperands = insn_data[icode].n_operands;
1991 recog_data.n_alternatives = insn_data[icode].n_alternatives;
1992 recog_data.n_dups = insn_data[icode].n_dups;
1994 insn_extract (insn);
1996 for (i = 0; i < noperands; i++)
1998 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
1999 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2000 /* VOIDmode match_operands gets mode from their real operand. */
2001 if (recog_data.operand_mode[i] == VOIDmode)
2002 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2005 for (i = 0; i < noperands; i++)
2006 recog_data.operand_type[i]
2007 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2008 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2009 : OP_IN);
2011 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2013 if (INSN_CODE (insn) < 0)
2014 for (i = 0; i < recog_data.n_alternatives; i++)
2015 recog_data.alternative_enabled_p[i] = true;
2016 else
2018 recog_data.insn = insn;
2019 for (i = 0; i < recog_data.n_alternatives; i++)
2021 which_alternative = i;
2022 recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
2026 recog_data.insn = NULL;
2027 which_alternative = -1;
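/* Illustrative sketch, not part of this file: a typical consumer of
   extract_insn walks the recog_data arrays afterwards, e.g. (with
   note_operand a hypothetical callback):

     extract_insn (insn);
     for (i = 0; i < recog_data.n_operands; i++)
       note_operand (recog_data.operand[i],
                     recog_data.operand_mode[i],
                     recog_data.operand_type[i]);  */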
2030 /* After calling extract_insn, you can use this function to extract some
2031 information from the constraint strings into a more usable form.
2032 The collected data is stored in recog_op_alt. */
2033 void
2034 preprocess_constraints (void)
2036 int i;
2038 for (i = 0; i < recog_data.n_operands; i++)
2039 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2040 * sizeof (struct operand_alternative)));
2042 for (i = 0; i < recog_data.n_operands; i++)
2044 int j;
2045 struct operand_alternative *op_alt;
2046 const char *p = recog_data.constraints[i];
2048 op_alt = recog_op_alt[i];
2050 for (j = 0; j < recog_data.n_alternatives; j++)
2052 op_alt[j].cl = NO_REGS;
2053 op_alt[j].constraint = p;
2054 op_alt[j].matches = -1;
2055 op_alt[j].matched = -1;
2057 if (!recog_data.alternative_enabled_p[j])
2059 p = skip_alternative (p);
2060 continue;
2063 if (*p == '\0' || *p == ',')
2065 op_alt[j].anything_ok = 1;
2066 continue;
2069 for (;;)
2071 char c = *p;
2072 if (c == '#')
2074 c = *++p;
2075 while (c != ',' && c != '\0');
2076 if (c == ',' || c == '\0')
2078 p++;
2079 break;
2082 switch (c)
2084 case '=': case '+': case '*': case '%':
2085 case 'E': case 'F': case 'G': case 'H':
2086 case 's': case 'i': case 'n':
2087 case 'I': case 'J': case 'K': case 'L':
2088 case 'M': case 'N': case 'O': case 'P':
2089 /* These don't say anything we care about. */
2090 break;
2092 case '?':
2093 op_alt[j].reject += 6;
2094 break;
2095 case '!':
2096 op_alt[j].reject += 600;
2097 break;
2098 case '&':
2099 op_alt[j].earlyclobber = 1;
2100 break;
2102 case '0': case '1': case '2': case '3': case '4':
2103 case '5': case '6': case '7': case '8': case '9':
2105 char *end;
2106 op_alt[j].matches = strtoul (p, &end, 10);
2107 recog_op_alt[op_alt[j].matches][j].matched = i;
2108 p = end;
2110 continue;
2112 case TARGET_MEM_CONSTRAINT:
2113 op_alt[j].memory_ok = 1;
2114 break;
2115 case '<':
2116 op_alt[j].decmem_ok = 1;
2117 break;
2118 case '>':
2119 op_alt[j].incmem_ok = 1;
2120 break;
2121 case 'V':
2122 op_alt[j].nonoffmem_ok = 1;
2123 break;
2124 case 'o':
2125 op_alt[j].offmem_ok = 1;
2126 break;
2127 case 'X':
2128 op_alt[j].anything_ok = 1;
2129 break;
2131 case 'p':
2132 op_alt[j].is_address = 1;
2133 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2134 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2135 break;
2137 case 'g':
2138 case 'r':
2139 op_alt[j].cl =
2140 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2141 break;
2143 default:
2144 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2146 op_alt[j].memory_ok = 1;
2147 break;
2149 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2151 op_alt[j].is_address = 1;
2152 op_alt[j].cl
2153 = (reg_class_subunion
2154 [(int) op_alt[j].cl]
2155 [(int) base_reg_class (VOIDmode, ADDRESS,
2156 SCRATCH)]);
2157 break;
2160 op_alt[j].cl
2161 = (reg_class_subunion
2162 [(int) op_alt[j].cl]
2163 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2164 break;
2166 p += CONSTRAINT_LEN (c, p);
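/* Illustrative sketch, not part of this file: callers pair the two
   functions and then consult recog_op_alt, for instance to ask whether
   operand OP may live in memory under alternative ALT:

     extract_insn (insn);
     preprocess_constraints ();
     if (recog_op_alt[op][alt].memory_ok
         || recog_op_alt[op][alt].anything_ok)
       ...

   recog_op_alt[op][alt].cl likewise gives the union of all register
   classes the constraint string accepts for that pair.  */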
2172 /* Check the operands of an insn against the insn's operand constraints
2173 and return 1 if they are valid.
2174 The information about the insn's operands, constraints, operand modes
2175 etc. is obtained from the global variables set up by extract_insn.
2177 WHICH_ALTERNATIVE is set to a number which indicates which
2178 alternative of constraints was matched: 0 for the first alternative,
2179 1 for the next, etc.
2181 In addition, when two operands are required to match
2182 and it happens that the output operand is (reg) while the
2183 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2184 make the output operand look like the input.
2185 This is because the output operand is the one the template will print.
2187 This is used in final, just before printing the assembler code and by
2188 the routines that determine an insn's attribute.
2190 If STRICT is positive, it means that we have been
2191 called after reload has been completed. In that case, we must
2192 do all checks strictly. If it is zero, it means that we have been called
2193 before reload has completed. In that case, we first try to see if we can
2194 find an alternative that matches strictly. If not, we try again, this
2195 time assuming that reload will fix up the insn. This provides a "best
2196 guess" for the alternative and is used to compute attributes of insns prior
2197 to reload. A negative value of STRICT is used for this internal call. */
2199 struct funny_match
2201 int this_op, other;
2205 constrain_operands (int strict)
2207 const char *constraints[MAX_RECOG_OPERANDS];
2208 int matching_operands[MAX_RECOG_OPERANDS];
2209 int earlyclobber[MAX_RECOG_OPERANDS];
2210 int c;
2212 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2213 int funny_match_index;
2215 which_alternative = 0;
2216 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2217 return 1;
2219 for (c = 0; c < recog_data.n_operands; c++)
2221 constraints[c] = recog_data.constraints[c];
2222 matching_operands[c] = -1;
2227 int seen_earlyclobber_at = -1;
2228 int opno;
2229 int lose = 0;
2230 funny_match_index = 0;
2232 if (!recog_data.alternative_enabled_p[which_alternative])
2234 int i;
2236 for (i = 0; i < recog_data.n_operands; i++)
2237 constraints[i] = skip_alternative (constraints[i]);
2239 which_alternative++;
2240 continue;
2243 for (opno = 0; opno < recog_data.n_operands; opno++)
2245 rtx op = recog_data.operand[opno];
2246 enum machine_mode mode = GET_MODE (op);
2247 const char *p = constraints[opno];
2248 int offset = 0;
2249 int win = 0;
2250 int val;
2251 int len;
2253 earlyclobber[opno] = 0;
2255 /* A unary operator may be accepted by the predicate, but it
2256 is irrelevant for matching constraints. */
2257 if (UNARY_P (op))
2258 op = XEXP (op, 0);
2260 if (GET_CODE (op) == SUBREG)
2262 if (REG_P (SUBREG_REG (op))
2263 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2264 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2265 GET_MODE (SUBREG_REG (op)),
2266 SUBREG_BYTE (op),
2267 GET_MODE (op));
2268 op = SUBREG_REG (op);
2271 /* An empty constraint or empty alternative
2272 allows anything which matched the pattern. */
2273 if (*p == 0 || *p == ',')
2274 win = 1;
2277 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2279 case '\0':
2280 len = 0;
2281 break;
2282 case ',':
2283 c = '\0';
2284 break;
2286 case '?': case '!': case '*': case '%':
2287 case '=': case '+':
2288 break;
2290 case '#':
2291 /* Ignore rest of this alternative as far as
2292 constraint checking is concerned. */
2294 p++;
2295 while (*p && *p != ',');
2296 len = 0;
2297 break;
2299 case '&':
2300 earlyclobber[opno] = 1;
2301 if (seen_earlyclobber_at < 0)
2302 seen_earlyclobber_at = opno;
2303 break;
2305 case '0': case '1': case '2': case '3': case '4':
2306 case '5': case '6': case '7': case '8': case '9':
2308 /* This operand must be the same as a previous one.
2309 This kind of constraint is used for instructions such
2310 as add when they take only two operands.
2312 Note that the lower-numbered operand is passed first.
2314 If we are not testing strictly, assume that this
2315 constraint will be satisfied. */
2317 char *end;
2318 int match;
2320 match = strtoul (p, &end, 10);
2321 p = end;
2323 if (strict < 0)
2324 val = 1;
2325 else
2327 rtx op1 = recog_data.operand[match];
2328 rtx op2 = recog_data.operand[opno];
2330 /* A unary operator may be accepted by the predicate,
2331 but it is irrelevant for matching constraints. */
2332 if (UNARY_P (op1))
2333 op1 = XEXP (op1, 0);
2334 if (UNARY_P (op2))
2335 op2 = XEXP (op2, 0);
2337 val = operands_match_p (op1, op2);
2340 matching_operands[opno] = match;
2341 matching_operands[match] = opno;
2343 if (val != 0)
2344 win = 1;
2346 /* If output is *x and input is *--x, arrange later
2347 to change the output to *--x as well, since the
2348 output op is the one that will be printed. */
2349 if (val == 2 && strict > 0)
2351 funny_match[funny_match_index].this_op = opno;
2352 funny_match[funny_match_index++].other = match;
2355 len = 0;
2356 break;
2358 case 'p':
2359 /* p is used for address_operands. When we are called by
2360 gen_reload, no one will have checked that the address is
2361 strictly valid, i.e., that all pseudos requiring hard regs
2362 have gotten them. */
2363 if (strict <= 0
2364 || (strict_memory_address_p (recog_data.operand_mode[opno],
2365 op)))
2366 win = 1;
2367 break;
2369 /* No need to check general_operand again;
2370 it was done in insn-recog.c. Well, except that reload
2371 doesn't check the validity of its replacements, but
2372 that should only matter when there's a bug. */
2373 case 'g':
2374 /* Anything goes unless it is a REG and really has a hard reg
2375 but the hard reg is not in the class GENERAL_REGS. */
2376 if (REG_P (op))
2378 if (strict < 0
2379 || GENERAL_REGS == ALL_REGS
2380 || (reload_in_progress
2381 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2382 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2383 win = 1;
2385 else if (strict < 0 || general_operand (op, mode))
2386 win = 1;
2387 break;
2389 case 'X':
2390 /* This is used for a MATCH_SCRATCH in the cases when
2391 we don't actually need anything. So anything goes
2392 any time. */
2393 win = 1;
2394 break;
2396 case TARGET_MEM_CONSTRAINT:
2397 /* Memory operands must be valid, to the extent
2398 required by STRICT. */
2399 if (MEM_P (op))
2401 if (strict > 0
2402 && !strict_memory_address_p (GET_MODE (op),
2403 XEXP (op, 0)))
2404 break;
2405 if (strict == 0
2406 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2407 break;
2408 win = 1;
2410 /* Before reload, accept what reload can turn into mem. */
2411 else if (strict < 0 && CONSTANT_P (op))
2412 win = 1;
2413 /* During reload, accept a pseudo.  */
2414 else if (reload_in_progress && REG_P (op)
2415 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2416 win = 1;
2417 break;
2419 case '<':
2420 if (MEM_P (op)
2421 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2422 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2423 win = 1;
2424 break;
2426 case '>':
2427 if (MEM_P (op)
2428 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2429 || GET_CODE (XEXP (op, 0)) == POST_INC))
2430 win = 1;
2431 break;
2433 case 'E':
2434 case 'F':
2435 if (GET_CODE (op) == CONST_DOUBLE
2436 || (GET_CODE (op) == CONST_VECTOR
2437 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2438 win = 1;
2439 break;
2441 case 'G':
2442 case 'H':
2443 if (GET_CODE (op) == CONST_DOUBLE
2444 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2445 win = 1;
2446 break;
2448 case 's':
2449 if (GET_CODE (op) == CONST_INT
2450 || (GET_CODE (op) == CONST_DOUBLE
2451 && GET_MODE (op) == VOIDmode))
2452 break;
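/* Fall through: non-numeric constants are then accepted by
   the `i' test below.  */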
2453 case 'i':
2454 if (CONSTANT_P (op))
2455 win = 1;
2456 break;
2458 case 'n':
2459 if (GET_CODE (op) == CONST_INT
2460 || (GET_CODE (op) == CONST_DOUBLE
2461 && GET_MODE (op) == VOIDmode))
2462 win = 1;
2463 break;
2465 case 'I':
2466 case 'J':
2467 case 'K':
2468 case 'L':
2469 case 'M':
2470 case 'N':
2471 case 'O':
2472 case 'P':
2473 if (GET_CODE (op) == CONST_INT
2474 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2475 win = 1;
2476 break;
2478 case 'V':
2479 if (MEM_P (op)
2480 && ((strict > 0 && ! offsettable_memref_p (op))
2481 || (strict < 0
2482 && !(CONSTANT_P (op) || MEM_P (op)))
2483 || (reload_in_progress
2484 && !(REG_P (op)
2485 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2486 win = 1;
2487 break;
2489 case 'o':
2490 if ((strict > 0 && offsettable_memref_p (op))
2491 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2492 /* Before reload, accept what reload can handle. */
2493 || (strict < 0
2494 && (CONSTANT_P (op) || MEM_P (op)))
2495 /* During reload, accept a pseudo.  */
2496 || (reload_in_progress && REG_P (op)
2497 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2498 win = 1;
2499 break;
2501 default:
2503 enum reg_class cl;
2505 cl = (c == 'r'
2506 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2507 if (cl != NO_REGS)
2509 if (strict < 0
2510 || (strict == 0
2511 && REG_P (op)
2512 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2513 || (strict == 0 && GET_CODE (op) == SCRATCH)
2514 || (REG_P (op)
2515 && reg_fits_class_p (op, cl, offset, mode)))
2516 win = 1;
2518 #ifdef EXTRA_CONSTRAINT_STR
2519 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2520 win = 1;
2522 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2523 /* Every memory operand can be reloaded to fit. */
2524 && ((strict < 0 && MEM_P (op))
2525 /* Before reload, accept what reload can turn
2526 into mem. */
2527 || (strict < 0 && CONSTANT_P (op))
2528 /* During reload, accept a pseudo.  */
2529 || (reload_in_progress && REG_P (op)
2530 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2531 win = 1;
2532 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2533 /* Every address operand can be reloaded to fit. */
2534 && strict < 0)
2535 win = 1;
2536 #endif
2537 break;
2540 while (p += len, c);
2542 constraints[opno] = p;
2543 /* If this operand did not win somehow,
2544 this alternative loses. */
2545 if (! win)
2546 lose = 1;
2548 /* This alternative won; the operands are ok.
2549 Change whichever operands this alternative says to change. */
2550 if (! lose)
2552 int opno, eopno;
2554 /* See if any earlyclobber operand conflicts with some other
2555 operand. */
2557 if (strict > 0 && seen_earlyclobber_at >= 0)
2558 for (eopno = seen_earlyclobber_at;
2559 eopno < recog_data.n_operands;
2560 eopno++)
2561 /* Ignore earlyclobber operands now in memory,
2562 because we would often report failure when we have
2563 two memory operands, one of which was formerly a REG. */
2564 if (earlyclobber[eopno]
2565 && REG_P (recog_data.operand[eopno]))
2566 for (opno = 0; opno < recog_data.n_operands; opno++)
2567 if ((MEM_P (recog_data.operand[opno])
2568 || recog_data.operand_type[opno] != OP_OUT)
2569 && opno != eopno
2570 /* Ignore things like match_operator operands. */
2571 && *recog_data.constraints[opno] != 0
2572 && ! (matching_operands[opno] == eopno
2573 && operands_match_p (recog_data.operand[opno],
2574 recog_data.operand[eopno]))
2575 && ! safe_from_earlyclobber (recog_data.operand[opno],
2576 recog_data.operand[eopno]))
2577 lose = 1;
2579 if (! lose)
2581 while (--funny_match_index >= 0)
2583 recog_data.operand[funny_match[funny_match_index].other]
2584 = recog_data.operand[funny_match[funny_match_index].this_op];
2587 return 1;
2591 which_alternative++;
2593 while (which_alternative < recog_data.n_alternatives);
2595 which_alternative = -1;
2596 /* If we are about to reject this, but we are not to test strictly,
2597 try a very loose test. Only return failure if it fails also. */
2598 if (strict == 0)
2599 return constrain_operands (-1);
2600 else
2601 return 0;
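/* Illustrative sketch, not part of this file: the canonical calling
   pattern pairs extract_insn with constrain_operands, passing
   reload_completed so that checking is strict exactly when reload has
   run (this is what extract_constrain_insn_cached above does):

     extract_insn (insn);
     if (!constrain_operands (reload_completed))
       fatal_insn_not_found (insn);

   On success, which_alternative records the alternative that matched.  */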
2604 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2605 is a hard reg in class CLASS when its regno is offset by OFFSET
2606 and changed to mode MODE.
2607 If REG occupies multiple hard regs, all of them must be in CLASS. */
2610 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2611 enum machine_mode mode)
2613 int regno = REGNO (operand);
2615 if (cl == NO_REGS)
2616 return 0;
2618 return (regno < FIRST_PSEUDO_REGISTER
2619 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2620 mode, regno + offset));
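/* Illustrative sketch, not part of this file: a strict check of a
   register constraint reduces to something like

     if (REG_P (op) && reg_fits_class_p (op, cl, offset, mode))
       win = 1;

   where OFFSET is nonzero only when OP came from a SUBREG of a hard
   register, as in the constraint-matching loop above.  */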
2623 /* Split single instruction. Helper function for split_all_insns and
2624 split_all_insns_noflow. Return last insn in the sequence if successful,
2625 or NULL if unsuccessful. */
2627 static rtx
2628 split_insn (rtx insn)
2630 /* Split insns here to get max fine-grain parallelism. */
2631 rtx first = PREV_INSN (insn);
2632 rtx last = try_split (PATTERN (insn), insn, 1);
2634 if (last == insn)
2635 return NULL_RTX;
2637 /* try_split returns the NOTE that INSN became. */
2638 SET_INSN_DELETED (insn);
2640 /* ??? Coddle to md files that generate subregs in post-reload
2641 splitters instead of computing the proper hard register. */
2642 if (reload_completed && first != last)
2644 first = NEXT_INSN (first);
2645 for (;;)
2647 if (INSN_P (first))
2648 cleanup_subreg_operands (first);
2649 if (first == last)
2650 break;
2651 first = NEXT_INSN (first);
2654 return last;
2657 /* Split all insns in the function.  */
2659 void
2660 split_all_insns (void)
2662 sbitmap blocks;
2663 bool changed;
2664 basic_block bb;
2666 blocks = sbitmap_alloc (last_basic_block);
2667 sbitmap_zero (blocks);
2668 changed = false;
2670 FOR_EACH_BB_REVERSE (bb)
2672 rtx insn, next;
2673 bool finish = false;
2675 for (insn = BB_HEAD (bb); !finish ; insn = next)
2677 /* Can't use `next_real_insn', because that might skip
2678 CODE_LABELs and thus cross basic block boundaries. */
2679 next = NEXT_INSN (insn);
2680 finish = (insn == BB_END (bb));
2681 if (INSN_P (insn))
2683 rtx set = single_set (insn);
2685 /* Don't split no-op move insns. These should silently
2686 disappear later in final. Splitting such insns would
2687 break the code that handles LIBCALL blocks. */
2688 if (set && set_noop_p (set))
2690 /* Nops get in the way while scheduling, so delete them
2691 now if register allocation has already been done. It
2692 is too risky to try to do this before register
2693 allocation, and there are unlikely to be very many
2694 nops then anyway. */
2695 if (reload_completed)
2696 delete_insn_and_edges (insn);
2698 else
2700 rtx last = split_insn (insn);
2701 if (last)
2703 /* The split sequence may end with a barrier, but the
2704 BB boundary we are interested in is the insn
2705 before it. */
2707 while (BARRIER_P (last))
2708 last = PREV_INSN (last);
2709 SET_BIT (blocks, bb->index);
2710 changed = true;
2717 if (changed)
2718 find_many_sub_basic_blocks (blocks);
2720 #ifdef ENABLE_CHECKING
2721 verify_flow_info ();
2722 #endif
2724 sbitmap_free (blocks);
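/* Illustrative sketch, not part of this file: the patterns applied by
   try_split come from define_split in the machine description.  A
   hypothetical example, not taken from any real target:

     (define_split
       [(set (match_operand:DI 0 "register_operand" "")
             (match_operand:DI 1 "register_operand" ""))]
       "reload_completed"
       [(set (match_dup 2) (match_dup 3))
        (set (match_dup 4) (match_dup 5))]
       "/* C code here would compute operands[2] through operands[5],
           typically the word-sized halves of operands 0 and 1.  */")

   try_split applies such patterns repeatedly until no insn in the
   resulting sequence can be split further.  */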
2727 /* Same as split_all_insns, but do not expect CFG to be available.
2728 Used by machine dependent reorg passes. */
2730 unsigned int
2731 split_all_insns_noflow (void)
2733 rtx next, insn;
2735 for (insn = get_insns (); insn; insn = next)
2737 next = NEXT_INSN (insn);
2738 if (INSN_P (insn))
2740 /* Don't split no-op move insns. These should silently
2741 disappear later in final. Splitting such insns would
2742 break the code that handles LIBCALL blocks. */
2743 rtx set = single_set (insn);
2744 if (set && set_noop_p (set))
2746 /* Nops get in the way while scheduling, so delete them
2747 now if register allocation has already been done. It
2748 is too risky to try to do this before register
2749 allocation, and there are unlikely to be very many
2750 nops then anyway.
2752 ??? Should we use delete_insn when the CFG isn't valid? */
2753 if (reload_completed)
2754 delete_insn_and_edges (insn);
2756 else
2757 split_insn (insn);
2760 return 0;
2763 #ifdef HAVE_peephole2
2764 struct peep2_insn_data
2766 rtx insn;
2767 regset live_before;
2770 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2771 static int peep2_current;
2772 /* The number of instructions available to match a peep2. */
2773 int peep2_current_count;
2775 /* A non-insn marker indicating the last insn of the block.
2776 The live_before regset for this element is correct, indicating
2777 DF_LIVE_OUT for the block. */
2778 #define PEEP2_EOB pc_rtx
2780 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2781 does not exist. Used by the recognizer to find the next insn to match
2782 in a multi-insn pattern. */
2785 peep2_next_insn (int n)
2787 gcc_assert (n <= peep2_current_count);
2789 n += peep2_current;
2790 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2791 n -= MAX_INSNS_PER_PEEP2 + 1;
2793 return peep2_insn_data[n].insn;
2796 /* Return true if REGNO is dead before the Nth non-note insn
2797 after `current'. */
2800 peep2_regno_dead_p (int ofs, int regno)
2802 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2804 ofs += peep2_current;
2805 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2806 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2808 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2810 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2813 /* Similarly for a REG. */
2816 peep2_reg_dead_p (int ofs, rtx reg)
2818 int regno, n;
2820 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2822 ofs += peep2_current;
2823 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2824 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2826 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2828 regno = REGNO (reg);
2829 n = hard_regno_nregs[regno][GET_MODE (reg)];
2830 while (--n >= 0)
2831 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2832 return 0;
2833 return 1;
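/* Illustrative sketch, not part of this file: the dead-register tests
   above are intended for define_peephole2 conditions.  A hypothetical
   pattern that forwards a move when the intermediate register dies:

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
             (match_operand:SI 1 "register_operand" ""))
        (set (match_operand:SI 2 "register_operand" "")
             (match_dup 0))]
       "peep2_reg_dead_p (2, operands[0])"
       [(set (match_dup 2) (match_dup 1))])

   The offset 2 asks whether operands[0] is dead after the second
   matched insn, i.e. after the whole matched sequence.  */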
2836 /* Offset for searching free register for peephole2. */
2837 static int peep2_free_reg_search_ofs;
2839 /* Try to find a hard register of mode MODE, matching the register class in
2840 CLASS_STR, which is available from the beginning of the insn at offset
2841 FROM and remains available until the end of the insn at offset TO, both
2842 offsets counting non-note insns from the current peephole2 position
2843 (as for peep2_next_insn).
2844 Registers that already have bits set in REG_SET will not be considered.
2846 If an appropriate register is available, it will be returned and the
2847 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2848 returned. */
2851 peep2_find_free_register (int from, int to, const char *class_str,
2852 enum machine_mode mode, HARD_REG_SET *reg_set)
2854 enum reg_class cl;
2855 HARD_REG_SET live;
2856 int i;
2858 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2859 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2861 from += peep2_current;
2862 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2863 from -= MAX_INSNS_PER_PEEP2 + 1;
2864 to += peep2_current;
2865 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2866 to -= MAX_INSNS_PER_PEEP2 + 1;
2868 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2869 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2871 while (from != to)
2873 HARD_REG_SET this_live;
2875 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2876 from = 0;
2877 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2878 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2879 IOR_HARD_REG_SET (live, this_live);
2882 cl = (class_str[0] == 'r' ? GENERAL_REGS
2883 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2885 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2887 int raw_regno, regno, success, j;
2889 /* Distribute the free registers as much as possible. */
2890 raw_regno = peep2_free_reg_search_ofs + i;
2891 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2892 raw_regno -= FIRST_PSEUDO_REGISTER;
2893 #ifdef REG_ALLOC_ORDER
2894 regno = reg_alloc_order[raw_regno];
2895 #else
2896 regno = raw_regno;
2897 #endif
2899 /* Don't allocate fixed registers. */
2900 if (fixed_regs[regno])
2901 continue;
2902 /* Don't allocate global registers. */
2903 if (global_regs[regno])
2904 continue;
2905 /* Make sure the register is of the right class. */
2906 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2907 continue;
2908 /* And can support the mode we need. */
2909 if (! HARD_REGNO_MODE_OK (regno, mode))
2910 continue;
2911 /* And that we don't create an extra save/restore. */
2912 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
2913 continue;
2914 if (! targetm.hard_regno_scratch_ok (regno))
2915 continue;
2917 /* And we don't clobber traceback for noreturn functions. */
2918 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2919 && (! reload_completed || frame_pointer_needed))
2920 continue;
2922 success = 1;
2923 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2925 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2926 || TEST_HARD_REG_BIT (live, regno + j))
2928 success = 0;
2929 break;
2932 if (success)
2934 add_to_hard_reg_set (reg_set, mode, regno);
2936 /* Start the next search with the next register. */
2937 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2938 raw_regno = 0;
2939 peep2_free_reg_search_ofs = raw_regno;
2941 return gen_rtx_REG (mode, regno);
2945 peep2_free_reg_search_ofs = 0;
2946 return NULL_RTX;
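/* Illustrative sketch, not part of this file: genrecog emits calls to
   peep2_find_free_register for (match_scratch ...) operands inside
   define_peephole2, roughly like this (names are hypothetical):

     HARD_REG_SET regs_allocated;
     CLEAR_HARD_REG_SET (regs_allocated);
     operands[3] = peep2_find_free_register (0, 2, "r", SImode,
                                             &regs_allocated);
     if (operands[3] == NULL_RTX)
       ...  /* The peephole fails to match.  */

   i.e. find a general register that is free across matched insns 0
   through 2 and mark it as taken in regs_allocated.  */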
2949 /* Perform the peephole2 optimization pass. */
2951 static void
2952 peephole2_optimize (void)
2954 rtx insn, prev;
2955 bitmap live;
2956 int i;
2957 basic_block bb;
2958 bool do_cleanup_cfg = false;
2959 bool do_rebuild_jump_labels = false;
2961 peep2_free_reg_search_ofs = 0;
2962 df_set_flags (DF_LR_RUN_DCE);
2963 df_analyze ();
2965 /* Initialize the regsets we're going to use. */
2966 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
2967 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
2968 live = BITMAP_ALLOC (&reg_obstack);
2970 FOR_EACH_BB_REVERSE (bb)
2972 /* Indicate that all slots except the last hold invalid data. */
2973 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
2974 peep2_insn_data[i].insn = NULL_RTX;
2975 peep2_current_count = 0;
2977 /* Indicate that the last slot contains live_after data. */
2978 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
2979 peep2_current = MAX_INSNS_PER_PEEP2;
2981 /* Start up propagation. */
2982 bitmap_copy (live, DF_LR_OUT (bb));
2983 df_simulate_artificial_refs_at_end (bb, live);
2984 bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
2986 for (insn = BB_END (bb); ; insn = prev)
2988 prev = PREV_INSN (insn);
2989 if (INSN_P (insn))
2991 rtx attempt, before_try, x;
2992 int match_len;
2993 rtx note;
2994 bool was_call = false;
2996 /* Record this insn. */
2997 if (--peep2_current < 0)
2998 peep2_current = MAX_INSNS_PER_PEEP2;
2999 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3000 && peep2_insn_data[peep2_current].insn == NULL_RTX)
3001 peep2_current_count++;
3002 peep2_insn_data[peep2_current].insn = insn;
3003 df_simulate_one_insn (bb, insn, live);
3004 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3006 if (RTX_FRAME_RELATED_P (insn))
3008 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3009 substitution would lose the
3010 REG_FRAME_RELATED_EXPR that is attached. */
3011 peep2_current_count = 0;
3012 attempt = NULL;
3014 else
3015 /* Match the peephole. */
3016 attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
3018 if (attempt != NULL)
3020 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3021 in the new sequence and copy our CALL_INSN_FUNCTION_USAGE
3022 and other cfg-related call notes to it. */
3023 for (i = 0; i <= match_len; ++i)
3025 int j;
3026 rtx old_insn, new_insn, note;
3028 j = i + peep2_current;
3029 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3030 j -= MAX_INSNS_PER_PEEP2 + 1;
3031 old_insn = peep2_insn_data[j].insn;
3032 if (!CALL_P (old_insn))
3033 continue;
3034 was_call = true;
3036 new_insn = attempt;
3037 while (new_insn != NULL_RTX)
3039 if (CALL_P (new_insn))
3040 break;
3041 new_insn = NEXT_INSN (new_insn);
3044 gcc_assert (new_insn != NULL_RTX);
3046 CALL_INSN_FUNCTION_USAGE (new_insn)
3047 = CALL_INSN_FUNCTION_USAGE (old_insn);
3049 for (note = REG_NOTES (old_insn);
3050 note;
3051 note = XEXP (note, 1))
3052 switch (REG_NOTE_KIND (note))
3054 case REG_NORETURN:
3055 case REG_SETJMP:
3056 add_reg_note (new_insn, REG_NOTE_KIND (note),
3057 XEXP (note, 0));
3058 break;
3059 default:
3060 /* Discard all other reg notes. */
3061 break;
3064 /* Croak if there is another call in the sequence. */
3065 while (++i <= match_len)
3067 j = i + peep2_current;
3068 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3069 j -= MAX_INSNS_PER_PEEP2 + 1;
3070 old_insn = peep2_insn_data[j].insn;
3071 gcc_assert (!CALL_P (old_insn));
3073 break;
3076 i = match_len + peep2_current;
3077 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3078 i -= MAX_INSNS_PER_PEEP2 + 1;
3080 note = find_reg_note (peep2_insn_data[i].insn,
3081 REG_EH_REGION, NULL_RTX);
3083 /* Replace the old sequence with the new. */
3084 attempt = emit_insn_after_setloc (attempt,
3085 peep2_insn_data[i].insn,
3086 INSN_LOCATOR (peep2_insn_data[i].insn));
3087 before_try = PREV_INSN (insn);
3088 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3090 /* Re-insert the EH_REGION notes. */
3091 if (note || (was_call && nonlocal_goto_handler_labels))
3093 edge eh_edge;
3094 edge_iterator ei;
3096 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3097 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3098 break;
3100 for (x = attempt ; x != before_try ; x = PREV_INSN (x))
3101 if (CALL_P (x)
3102 || (flag_non_call_exceptions
3103 && may_trap_p (PATTERN (x))
3104 && !find_reg_note (x, REG_EH_REGION, NULL)))
3106 if (note)
3107 add_reg_note (x, REG_EH_REGION, XEXP (note, 0));
3109 if (x != BB_END (bb) && eh_edge)
3111 edge nfte, nehe;
3112 int flags;
3114 nfte = split_block (bb, x);
3115 flags = (eh_edge->flags
3116 & (EDGE_EH | EDGE_ABNORMAL));
3117 if (CALL_P (x))
3118 flags |= EDGE_ABNORMAL_CALL;
3119 nehe = make_edge (nfte->src, eh_edge->dest,
3120 flags);
3122 nehe->probability = eh_edge->probability;
3123 nfte->probability
3124 = REG_BR_PROB_BASE - nehe->probability;
3126 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3127 bb = nfte->src;
3128 eh_edge = nehe;
3132 /* A possibly trapping insn may have been turned into
3133 a non-trapping one.  Zap any dead outgoing edges. */
3134 do_cleanup_cfg |= purge_dead_edges (bb);
3137 #ifdef HAVE_conditional_execution
3138 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3139 peep2_insn_data[i].insn = NULL_RTX;
3140 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3141 peep2_current_count = 0;
3142 #else
3143 /* Back up lifetime information past the end of the
3144 newly created sequence. */
3145 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3146 i = 0;
3147 bitmap_copy (live, peep2_insn_data[i].live_before);
3149 /* Update life information for the new sequence. */
3150 x = attempt;
3153 if (INSN_P (x))
3155 if (--i < 0)
3156 i = MAX_INSNS_PER_PEEP2;
3157 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3158 && peep2_insn_data[i].insn == NULL_RTX)
3159 peep2_current_count++;
3160 peep2_insn_data[i].insn = x;
3161 df_insn_rescan (x);
3162 df_simulate_one_insn (bb, x, live);
3163 bitmap_copy (peep2_insn_data[i].live_before, live);
3165 x = PREV_INSN (x);
3167 while (x != prev);
3169 peep2_current = i;
3170 #endif
3172 /* If we generated a jump instruction, it won't have
3173 JUMP_LABEL set. Recompute after we're done. */
3174 for (x = attempt; x != before_try; x = PREV_INSN (x))
3175 if (JUMP_P (x))
3177 do_rebuild_jump_labels = true;
3178 break;
3183 if (insn == BB_HEAD (bb))
3184 break;
3188 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3189 BITMAP_FREE (peep2_insn_data[i].live_before);
3190 BITMAP_FREE (live);
3191 if (do_rebuild_jump_labels)
3192 rebuild_jump_labels (get_insns ());
3194 #endif /* HAVE_peephole2 */
3196 /* Common predicates for use with define_bypass. */
3198 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3199 data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
3200 must each be either a single_set or a PARALLEL with SETs inside. */
3203 store_data_bypass_p (rtx out_insn, rtx in_insn)
3205 rtx out_set, in_set;
3206 rtx out_pat, in_pat;
3207 rtx out_exp, in_exp;
3208 int i, j;
3210 in_set = single_set (in_insn);
3211 if (in_set)
3213 if (!MEM_P (SET_DEST (in_set)))
3214 return false;
3216 out_set = single_set (out_insn);
3217 if (out_set)
3219 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3220 return false;
3222 else
3224 out_pat = PATTERN (out_insn);
3226 if (GET_CODE (out_pat) != PARALLEL)
3227 return false;
3229 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3231 out_exp = XVECEXP (out_pat, 0, i);
3233 if (GET_CODE (out_exp) == CLOBBER)
3234 continue;
3236 gcc_assert (GET_CODE (out_exp) == SET);
3238 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3239 return false;
3243 else
3245 in_pat = PATTERN (in_insn);
3246 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3248 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3250 in_exp = XVECEXP (in_pat, 0, i);
3252 if (GET_CODE (in_exp) == CLOBBER)
3253 continue;
3255 gcc_assert (GET_CODE (in_exp) == SET);
3257 if (!MEM_P (SET_DEST (in_exp)))
3258 return false;
3260 out_set = single_set (out_insn);
3261 if (out_set)
3263 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3264 return false;
3266 else
3268 out_pat = PATTERN (out_insn);
3269 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3271 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3273 out_exp = XVECEXP (out_pat, 0, j);
3275 if (GET_CODE (out_exp) == CLOBBER)
3276 continue;
3278 gcc_assert (GET_CODE (out_exp) == SET);
3280 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3281 return false;
3287 return true;
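/* Illustrative sketch, not part of this file: a scheduler description
   would use the predicate above as the guard of a define_bypass, e.g.
   (with hypothetical reservation names):

     (define_bypass 1 "example_alu" "example_store" "store_data_bypass_p")

   meaning a store may consume an ALU result one cycle early, provided
   the dependency is on the stored data rather than on the address.  */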
3290 /* True if the dependency between OUT_INSN and IN_INSN is on the IF_THEN_ELSE
3291 condition, and not on the THEN or ELSE branch.  OUT_INSN may be a single
3292 set or multiple sets; IN_INSN should be a single_set for the result to be
3293 meaningful, but for convenience it may be any JUMP or CALL insn. */
3296 if_test_bypass_p (rtx out_insn, rtx in_insn)
3298 rtx out_set, in_set;
3300 in_set = single_set (in_insn);
3301 if (! in_set)
3303 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3304 return false;
3307 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3308 return false;
3309 in_set = SET_SRC (in_set);
3311 out_set = single_set (out_insn);
3312 if (out_set)
3314 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3315 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3316 return false;
3318 else
3320 rtx out_pat;
3321 int i;
3323 out_pat = PATTERN (out_insn);
3324 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3326 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3328 rtx exp = XVECEXP (out_pat, 0, i);
3330 if (GET_CODE (exp) == CLOBBER)
3331 continue;
3333 gcc_assert (GET_CODE (exp) == SET);
3335 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3336 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3337 return false;
3341 return true;
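/* Illustrative sketch, not part of this file: similarly, a bypass that
   applies only when the consumer depends on the tested condition of a
   conditional move (hypothetical reservation names):

     (define_bypass 2 "example_compare" "example_cmov" "if_test_bypass_p")  */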
3344 static bool
3345 gate_handle_peephole2 (void)
3347 return (optimize > 0 && flag_peephole2);
3350 static unsigned int
3351 rest_of_handle_peephole2 (void)
3353 #ifdef HAVE_peephole2
3354 peephole2_optimize ();
3355 #endif
3356 return 0;
3359 struct rtl_opt_pass pass_peephole2 =
3362 RTL_PASS,
3363 "peephole2", /* name */
3364 gate_handle_peephole2, /* gate */
3365 rest_of_handle_peephole2, /* execute */
3366 NULL, /* sub */
3367 NULL, /* next */
3368 0, /* static_pass_number */
3369 TV_PEEPHOLE2, /* tv_id */
3370 0, /* properties_required */
3371 0, /* properties_provided */
3372 0, /* properties_destroyed */
3373 0, /* todo_flags_start */
3374 TODO_df_finish | TODO_verify_rtl_sharing |
3375 TODO_dump_func /* todo_flags_finish */
3379 static unsigned int
3380 rest_of_handle_split_all_insns (void)
3382 split_all_insns ();
3383 return 0;
3386 struct rtl_opt_pass pass_split_all_insns =
3389 RTL_PASS,
3390 "split1", /* name */
3391 NULL, /* gate */
3392 rest_of_handle_split_all_insns, /* execute */
3393 NULL, /* sub */
3394 NULL, /* next */
3395 0, /* static_pass_number */
3396 0, /* tv_id */
3397 0, /* properties_required */
3398 0, /* properties_provided */
3399 0, /* properties_destroyed */
3400 0, /* todo_flags_start */
3401 TODO_dump_func /* todo_flags_finish */
3405 static unsigned int
3406 rest_of_handle_split_after_reload (void)
3408 /* If optimizing, then go ahead and split insns now. */
3409 #ifndef STACK_REGS
3410 if (optimize > 0)
3411 #endif
3412 split_all_insns ();
3413 return 0;
3416 struct rtl_opt_pass pass_split_after_reload =
3419 RTL_PASS,
3420 "split2", /* name */
3421 NULL, /* gate */
3422 rest_of_handle_split_after_reload, /* execute */
3423 NULL, /* sub */
3424 NULL, /* next */
3425 0, /* static_pass_number */
3426 0, /* tv_id */
3427 0, /* properties_required */
3428 0, /* properties_provided */
3429 0, /* properties_destroyed */
3430 0, /* todo_flags_start */
3431 TODO_dump_func /* todo_flags_finish */
3435 static bool
3436 gate_handle_split_before_regstack (void)
3438 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3439 /* If flow2 creates new instructions which need splitting,
3440 and scheduling after reload is not done, they might not be
3441 split until final, which does not allow splitting
3442 when HAVE_ATTR_length is defined. */
3443 # ifdef INSN_SCHEDULING
3444 return (optimize && !flag_schedule_insns_after_reload);
3445 # else
3446 return (optimize);
3447 # endif
3448 #else
3449 return 0;
3450 #endif
3453 static unsigned int
3454 rest_of_handle_split_before_regstack (void)
3456 split_all_insns ();
3457 return 0;
3460 struct rtl_opt_pass pass_split_before_regstack =
3463 RTL_PASS,
3464 "split3", /* name */
3465 gate_handle_split_before_regstack, /* gate */
3466 rest_of_handle_split_before_regstack, /* execute */
3467 NULL, /* sub */
3468 NULL, /* next */
3469 0, /* static_pass_number */
3470 0, /* tv_id */
3471 0, /* properties_required */
3472 0, /* properties_provided */
3473 0, /* properties_destroyed */
3474 0, /* todo_flags_start */
3475 TODO_dump_func /* todo_flags_finish */
3479 static bool
3480 gate_handle_split_before_sched2 (void)
3482 #ifdef INSN_SCHEDULING
3483 return optimize > 0 && flag_schedule_insns_after_reload;
3484 #else
3485 return 0;
3486 #endif
3489 static unsigned int
3490 rest_of_handle_split_before_sched2 (void)
3492 #ifdef INSN_SCHEDULING
3493 split_all_insns ();
3494 #endif
3495 return 0;
3498 struct rtl_opt_pass pass_split_before_sched2 =
3501 RTL_PASS,
3502 "split4", /* name */
3503 gate_handle_split_before_sched2, /* gate */
3504 rest_of_handle_split_before_sched2, /* execute */
3505 NULL, /* sub */
3506 NULL, /* next */
3507 0, /* static_pass_number */
3508 0, /* tv_id */
3509 0, /* properties_required */
3510 0, /* properties_provided */
3511 0, /* properties_destroyed */
3512 0, /* todo_flags_start */
3513 TODO_verify_flow |
3514 TODO_dump_func /* todo_flags_finish */
3518 /* The placement of the splitting that we do for shorten_branches
3519 depends on whether regstack is used by the target or not. */
3520 static bool
3521 gate_do_final_split (void)
3523 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3524 return 1;
3525 #else
3526 return 0;
3527 #endif
3530 struct rtl_opt_pass pass_split_for_shorten_branches =
3533 RTL_PASS,
3534 "split5", /* name */
3535 gate_do_final_split, /* gate */
3536 split_all_insns_noflow, /* execute */
3537 NULL, /* sub */
3538 NULL, /* next */
3539 0, /* static_pass_number */
3540 0, /* tv_id */
3541 0, /* properties_required */
3542 0, /* properties_provided */
3543 0, /* properties_destroyed */
3544 0, /* todo_flags_start */
3545 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */