1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "insn-config.h"
31 #include "insn-attr.h"
32 #include "hard-reg-set.h"
33 #include "recog.h"
34 #include "regs.h"
35 #include "addresses.h"
36 #include "expr.h"
37 #include "function.h"
38 #include "flags.h"
39 #include "real.h"
40 #include "toplev.h"
41 #include "basic-block.h"
42 #include "output.h"
43 #include "reload.h"
44 #include "timevar.h"
45 #include "tree-pass.h"
47 #ifndef STACK_PUSH_CODE
48 #ifdef STACK_GROWS_DOWNWARD
49 #define STACK_PUSH_CODE PRE_DEC
50 #else
51 #define STACK_PUSH_CODE PRE_INC
52 #endif
53 #endif
55 #ifndef STACK_POP_CODE
56 #ifdef STACK_GROWS_DOWNWARD
57 #define STACK_POP_CODE POST_INC
58 #else
59 #define STACK_POP_CODE POST_DEC
60 #endif
61 #endif
63 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
64 static rtx *find_single_use_1 (rtx, rtx *);
65 static void validate_replace_src_1 (rtx *, void *);
66 static rtx split_insn (rtx);
68 /* Nonzero means allow operands to be volatile.
69 This should be 0 if you are generating rtl, such as if you are calling
70 the functions in optabs.c and expmed.c (most of the time).
71 This should be 1 if all valid insns need to be recognized,
72 such as in regclass.c and final.c and reload.c.
74 init_recog and init_recog_no_volatile are responsible for setting this. */
76 int volatile_ok;
78 struct recog_data recog_data;
80 /* Contains a vector of operand_alternative structures for every operand.
81 Set up by preprocess_constraints. */
82 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
84 /* On return from `constrain_operands', indicate which alternative
85 was satisfied. */
87 int which_alternative;
89 /* Nonzero after end of reload pass.
90 Set to 1 or 0 by toplev.c.
91 Controls the significance of (SUBREG (MEM)). */
93 int reload_completed;
95 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
96 int epilogue_completed;
98 /* Initialize data used by the function `recog'.
99 This must be called once in the compilation of a function
100 before any insn recognition may be done in the function. */
102 void
103 init_recog_no_volatile (void)
105 volatile_ok = 0;
108 void
109 init_recog (void)
111 volatile_ok = 1;
115 /* Check that X is an insn-body for an `asm' with operands
116 and that the operands mentioned in it are legitimate. */
119 check_asm_operands (rtx x)
121 int noperands;
122 rtx *operands;
123 const char **constraints;
124 int i;
126 /* Post-reload, be more strict with things. */
127 if (reload_completed)
129 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
130 extract_insn (make_insn_raw (x));
131 constrain_operands (1);
132 return which_alternative >= 0;
135 noperands = asm_noperands (x);
136 if (noperands < 0)
137 return 0;
138 if (noperands == 0)
139 return 1;
141 operands = alloca (noperands * sizeof (rtx));
142 constraints = alloca (noperands * sizeof (char *));
144 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
146 for (i = 0; i < noperands; i++)
148 const char *c = constraints[i];
149 if (c[0] == '%')
150 c++;
151 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
152 c = constraints[c[0] - '0'];
154 if (! asm_operand_ok (operands[i], c))
155 return 0;
158 return 1;
161 /* Static data for the next two routines. */
163 typedef struct change_t
165 rtx object;
166 int old_code;
167 rtx *loc;
168 rtx old;
169 } change_t;
171 static change_t *changes;
172 static int changes_allocated;
174 static int num_changes = 0;
176 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
177 at which NEW will be placed. If OBJECT is zero, no validation is done,
178 the change is simply made.
180 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
181 will be called with the address and mode as parameters. If OBJECT is
182 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
183 the change in place.
185 IN_GROUP is nonzero if this is part of a group of changes that must be
186 performed as a group. In that case, the changes will be stored. The
187 function `apply_change_group' will validate and apply the changes.
189 If IN_GROUP is zero, this is a single change. Try to recognize the insn
190 or validate the memory reference with the change applied. If the result
191 is not valid for the machine, suppress the change and return zero.
192 Otherwise, perform the change and return 1. */
195 validate_change (rtx object, rtx *loc, rtx new, int in_group)
197 rtx old = *loc;
199 if (old == new || rtx_equal_p (old, new))
200 return 1;
202 gcc_assert (in_group != 0 || num_changes == 0);
204 *loc = new;
206 /* Save the information describing this change. */
207 if (num_changes >= changes_allocated)
209 if (changes_allocated == 0)
210 /* This value allows for repeated substitutions inside complex
211 indexed addresses, or changes in up to 5 insns. */
212 changes_allocated = MAX_RECOG_OPERANDS * 5;
213 else
214 changes_allocated *= 2;
216 changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
219 changes[num_changes].object = object;
220 changes[num_changes].loc = loc;
221 changes[num_changes].old = old;
223 if (object && !MEM_P (object))
225 /* Set INSN_CODE to force rerecognition of insn. Save old code in
226 case invalid. */
227 changes[num_changes].old_code = INSN_CODE (object);
228 INSN_CODE (object) = -1;
231 num_changes++;
233 /* If we are making a group of changes, return 1. Otherwise, validate the
234 change group we made. */
236 if (in_group)
237 return 1;
238 else
239 return apply_change_group ();
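/* Illustrative sketch, not part of recog.c: a minimal caller of the
   change-group machinery documented above.  The helper name and its
   arguments are hypothetical.  Both edits are queued with IN_GROUP
   nonzero, then validated together; if the swapped form does not
   re-recognize, the pattern of INSN is restored automatically.  */

static int
swap_operands_atomically (rtx insn, rtx x)
{
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);

  validate_change (insn, &XEXP (x, 0), op1, 1);
  validate_change (insn, &XEXP (x, 1), op0, 1);

  /* Re-recognize INSN with both edits in place.  */
  return apply_change_group ();
}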
242 /* Keep X canonicalized if some changes have made it non-canonical; only
243 modifies the operands of X, not (for example) its code. Simplifications
244 are not the job of this routine.
246 Return true if anything was changed. */
247 bool
248 canonicalize_change_group (rtx insn, rtx x)
250 if (COMMUTATIVE_P (x)
251 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
253 /* Oops, the caller has made X no longer canonical.
254 Let's redo the changes in the correct order. */
255 rtx tem = XEXP (x, 0);
256 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
257 validate_change (insn, &XEXP (x, 1), tem, 1);
258 return true;
260 else
261 return false;
265 /* This subroutine of apply_change_group verifies whether the changes to INSN
266 were valid; i.e. whether INSN can still be recognized. */
269 insn_invalid_p (rtx insn)
271 rtx pat = PATTERN (insn);
272 int num_clobbers = 0;
273 /* If we are before reload and the pattern is a SET, see if we can add
274 clobbers. */
275 int icode = recog (pat, insn,
276 (GET_CODE (pat) == SET
277 && ! reload_completed && ! reload_in_progress)
278 ? &num_clobbers : 0);
279 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
282 /* If this is an asm and the operands aren't legal, then fail. Likewise if
283 this is not an asm and the insn wasn't recognized. */
284 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
285 || (!is_asm && icode < 0))
286 return 1;
288 /* If we have to add CLOBBERs, fail if we have to add ones that reference
289 hard registers since our callers can't know if they are live or not.
290 Otherwise, add them. */
291 if (num_clobbers > 0)
293 rtx newpat;
295 if (added_clobbers_hard_reg_p (icode))
296 return 1;
298 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
299 XVECEXP (newpat, 0, 0) = pat;
300 add_clobbers (newpat, icode);
301 PATTERN (insn) = pat = newpat;
304 /* After reload, verify that all constraints are satisfied. */
305 if (reload_completed)
307 extract_insn (insn);
309 if (! constrain_operands (1))
310 return 1;
313 INSN_CODE (insn) = icode;
314 return 0;
317 /* Return number of changes made and not validated yet. */
319 num_changes_pending (void)
321 return num_changes;
324 /* Tentatively apply the changes numbered NUM and up.
325 Return 1 if all changes are valid, zero otherwise. */
328 verify_changes (int num)
330 int i;
331 rtx last_validated = NULL_RTX;
333 /* The changes have been applied and all INSN_CODEs have been reset to force
334 rerecognition.
336 The changes are valid if we aren't given an object, or if we are
337 given a MEM and it still is a valid address, or if this is an insn
338 and it is recognized. In the latter case, if reload has completed,
339 we also require that the operands meet the constraints for
340 the insn. */
342 for (i = num; i < num_changes; i++)
344 rtx object = changes[i].object;
346 /* If there is no object to test or if it is the same as the one we
347 already tested, ignore it. */
348 if (object == 0 || object == last_validated)
349 continue;
351 if (MEM_P (object))
353 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
354 break;
356 else if (insn_invalid_p (object))
358 rtx pat = PATTERN (object);
360 /* Perhaps we couldn't recognize the insn because there were
361 extra CLOBBERs at the end. If so, try to re-recognize
362 without the last CLOBBER (later iterations will cause each of
363 them to be eliminated, in turn). But don't do this if we
364 have an ASM_OPERAND. */
365 if (GET_CODE (pat) == PARALLEL
366 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
367 && asm_noperands (PATTERN (object)) < 0)
369 rtx newpat;
371 if (XVECLEN (pat, 0) == 2)
372 newpat = XVECEXP (pat, 0, 0);
373 else
375 int j;
377 newpat
378 = gen_rtx_PARALLEL (VOIDmode,
379 rtvec_alloc (XVECLEN (pat, 0) - 1));
380 for (j = 0; j < XVECLEN (newpat, 0); j++)
381 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
384 /* Add a new change to this group to replace the pattern
385 with this new pattern. Then consider this change
386 as having succeeded. The change we added will
387 cause the entire call to fail if things remain invalid.
389 Note that this can lose if a later change than the one
390 we are processing specified &XVECEXP (PATTERN (object), 0, X)
391 but this shouldn't occur. */
393 validate_change (object, &PATTERN (object), newpat, 1);
394 continue;
396 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
397 /* If this insn is a CLOBBER or USE, it is always valid, but is
398 never recognized. */
399 continue;
400 else
401 break;
403 last_validated = object;
406 return (i == num_changes);
409 /* A group of changes has previously been issued with validate_change and
410 verified with verify_changes. Update the BB_DIRTY flags of the affected
411 blocks, and clear num_changes. */
413 void
414 confirm_change_group (void)
416 int i;
417 basic_block bb;
419 for (i = 0; i < num_changes; i++)
420 if (changes[i].object
421 && INSN_P (changes[i].object)
422 && (bb = BLOCK_FOR_INSN (changes[i].object)))
423 bb->flags |= BB_DIRTY;
425 num_changes = 0;
428 /* Apply a group of changes previously issued with `validate_change'.
429 If all changes are valid, call confirm_change_group and return 1,
430 otherwise, call cancel_changes and return 0. */
433 apply_change_group (void)
435 if (verify_changes (0))
437 confirm_change_group ();
438 return 1;
440 else
442 cancel_changes (0);
443 return 0;
448 /* Return the number of changes so far in the current group. */
451 num_validated_changes (void)
453 return num_changes;
456 /* Retract the changes numbered NUM and up. */
458 void
459 cancel_changes (int num)
461 int i;
463 /* Back out all the changes. Do this in the opposite order in which
464 they were made. */
465 for (i = num_changes - 1; i >= num; i--)
467 *changes[i].loc = changes[i].old;
468 if (changes[i].object && !MEM_P (changes[i].object))
469 INSN_CODE (changes[i].object) = changes[i].old_code;
471 num_changes = num;
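/* Illustrative sketch, not part of recog.c: the two-phase protocol
   implied by verify_changes, confirm_change_group and cancel_changes.
   It assumes some edits were already queued with
   validate_change (..., 1); the helper name is hypothetical.  */

static int
commit_queued_changes (void)
{
  if (!verify_changes (0))
    {
      cancel_changes (0);	/* Roll every queued edit back.  */
      return 0;
    }

  /* A real caller could update its own bookkeeping here, while the
     changes are known valid but not yet confirmed.  */
  confirm_change_group ();	/* Dirties affected blocks, clears num_changes.  */
  return 1;
}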
474 /* Replace every occurrence of FROM in X with TO. Mark each change with
475 validate_change passing OBJECT. */
477 static void
478 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
480 int i, j;
481 const char *fmt;
482 rtx x = *loc;
483 enum rtx_code code;
484 enum machine_mode op0_mode = VOIDmode;
485 int prev_changes = num_changes;
486 rtx new;
488 if (!x)
489 return;
491 code = GET_CODE (x);
492 fmt = GET_RTX_FORMAT (code);
493 if (fmt[0] == 'e')
494 op0_mode = GET_MODE (XEXP (x, 0));
496 /* X matches FROM if it is the same rtx or they are both referring to the
497 same register in the same mode. Avoid calling rtx_equal_p unless the
498 operands look similar. */
500 if (x == from
501 || (REG_P (x) && REG_P (from)
502 && GET_MODE (x) == GET_MODE (from)
503 && REGNO (x) == REGNO (from))
504 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
505 && rtx_equal_p (x, from)))
507 validate_change (object, loc, to, 1);
508 return;
511 /* Call ourself recursively to perform the replacements.
512 We must not replace inside already replaced expression, otherwise we
513 get infinite recursion for replacements like (reg X)->(subreg (reg X))
514 done by regmove, so we must special case shared ASM_OPERANDS. */
516 if (GET_CODE (x) == PARALLEL)
518 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
520 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
521 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
523 /* Verify that operands are really shared. */
524 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
525 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
526 (x, 0, j))));
527 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
528 from, to, object);
530 else
531 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
534 else
535 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
537 if (fmt[i] == 'e')
538 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
539 else if (fmt[i] == 'E')
540 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
541 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
544 /* If we didn't substitute, there is nothing more to do. */
545 if (num_changes == prev_changes)
546 return;
548 /* Allow substituted expression to have different mode. This is used by
549 regmove to change mode of pseudo register. */
550 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
551 op0_mode = GET_MODE (XEXP (x, 0));
553 /* Do changes needed to keep rtx consistent. Don't do any other
554 simplifications, as it is not our job. */
556 if (SWAPPABLE_OPERANDS_P (x)
557 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
559 validate_change (object, loc,
560 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
561 : swap_condition (code),
562 GET_MODE (x), XEXP (x, 1),
563 XEXP (x, 0)), 1);
564 x = *loc;
565 code = GET_CODE (x);
568 switch (code)
570 case PLUS:
571 /* If we have a PLUS whose second operand is now a CONST_INT, use
572 simplify_gen_binary to try to simplify it.
573 ??? We may want later to remove this, once simplification is
574 separated from this function. */
575 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
576 validate_change (object, loc,
577 simplify_gen_binary
578 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
579 break;
580 case MINUS:
581 if (GET_CODE (XEXP (x, 1)) == CONST_INT
582 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
583 validate_change (object, loc,
584 simplify_gen_binary
585 (PLUS, GET_MODE (x), XEXP (x, 0),
586 simplify_gen_unary (NEG,
587 GET_MODE (x), XEXP (x, 1),
588 GET_MODE (x))), 1);
589 break;
590 case ZERO_EXTEND:
591 case SIGN_EXTEND:
592 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
594 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
595 op0_mode);
596 /* If any of the above failed, substitute in something that
597 we know won't be recognized. */
598 if (!new)
599 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
600 validate_change (object, loc, new, 1);
602 break;
603 case SUBREG:
604 /* All subregs possible to simplify should be simplified. */
605 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
606 SUBREG_BYTE (x));
608 /* Subregs of VOIDmode operands are incorrect. */
609 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
610 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
611 if (new)
612 validate_change (object, loc, new, 1);
613 break;
614 case ZERO_EXTRACT:
615 case SIGN_EXTRACT:
616 /* If we are replacing a register with memory, try to change the memory
617 to be the mode required for memory in extract operations (this isn't
618 likely to be an insertion operation; if it was, nothing bad will
619 happen, we might just fail in some cases). */
621 if (MEM_P (XEXP (x, 0))
622 && GET_CODE (XEXP (x, 1)) == CONST_INT
623 && GET_CODE (XEXP (x, 2)) == CONST_INT
624 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
625 && !MEM_VOLATILE_P (XEXP (x, 0)))
627 enum machine_mode wanted_mode = VOIDmode;
628 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
629 int pos = INTVAL (XEXP (x, 2));
631 if (GET_CODE (x) == ZERO_EXTRACT)
633 enum machine_mode new_mode
634 = mode_for_extraction (EP_extzv, 1);
635 if (new_mode != MAX_MACHINE_MODE)
636 wanted_mode = new_mode;
638 else if (GET_CODE (x) == SIGN_EXTRACT)
640 enum machine_mode new_mode
641 = mode_for_extraction (EP_extv, 1);
642 if (new_mode != MAX_MACHINE_MODE)
643 wanted_mode = new_mode;
646 /* If we have a narrower mode, we can do something. */
647 if (wanted_mode != VOIDmode
648 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
650 int offset = pos / BITS_PER_UNIT;
651 rtx newmem;
653 /* If the bytes and bits are counted differently, we
654 must adjust the offset. */
655 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
656 offset =
657 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
658 offset);
660 pos %= GET_MODE_BITSIZE (wanted_mode);
662 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
664 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
665 validate_change (object, &XEXP (x, 0), newmem, 1);
669 break;
671 default:
672 break;
676 /* Try replacing every occurrence of FROM in INSN with TO. After all
677 changes have been made, validate by seeing if INSN is still valid. */
680 validate_replace_rtx (rtx from, rtx to, rtx insn)
682 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
683 return apply_change_group ();
686 /* Try replacing every occurrence of FROM in INSN with TO. */
688 void
689 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
691 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
694 /* Function called by note_uses to replace used subexpressions. */
695 struct validate_replace_src_data
697 rtx from; /* Old RTX */
698 rtx to; /* New RTX */
699 rtx insn; /* Insn in which substitution is occurring. */
702 static void
703 validate_replace_src_1 (rtx *x, void *data)
705 struct validate_replace_src_data *d
706 = (struct validate_replace_src_data *) data;
708 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
711 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
712 SET_DESTs. */
714 void
715 validate_replace_src_group (rtx from, rtx to, rtx insn)
717 struct validate_replace_src_data d;
719 d.from = from;
720 d.to = to;
721 d.insn = insn;
722 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
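/* Illustrative sketch, not part of recog.c: typical use of the two
   replacement entry points above.  REG, CST and INSN are assumed to
   be supplied by the caller; the helper name is hypothetical.  */

static int
try_propagate_constant (rtx reg, rtx cst, rtx insn)
{
  /* First try replacing REG everywhere, including SET_DESTs.  */
  if (validate_replace_rtx (reg, cst, insn))
    return 1;

  /* Otherwise restrict the replacement to used (source) positions
     and validate the group as a whole.  */
  validate_replace_src_group (reg, cst, insn);
  return apply_change_group ();
}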
725 /* Try to simplify INSN.
726 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
727 pattern and return true if something was simplified. */
729 bool
730 validate_simplify_insn (rtx insn)
732 int i;
733 rtx pat = NULL;
734 rtx newpat = NULL;
736 pat = PATTERN (insn);
738 if (GET_CODE (pat) == SET)
740 newpat = simplify_rtx (SET_SRC (pat));
741 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
742 validate_change (insn, &SET_SRC (pat), newpat, 1);
743 newpat = simplify_rtx (SET_DEST (pat));
744 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
745 validate_change (insn, &SET_DEST (pat), newpat, 1);
747 else if (GET_CODE (pat) == PARALLEL)
748 for (i = 0; i < XVECLEN (pat, 0); i++)
750 rtx s = XVECEXP (pat, 0, i);
752 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
754 newpat = simplify_rtx (SET_SRC (s));
755 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
756 validate_change (insn, &SET_SRC (s), newpat, 1);
757 newpat = simplify_rtx (SET_DEST (s));
758 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
759 validate_change (insn, &SET_DEST (s), newpat, 1);
762 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
765 #ifdef HAVE_cc0
766 /* Return 1 if the insn using CC0 set by INSN does not contain
767 any ordered tests applied to the condition codes.
768 EQ and NE tests do not count. */
771 next_insn_tests_no_inequality (rtx insn)
773 rtx next = next_cc0_user (insn);
775 /* If there is no next insn, we have to take the conservative choice. */
776 if (next == 0)
777 return 0;
779 return (INSN_P (next)
780 && ! inequality_comparisons_p (PATTERN (next)));
782 #endif
784 /* This is used by find_single_use to locate an rtx that contains exactly one
785 use of DEST, which is typically either a REG or CC0. It returns a
786 pointer to the innermost rtx expression containing DEST. Appearances of
787 DEST that are being used to totally replace it are not counted. */
789 static rtx *
790 find_single_use_1 (rtx dest, rtx *loc)
792 rtx x = *loc;
793 enum rtx_code code = GET_CODE (x);
794 rtx *result = 0;
795 rtx *this_result;
796 int i;
797 const char *fmt;
799 switch (code)
801 case CONST_INT:
802 case CONST:
803 case LABEL_REF:
804 case SYMBOL_REF:
805 case CONST_DOUBLE:
806 case CONST_VECTOR:
807 case CLOBBER:
808 return 0;
810 case SET:
811 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
812 of a REG that occupies all of the REG, the insn uses DEST if
813 it is mentioned in the destination or the source. Otherwise, we
814 need just check the source. */
815 if (GET_CODE (SET_DEST (x)) != CC0
816 && GET_CODE (SET_DEST (x)) != PC
817 && !REG_P (SET_DEST (x))
818 && ! (GET_CODE (SET_DEST (x)) == SUBREG
819 && REG_P (SUBREG_REG (SET_DEST (x)))
820 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
821 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
822 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
823 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
824 break;
826 return find_single_use_1 (dest, &SET_SRC (x));
828 case MEM:
829 case SUBREG:
830 return find_single_use_1 (dest, &XEXP (x, 0));
832 default:
833 break;
836 /* If it wasn't one of the common cases above, check each expression and
837 vector of this code. Look for a unique usage of DEST. */
839 fmt = GET_RTX_FORMAT (code);
840 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
842 if (fmt[i] == 'e')
844 if (dest == XEXP (x, i)
845 || (REG_P (dest) && REG_P (XEXP (x, i))
846 && REGNO (dest) == REGNO (XEXP (x, i))))
847 this_result = loc;
848 else
849 this_result = find_single_use_1 (dest, &XEXP (x, i));
851 if (result == 0)
852 result = this_result;
853 else if (this_result)
854 /* Duplicate usage. */
855 return 0;
857 else if (fmt[i] == 'E')
859 int j;
861 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
863 if (XVECEXP (x, i, j) == dest
864 || (REG_P (dest)
865 && REG_P (XVECEXP (x, i, j))
866 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
867 this_result = loc;
868 else
869 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
871 if (result == 0)
872 result = this_result;
873 else if (this_result)
874 return 0;
879 return result;
882 /* See if DEST, produced in INSN, is used only a single time in the
883 sequel. If so, return a pointer to the innermost rtx expression in which
884 it is used.
886 If PLOC is nonzero, *PLOC is set to the insn containing the single use.
888 This routine will usually return zero, either before flow is called (because
889 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
890 note can't be trusted).
892 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
893 care about REG_DEAD notes or LOG_LINKS.
895 Otherwise, we find the single use by finding an insn that has a
896 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
897 only referenced once in that insn, we know that it must be the first
898 and last insn referencing DEST. */
900 rtx *
901 find_single_use (rtx dest, rtx insn, rtx *ploc)
903 rtx next;
904 rtx *result;
905 rtx link;
907 #ifdef HAVE_cc0
908 if (dest == cc0_rtx)
910 next = NEXT_INSN (insn);
911 if (next == 0
912 || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
913 return 0;
915 result = find_single_use_1 (dest, &PATTERN (next));
916 if (result && ploc)
917 *ploc = next;
918 return result;
920 #endif
922 if (reload_completed || reload_in_progress || !REG_P (dest))
923 return 0;
925 for (next = next_nonnote_insn (insn);
926 next != 0 && !LABEL_P (next);
927 next = next_nonnote_insn (next))
928 if (INSN_P (next) && dead_or_set_p (next, dest))
930 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
931 if (XEXP (link, 0) == insn)
932 break;
934 if (link)
936 result = find_single_use_1 (dest, &PATTERN (next));
937 if (ploc)
938 *ploc = next;
939 return result;
943 return 0;
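/* Illustrative sketch, not part of recog.c: locating the lone
   consumer of a value, in the style of combine.  The helper name is
   hypothetical; it assumes INSN has a single SET of a register.  */

static int
value_used_once_p (rtx insn)
{
  rtx set = single_set (insn);
  rtx user;
  rtx *loc;

  if (!set || !REG_P (SET_DEST (set)))
    return 0;

  loc = find_single_use (SET_DEST (set), insn, &user);
  /* On success, *LOC is the innermost expression in USER mentioning
     the register, a location suitable for validate_change.  */
  return loc != 0;
}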
946 /* Return 1 if OP is a valid general operand for machine mode MODE.
947 This is either a register reference, a memory reference,
948 or a constant. In the case of a memory reference, the address
949 is checked for general validity for the target machine.
951 Register and memory references must have mode MODE in order to be valid,
952 but some constants have no machine mode and are valid for any mode.
954 If MODE is VOIDmode, OP is checked for validity for whatever mode
955 it has.
957 The main use of this function is as a predicate in match_operand
958 expressions in the machine description.
960 For an explanation of this function's behavior for registers of
961 class NO_REGS, see the comment for `register_operand'. */
964 general_operand (rtx op, enum machine_mode mode)
966 enum rtx_code code = GET_CODE (op);
968 if (mode == VOIDmode)
969 mode = GET_MODE (op);
971 /* Don't accept CONST_INT or anything similar
972 if the caller wants something floating. */
973 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
974 && GET_MODE_CLASS (mode) != MODE_INT
975 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
976 return 0;
978 if (GET_CODE (op) == CONST_INT
979 && mode != VOIDmode
980 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
981 return 0;
983 if (CONSTANT_P (op))
984 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
985 || mode == VOIDmode)
986 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
987 && LEGITIMATE_CONSTANT_P (op));
989 /* Except for certain constants with VOIDmode, already checked for,
990 OP's mode must match MODE if MODE specifies a mode. */
992 if (GET_MODE (op) != mode)
993 return 0;
995 if (code == SUBREG)
997 rtx sub = SUBREG_REG (op);
999 #ifdef INSN_SCHEDULING
1000 /* On machines that have insn scheduling, we want all memory
1001 references to be explicit, so outlaw paradoxical SUBREGs.
1002 However, we must allow them after reload so that they can
1003 get cleaned up by cleanup_subreg_operands. */
1004 if (!reload_completed && MEM_P (sub)
1005 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
1006 return 0;
1007 #endif
1008 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1009 may result in an incorrect reference. We should simplify all valid
1010 subregs of MEM anyway. But allow this after reload because we
1011 might be called from cleanup_subreg_operands.
1013 ??? This is a kludge. */
1014 if (!reload_completed && SUBREG_BYTE (op) != 0
1015 && MEM_P (sub))
1016 return 0;
1018 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1019 create such rtl, and we must reject it. */
1020 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1021 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1022 return 0;
1024 op = sub;
1025 code = GET_CODE (op);
1028 if (code == REG)
1029 /* A register whose class is NO_REGS is not a general operand. */
1030 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1031 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
1033 if (code == MEM)
1035 rtx y = XEXP (op, 0);
1037 if (! volatile_ok && MEM_VOLATILE_P (op))
1038 return 0;
1040 /* Use the mem's mode, since it will be reloaded thus. */
1041 if (memory_address_p (GET_MODE (op), y))
1042 return 1;
1045 return 0;
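/* Illustrative sketch, not part of recog.c: guarding code generation
   on the predicate above, and forcing X into a register when it is
   not already a general operand, as expanders commonly do.  The
   helper name is hypothetical.  */

static void
emit_move_checked (rtx target, rtx x)
{
  if (general_operand (x, SImode))
    emit_move_insn (target, x);
  else
    emit_move_insn (target, force_reg (SImode, x));
}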
1048 /* Return 1 if OP is a valid memory address for a memory reference
1049 of mode MODE.
1051 The main use of this function is as a predicate in match_operand
1052 expressions in the machine description. */
1055 address_operand (rtx op, enum machine_mode mode)
1057 return memory_address_p (mode, op);
1060 /* Return 1 if OP is a register reference of mode MODE.
1061 If MODE is VOIDmode, accept a register in any mode.
1063 The main use of this function is as a predicate in match_operand
1064 expressions in the machine description.
1066 As a special exception, registers whose class is NO_REGS are
1067 not accepted by `register_operand'. The reason for this change
1068 is to allow the representation of special architecture artifacts
1069 (such as a condition code register) without extending the rtl
1070 definitions. Since registers of class NO_REGS cannot be used
1071 as registers in any case where register classes are examined,
1072 it is most consistent to keep this function from accepting them. */
1075 register_operand (rtx op, enum machine_mode mode)
1077 if (GET_MODE (op) != mode && mode != VOIDmode)
1078 return 0;
1080 if (GET_CODE (op) == SUBREG)
1082 rtx sub = SUBREG_REG (op);
1084 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1085 because it is guaranteed to be reloaded into one.
1086 Just make sure the MEM is valid in itself.
1087 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1088 but currently it does result from (SUBREG (REG)...) where the
1089 reg went on the stack.) */
1090 if (! reload_completed && MEM_P (sub))
1091 return general_operand (op, mode);
1093 #ifdef CANNOT_CHANGE_MODE_CLASS
1094 if (REG_P (sub)
1095 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1096 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1097 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1098 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1099 return 0;
1100 #endif
1102 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1103 create such rtl, and we must reject it. */
1104 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1105 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1106 return 0;
1108 op = sub;
1111 /* We don't consider registers whose class is NO_REGS
1112 to be a register operand. */
1113 return (REG_P (op)
1114 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1115 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1118 /* Return 1 for a register in Pmode; ignore the tested mode. */
1121 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1123 return register_operand (op, Pmode);
1126 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1127 or a hard register. */
1130 scratch_operand (rtx op, enum machine_mode mode)
1132 if (GET_MODE (op) != mode && mode != VOIDmode)
1133 return 0;
1135 return (GET_CODE (op) == SCRATCH
1136 || (REG_P (op)
1137 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1140 /* Return 1 if OP is a valid immediate operand for mode MODE.
1142 The main use of this function is as a predicate in match_operand
1143 expressions in the machine description. */
1146 immediate_operand (rtx op, enum machine_mode mode)
1148 /* Don't accept CONST_INT or anything similar
1149 if the caller wants something floating. */
1150 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1151 && GET_MODE_CLASS (mode) != MODE_INT
1152 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1153 return 0;
1155 if (GET_CODE (op) == CONST_INT
1156 && mode != VOIDmode
1157 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1158 return 0;
1160 return (CONSTANT_P (op)
1161 && (GET_MODE (op) == mode || mode == VOIDmode
1162 || GET_MODE (op) == VOIDmode)
1163 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1164 && LEGITIMATE_CONSTANT_P (op));
1167 /* Returns 1 if OP is an operand that is a CONST_INT. */
1170 const_int_operand (rtx op, enum machine_mode mode)
1172 if (GET_CODE (op) != CONST_INT)
1173 return 0;
1175 if (mode != VOIDmode
1176 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1177 return 0;
1179 return 1;
1182 /* Returns 1 if OP is an operand that is a constant integer or constant
1183 floating-point number. */
1186 const_double_operand (rtx op, enum machine_mode mode)
1188 /* Don't accept CONST_INT or anything similar
1189 if the caller wants something floating. */
1190 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1191 && GET_MODE_CLASS (mode) != MODE_INT
1192 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1193 return 0;
1195 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1196 && (mode == VOIDmode || GET_MODE (op) == mode
1197 || GET_MODE (op) == VOIDmode));
1200 /* Return 1 if OP is a general operand that is not an immediate operand. */
1203 nonimmediate_operand (rtx op, enum machine_mode mode)
1205 return (general_operand (op, mode) && ! CONSTANT_P (op));
1208 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1211 nonmemory_operand (rtx op, enum machine_mode mode)
1213 if (CONSTANT_P (op))
1215 /* Don't accept CONST_INT or anything similar
1216 if the caller wants something floating. */
1217 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1218 && GET_MODE_CLASS (mode) != MODE_INT
1219 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1220 return 0;
1222 if (GET_CODE (op) == CONST_INT
1223 && mode != VOIDmode
1224 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1225 return 0;
1227 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1228 || mode == VOIDmode)
1229 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1230 && LEGITIMATE_CONSTANT_P (op));
1233 if (GET_MODE (op) != mode && mode != VOIDmode)
1234 return 0;
1236 if (GET_CODE (op) == SUBREG)
1238 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1239 because it is guaranteed to be reloaded into one.
1240 Just make sure the MEM is valid in itself.
1241 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1242 but currently it does result from (SUBREG (REG)...) where the
1243 reg went on the stack.) */
1244 if (! reload_completed && MEM_P (SUBREG_REG (op)))
1245 return general_operand (op, mode);
1246 op = SUBREG_REG (op);
1249 /* We don't consider registers whose class is NO_REGS
1250 to be a register operand. */
1251 return (REG_P (op)
1252 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1253 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1256 /* Return 1 if OP is a valid operand that stands for pushing a
1257 value of mode MODE onto the stack.
1259 The main use of this function is as a predicate in match_operand
1260 expressions in the machine description. */
1263 push_operand (rtx op, enum machine_mode mode)
1265 unsigned int rounded_size = GET_MODE_SIZE (mode);
1267 #ifdef PUSH_ROUNDING
1268 rounded_size = PUSH_ROUNDING (rounded_size);
1269 #endif
1271 if (!MEM_P (op))
1272 return 0;
1274 if (mode != VOIDmode && GET_MODE (op) != mode)
1275 return 0;
1277 op = XEXP (op, 0);
1279 if (rounded_size == GET_MODE_SIZE (mode))
1281 if (GET_CODE (op) != STACK_PUSH_CODE)
1282 return 0;
1284 else
1286 if (GET_CODE (op) != PRE_MODIFY
1287 || GET_CODE (XEXP (op, 1)) != PLUS
1288 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1289 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1290 #ifdef STACK_GROWS_DOWNWARD
1291 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1292 #else
1293 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1294 #endif
1296 return 0;
1299 return XEXP (op, 0) == stack_pointer_rtx;
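/* Illustrative sketch, not part of recog.c: the simple address shape
   that push_operand accepts when PUSH_ROUNDING does not pad MODE.
   gen_rtx_fmt_e builds the one-operand rtx for STACK_PUSH_CODE
   (PRE_DEC or PRE_INC); the helper name is hypothetical.  */

static rtx
simple_push_mem (enum machine_mode mode)
{
  return gen_rtx_MEM (mode,
		      gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode,
				     stack_pointer_rtx));
}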
1302 /* Return 1 if OP is a valid operand that stands for popping a
1303 value of mode MODE off the stack.
1305 The main use of this function is as a predicate in match_operand
1306 expressions in the machine description. */
1309 pop_operand (rtx op, enum machine_mode mode)
1311 if (!MEM_P (op))
1312 return 0;
1314 if (mode != VOIDmode && GET_MODE (op) != mode)
1315 return 0;
1317 op = XEXP (op, 0);
1319 if (GET_CODE (op) != STACK_POP_CODE)
1320 return 0;
1322 return XEXP (op, 0) == stack_pointer_rtx;
1325 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1328 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
1330 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1331 return 0;
1333 win:
1334 return 1;
1337 /* Return 1 if OP is a valid memory reference with mode MODE,
1338 including a valid address.
1340 The main use of this function is as a predicate in match_operand
1341 expressions in the machine description. */
1344 memory_operand (rtx op, enum machine_mode mode)
1346 rtx inner;
1348 if (! reload_completed)
1349 /* Note that no SUBREG is a memory operand before end of reload pass,
1350 because (SUBREG (MEM...)) forces reloading into a register. */
1351 return MEM_P (op) && general_operand (op, mode);
1353 if (mode != VOIDmode && GET_MODE (op) != mode)
1354 return 0;
1356 inner = op;
1357 if (GET_CODE (inner) == SUBREG)
1358 inner = SUBREG_REG (inner);
1360 return (MEM_P (inner) && general_operand (op, mode));
1363 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1364 that is, a memory reference whose address is a general_operand. */
1367 indirect_operand (rtx op, enum machine_mode mode)
1369 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1370 if (! reload_completed
1371 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1373 int offset = SUBREG_BYTE (op);
1374 rtx inner = SUBREG_REG (op);
1376 if (mode != VOIDmode && GET_MODE (op) != mode)
1377 return 0;
1379 /* The only way that we can have a general_operand as the resulting
1380 address is if OFFSET is zero and the address already is an operand
1381 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1382 operand. */
1384 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1385 || (GET_CODE (XEXP (inner, 0)) == PLUS
1386 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1387 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1388 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1391 return (MEM_P (op)
1392 && memory_operand (op, mode)
1393 && general_operand (XEXP (op, 0), Pmode));
1396 /* Return 1 if this is a comparison operator. This allows the use of
1397 MATCH_OPERATOR to recognize all the branch insns. */
1400 comparison_operator (rtx op, enum machine_mode mode)
1402 return ((mode == VOIDmode || GET_MODE (op) == mode)
1403 && COMPARISON_P (op));
1406 /* If BODY is an insn body that uses ASM_OPERANDS,
1407 return the number of operands (both input and output) in the insn.
1408 Otherwise return -1. */
1411 asm_noperands (rtx body)
1413 switch (GET_CODE (body))
1415 case ASM_OPERANDS:
1416 /* No output operands: return number of input operands. */
1417 return ASM_OPERANDS_INPUT_LENGTH (body);
1418 case SET:
1419 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1420 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1421 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1422 else
1423 return -1;
1424 case PARALLEL:
1425 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1426 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1428 /* Multiple output operands, or 1 output plus some clobbers:
1429 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1430 int i;
1431 int n_sets;
1433 /* Count backwards through CLOBBERs to determine number of SETs. */
1434 for (i = XVECLEN (body, 0); i > 0; i--)
1436 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1437 break;
1438 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1439 return -1;
1442 /* N_SETS is now number of output operands. */
1443 n_sets = i;
1445 /* Verify that all the SETs we have
1446 came from a single original asm_operands insn
1447 (so that invalid combinations are blocked). */
1448 for (i = 0; i < n_sets; i++)
1450 rtx elt = XVECEXP (body, 0, i);
1451 if (GET_CODE (elt) != SET)
1452 return -1;
1453 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1454 return -1;
1455 /* If these ASM_OPERANDS rtx's came from different original insns
1456 then they aren't allowed together. */
1457 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1458 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1459 return -1;
1461 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1462 + n_sets);
1464 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1466 /* 0 outputs, but some clobbers:
1467 body is [(asm_operands ...) (clobber (reg ...))...]. */
1468 int i;
1470 /* Make sure all the other parallel things really are clobbers. */
1471 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1472 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1473 return -1;
1475 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1477 else
1478 return -1;
1479 default:
1480 return -1;
1484 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1485 copy its operands (both input and output) into the vector OPERANDS,
1486 the locations of the operands within the insn into the vector OPERAND_LOCS,
1487 and the constraints for the operands into CONSTRAINTS.
1488 Write the modes of the operands into MODES.
1489 Return the assembler-template.
1491 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1492 we don't store that info. */
1494 const char *
1495 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1496 const char **constraints, enum machine_mode *modes,
1497 location_t *loc)
1499 int i;
1500 int noperands;
1501 rtx asmop = 0;
1503 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1505 asmop = SET_SRC (body);
1506 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1508 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1510 for (i = 1; i < noperands; i++)
1512 if (operand_locs)
1513 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1514 if (operands)
1515 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1516 if (constraints)
1517 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1518 if (modes)
1519 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1522 /* The output is in the SET.
1523 Its constraint is in the ASM_OPERANDS itself. */
1524 if (operands)
1525 operands[0] = SET_DEST (body);
1526 if (operand_locs)
1527 operand_locs[0] = &SET_DEST (body);
1528 if (constraints)
1529 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1530 if (modes)
1531 modes[0] = GET_MODE (SET_DEST (body));
1533 else if (GET_CODE (body) == ASM_OPERANDS)
1535 asmop = body;
1536 /* No output operands: BODY is (asm_operands ....). */
1538 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1540 /* The input operands are found in the 1st element vector. */
1541 /* Constraints for inputs are in the 2nd element vector. */
1542 for (i = 0; i < noperands; i++)
1544 if (operand_locs)
1545 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1546 if (operands)
1547 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1548 if (constraints)
1549 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1550 if (modes)
1551 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1554 else if (GET_CODE (body) == PARALLEL
1555 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1556 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1558 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1559 int nin;
1560 int nout = 0; /* Does not include CLOBBERs. */
1562 asmop = SET_SRC (XVECEXP (body, 0, 0));
1563 nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1565 /* At least one output, plus some CLOBBERs. */
1567 /* The outputs are in the SETs.
1568 Their constraints are in the ASM_OPERANDS itself. */
1569 for (i = 0; i < nparallel; i++)
1571 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1572 break; /* Past last SET */
1574 if (operands)
1575 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1576 if (operand_locs)
1577 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1578 if (constraints)
1579 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1580 if (modes)
1581 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1582 nout++;
1585 for (i = 0; i < nin; i++)
1587 if (operand_locs)
1588 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1589 if (operands)
1590 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1591 if (constraints)
1592 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1593 if (modes)
1594 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1597 else if (GET_CODE (body) == PARALLEL
1598 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1600 /* No outputs, but some CLOBBERs. */
1602 int nin;
1604 asmop = XVECEXP (body, 0, 0);
1605 nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1607 for (i = 0; i < nin; i++)
1609 if (operand_locs)
1610 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1611 if (operands)
1612 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1613 if (constraints)
1614 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1615 if (modes)
1616 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1621 if (loc)
1623 #ifdef USE_MAPPED_LOCATION
1624 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1625 #else
1626 loc->file = ASM_OPERANDS_SOURCE_FILE (asmop);
1627 loc->line = ASM_OPERANDS_SOURCE_LINE (asmop);
1628 #endif
1631 return ASM_OPERANDS_TEMPLATE (asmop);
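/* Illustrative sketch, not part of recog.c: walking the operands of
   an asm BODY, following the same alloca pattern that
   check_asm_operands uses above.  The helper name is hypothetical.  */

static void
dump_asm_constraints (rtx body)
{
  int i, noperands = asm_noperands (body);

  if (noperands > 0)
    {
      rtx *operands = alloca (noperands * sizeof (rtx));
      const char **constraints = alloca (noperands * sizeof (char *));

      decode_asm_operands (body, operands, NULL, constraints, NULL, NULL);
      for (i = 0; i < noperands; i++)
	fprintf (stderr, "operand %d: \"%s\"\n", i, constraints[i]);
    }
}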
1634 /* Check if an asm_operand matches its constraints.
1635 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1638 asm_operand_ok (rtx op, const char *constraint)
1640 int result = 0;
1642 /* Use constrain_operands after reload. */
1643 gcc_assert (!reload_completed);
1645 while (*constraint)
1647 char c = *constraint;
1648 int len;
1649 switch (c)
1651 case ',':
1652 constraint++;
1653 continue;
1654 case '=':
1655 case '+':
1656 case '*':
1657 case '%':
1658 case '!':
1659 case '#':
1660 case '&':
1661 case '?':
1662 break;
1664 case '0': case '1': case '2': case '3': case '4':
1665 case '5': case '6': case '7': case '8': case '9':
1666 /* For best results, our caller should have given us the
1667 proper matching constraint, but we can't actually fail
1668 the check if they didn't. Indicate that results are
1669 inconclusive. */
1671 constraint++;
1672 while (ISDIGIT (*constraint));
1673 if (! result)
1674 result = -1;
1675 continue;
1677 case 'p':
1678 if (address_operand (op, VOIDmode))
1679 result = 1;
1680 break;
1682 case 'm':
1683 case 'V': /* non-offsettable */
1684 if (memory_operand (op, VOIDmode))
1685 result = 1;
1686 break;
1688 case 'o': /* offsettable */
1689 if (offsettable_nonstrict_memref_p (op))
1690 result = 1;
1691 break;
1693 case '<':
1694 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1695 excepting those that expand_call created. Further, on some
1696 machines which do not have generalized auto inc/dec, an inc/dec
1697 is not a memory_operand.
1699 Match any memory and hope things are resolved after reload. */
1701 if (MEM_P (op)
1702 && (1
1703 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1704 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1705 result = 1;
1706 break;
1708 case '>':
1709 if (MEM_P (op)
1710 && (1
1711 || GET_CODE (XEXP (op, 0)) == PRE_INC
1712 || GET_CODE (XEXP (op, 0)) == POST_INC))
1713 result = 1;
1714 break;
1716 case 'E':
1717 case 'F':
1718 if (GET_CODE (op) == CONST_DOUBLE
1719 || (GET_CODE (op) == CONST_VECTOR
1720 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1721 result = 1;
1722 break;
1724 case 'G':
1725 if (GET_CODE (op) == CONST_DOUBLE
1726 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1727 result = 1;
1728 break;
1729 case 'H':
1730 if (GET_CODE (op) == CONST_DOUBLE
1731 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1732 result = 1;
1733 break;
1735 case 's':
1736 if (GET_CODE (op) == CONST_INT
1737 || (GET_CODE (op) == CONST_DOUBLE
1738 && GET_MODE (op) == VOIDmode))
1739 break;
1740 /* Fall through. */
1742 case 'i':
1743 if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1744 result = 1;
1745 break;
1747 case 'n':
1748 if (GET_CODE (op) == CONST_INT
1749 || (GET_CODE (op) == CONST_DOUBLE
1750 && GET_MODE (op) == VOIDmode))
1751 result = 1;
1752 break;
1754 case 'I':
1755 if (GET_CODE (op) == CONST_INT
1756 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1757 result = 1;
1758 break;
1759 case 'J':
1760 if (GET_CODE (op) == CONST_INT
1761 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1762 result = 1;
1763 break;
1764 case 'K':
1765 if (GET_CODE (op) == CONST_INT
1766 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1767 result = 1;
1768 break;
1769 case 'L':
1770 if (GET_CODE (op) == CONST_INT
1771 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1772 result = 1;
1773 break;
1774 case 'M':
1775 if (GET_CODE (op) == CONST_INT
1776 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1777 result = 1;
1778 break;
1779 case 'N':
1780 if (GET_CODE (op) == CONST_INT
1781 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1782 result = 1;
1783 break;
1784 case 'O':
1785 if (GET_CODE (op) == CONST_INT
1786 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1787 result = 1;
1788 break;
1789 case 'P':
1790 if (GET_CODE (op) == CONST_INT
1791 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1792 result = 1;
1793 break;
1795 case 'X':
1796 result = 1;
1797 break;
1799 case 'g':
1800 if (general_operand (op, VOIDmode))
1801 result = 1;
1802 break;
1804 default:
1805 /* For all other letters, we first check for a register class,
1806 otherwise it is an EXTRA_CONSTRAINT. */
1807 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1809 case 'r':
1810 if (GET_MODE (op) == BLKmode)
1811 break;
1812 if (register_operand (op, VOIDmode))
1813 result = 1;
1815 #ifdef EXTRA_CONSTRAINT_STR
1816 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1817 result = 1;
1818 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
1819 /* Every memory operand can be reloaded to fit. */
1820 && memory_operand (op, VOIDmode))
1821 result = 1;
1822 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
1823 /* Every address operand can be reloaded to fit. */
1824 && address_operand (op, VOIDmode))
1825 result = 1;
1826 #endif
1827 break;
1829 len = CONSTRAINT_LEN (c, constraint);
1831 constraint++;
1832 while (--len && *constraint);
1833 if (len)
1834 return 0;
1837 return result;
1840 /* Given an rtx *P, if it is a sum containing an integer constant term,
1841 return the location (type rtx *) of the pointer to that constant term.
1842 Otherwise, return a null pointer. */
1844 rtx *
1845 find_constant_term_loc (rtx *p)
1847 rtx *tem;
1848 enum rtx_code code = GET_CODE (*p);
1850 /* If *P IS such a constant term, P is its location. */
1852 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1853 || code == CONST)
1854 return p;
1856 /* Otherwise, if not a sum, it has no constant term. */
1858 if (GET_CODE (*p) != PLUS)
1859 return 0;
1861 /* If one of the summands is constant, return its location. */
1863 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1864 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1865 return p;
1867 /* Otherwise, check each summand for containing a constant term. */
1869 if (XEXP (*p, 0) != 0)
1871 tem = find_constant_term_loc (&XEXP (*p, 0));
1872 if (tem != 0)
1873 return tem;
1876 if (XEXP (*p, 1) != 0)
1878 tem = find_constant_term_loc (&XEXP (*p, 1));
1879 if (tem != 0)
1880 return tem;
1883 return 0;
1886 /* Return 1 if OP is a memory reference
1887 whose address contains no side effects
1888 and remains valid after the addition
1889 of a positive integer less than the
1890 size of the object being referenced.
1892 We assume that the original address is valid and do not check it.
1894 This uses strict_memory_address_p as a subroutine, so
1895 don't use it before reload. */
1898 offsettable_memref_p (rtx op)
1900 return ((MEM_P (op))
1901 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1904 /* Similar, but don't require a strictly valid mem ref:
1905 consider pseudo-regs valid as index or base regs. */
1908 offsettable_nonstrict_memref_p (rtx op)
1910 return ((MEM_P (op))
1911 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
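/* Illustrative sketch, not part of recog.c: narrow a memory
   reference only when its address is known to survive a small
   offset.  Since this uses the strict variant, it is a post-reload
   idiom; the helper name is hypothetical.  */

static rtx
second_byte (rtx mem)
{
  if (offsettable_memref_p (mem))
    return adjust_address (mem, QImode, 1);
  return NULL_RTX;
}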
1914 /* Return 1 if Y is a memory address which contains no side effects
1915 and would remain valid after the addition of a positive integer
1916 less than the size of that mode.
1918 We assume that the original address is valid and do not check it.
1919 We do check that it is valid for narrower modes.
1921 If STRICTP is nonzero, we require a strictly valid address,
1922 for the sake of use in reload.c. */
1925 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1927 enum rtx_code ycode = GET_CODE (y);
1928 rtx z;
1929 rtx y1 = y;
1930 rtx *y2;
1931 int (*addressp) (enum machine_mode, rtx) =
1932 (strictp ? strict_memory_address_p : memory_address_p);
1933 unsigned int mode_sz = GET_MODE_SIZE (mode);
1935 if (CONSTANT_ADDRESS_P (y))
1936 return 1;
1938 /* Adjusting an offsettable address involves changing to a narrower mode.
1939 Make sure that's OK. */
1941 if (mode_dependent_address_p (y))
1942 return 0;
1944 /* ??? How much offset does an offsettable BLKmode reference need?
1945 Clearly that depends on the situation in which it's being used.
1946 However, the current situation in which we test 0xffffffff is
1947 less than ideal. Caveat user. */
1948 if (mode_sz == 0)
1949 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1951 /* If the expression contains a constant term,
1952 see if it remains valid when max possible offset is added. */
1954 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1956 int good;
1958 y1 = *y2;
1959 *y2 = plus_constant (*y2, mode_sz - 1);
1960 /* Use QImode because an odd displacement may be automatically invalid
1961 for any wider mode. But it should be valid for a single byte. */
1962 good = (*addressp) (QImode, y);
1964 /* In any case, restore old contents of memory. */
1965 *y2 = y1;
1966 return good;
1969 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1970 return 0;
1972 /* The offset added here is chosen as the maximum offset that
1973 any instruction could need to add when operating on something
1974 of the specified mode. We assume that if Y and Y+c are
1975 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1976 go inside a LO_SUM here, so we do so as well. */
1977 if (GET_CODE (y) == LO_SUM
1978 && mode != BLKmode
1979 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1980 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1981 plus_constant (XEXP (y, 1), mode_sz - 1));
1982 else
1983 z = plus_constant (y, mode_sz - 1);
1985 /* Use QImode because an odd displacement may be automatically invalid
1986 for any wider mode. But it should be valid for a single byte. */
1987 return (*addressp) (QImode, z);
1990 /* Return 1 if ADDR is an address-expression whose effect depends
1991 on the mode of the memory reference it is used in.
1993 Autoincrement addressing is a typical example of mode-dependence
1994 because the amount of the increment depends on the mode. */
1997 mode_dependent_address_p (rtx addr)
1999 /* Auto-increment addressing with anything other than post_modify
2000 or pre_modify always introduces a mode dependency. Catch such
2001 cases now instead of deferring to the target. */
2002 if (GET_CODE (addr) == PRE_INC
2003 || GET_CODE (addr) == POST_INC
2004 || GET_CODE (addr) == PRE_DEC
2005 || GET_CODE (addr) == POST_DEC)
2006 return 1;
2008 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2009 return 0;
2010 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2011 win: ATTRIBUTE_UNUSED_LABEL
2012 return 1;
2015 /* Like extract_insn, but save the insn extracted and don't extract again
2016 when called again for the same insn, expecting that recog_data still
2017 contains the valid information. This is used primarily by the gen_attr
2018 infrastructure, which often extracts the same insn again and again. */
2019 void
2020 extract_insn_cached (rtx insn)
2022 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2023 return;
2024 extract_insn (insn);
2025 recog_data.insn = insn;
2028 /* Do cached extract_insn, constrain_operands and complain about failures.
2029 Used by insn_attrtab. */
2030 void
2031 extract_constrain_insn_cached (rtx insn)
2033 extract_insn_cached (insn);
2034 if (which_alternative == -1
2035 && !constrain_operands (reload_completed))
2036 fatal_insn_not_found (insn);
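/* A minimal caller sketch (hypothetical, for illustration only --
   `example_count_output_operands' is not part of GCC): attribute code
   typically relies on the cache to avoid re-extracting the same insn
   many times.  */
#if 0
static int
example_count_output_operands (rtx insn)
{
  int i, n = 0;
  /* Cheap when the insn was already extracted and constrained.  */
  extract_constrain_insn_cached (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] == OP_OUT)
      n++;
  return n;
}
#endif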
2039 /* Do cached constrain_operands; reuse the result of a previous successful call recorded in which_alternative. */
2041 constrain_operands_cached (int strict)
2043 if (which_alternative == -1)
2044 return constrain_operands (strict);
2045 else
2046 return 1;
2049 /* Analyze INSN and fill in recog_data. */
2051 void
2052 extract_insn (rtx insn)
2054 int i;
2055 int icode;
2056 int noperands;
2057 rtx body = PATTERN (insn);
2059 recog_data.insn = NULL;
2060 recog_data.n_operands = 0;
2061 recog_data.n_alternatives = 0;
2062 recog_data.n_dups = 0;
2063 which_alternative = -1;
2065 switch (GET_CODE (body))
2067 case USE:
2068 case CLOBBER:
2069 case ASM_INPUT:
2070 case ADDR_VEC:
2071 case ADDR_DIFF_VEC:
2072 return;
2074 case SET:
2075 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2076 goto asm_insn;
2077 else
2078 goto normal_insn;
2079 case PARALLEL:
2080 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2081 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2082 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2083 goto asm_insn;
2084 else
2085 goto normal_insn;
2086 case ASM_OPERANDS:
2087 asm_insn:
2088 recog_data.n_operands = noperands = asm_noperands (body);
2089 if (noperands >= 0)
2091 /* This insn is an `asm' with operands. */
2093 /* expand_asm_operands makes sure there aren't too many operands. */
2094 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2096 /* Now get the operand values and constraints out of the insn. */
2097 decode_asm_operands (body, recog_data.operand,
2098 recog_data.operand_loc,
2099 recog_data.constraints,
2100 recog_data.operand_mode, NULL);
2101 if (noperands > 0)
2103 const char *p = recog_data.constraints[0];
2104 recog_data.n_alternatives = 1;
2105 while (*p)
2106 recog_data.n_alternatives += (*p++ == ',');
2108 break;
2110 fatal_insn_not_found (insn);
2112 default:
2113 normal_insn:
2114 /* Ordinary insn: recognize it, get the operands via insn_extract
2115 and get the constraints. */
2117 icode = recog_memoized (insn);
2118 if (icode < 0)
2119 fatal_insn_not_found (insn);
2121 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2122 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2123 recog_data.n_dups = insn_data[icode].n_dups;
2125 insn_extract (insn);
2127 for (i = 0; i < noperands; i++)
2129 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2130 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2131 /* A VOIDmode match_operand gets its mode from the real operand. */
2132 if (recog_data.operand_mode[i] == VOIDmode)
2133 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2136 for (i = 0; i < noperands; i++)
2137 recog_data.operand_type[i]
2138 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2139 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2140 : OP_IN);
2142 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
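/* For a hypothetical three-operand add pattern whose operands carry the
   constraints "=r", "r" and "rI" (illustrative values only), extract_insn
   leaves recog_data.n_operands == 3, recog_data.n_alternatives == 1, and
   operand types OP_OUT, OP_IN and OP_IN respectively.  */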
2145 /* After calling extract_insn, you can use this function to extract some
2146 information from the constraint strings into a more usable form.
2147 The collected data is stored in recog_op_alt. */
2148 void
2149 preprocess_constraints (void)
2151 int i;
2153 for (i = 0; i < recog_data.n_operands; i++)
2154 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2155 * sizeof (struct operand_alternative)));
2157 for (i = 0; i < recog_data.n_operands; i++)
2159 int j;
2160 struct operand_alternative *op_alt;
2161 const char *p = recog_data.constraints[i];
2163 op_alt = recog_op_alt[i];
2165 for (j = 0; j < recog_data.n_alternatives; j++)
2167 op_alt[j].cl = NO_REGS;
2168 op_alt[j].constraint = p;
2169 op_alt[j].matches = -1;
2170 op_alt[j].matched = -1;
2172 if (*p == '\0' || *p == ',')
2174 op_alt[j].anything_ok = 1;
2175 continue;
2178 for (;;)
2180 char c = *p;
2181 if (c == '#')
2183 c = *++p;
2184 while (c != ',' && c != '\0');
2185 if (c == ',' || c == '\0')
2187 p++;
2188 break;
2191 switch (c)
2193 case '=': case '+': case '*': case '%':
2194 case 'E': case 'F': case 'G': case 'H':
2195 case 's': case 'i': case 'n':
2196 case 'I': case 'J': case 'K': case 'L':
2197 case 'M': case 'N': case 'O': case 'P':
2198 /* These don't say anything we care about. */
2199 break;
2201 case '?':
2202 op_alt[j].reject += 6;
2203 break;
2204 case '!':
2205 op_alt[j].reject += 600;
2206 break;
2207 case '&':
2208 op_alt[j].earlyclobber = 1;
2209 break;
2211 case '0': case '1': case '2': case '3': case '4':
2212 case '5': case '6': case '7': case '8': case '9':
2214 char *end;
2215 op_alt[j].matches = strtoul (p, &end, 10);
2216 recog_op_alt[op_alt[j].matches][j].matched = i;
2217 p = end;
2219 continue;
2221 case 'm':
2222 op_alt[j].memory_ok = 1;
2223 break;
2224 case '<':
2225 op_alt[j].decmem_ok = 1;
2226 break;
2227 case '>':
2228 op_alt[j].incmem_ok = 1;
2229 break;
2230 case 'V':
2231 op_alt[j].nonoffmem_ok = 1;
2232 break;
2233 case 'o':
2234 op_alt[j].offmem_ok = 1;
2235 break;
2236 case 'X':
2237 op_alt[j].anything_ok = 1;
2238 break;
2240 case 'p':
2241 op_alt[j].is_address = 1;
2242 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2243 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2244 break;
2246 case 'g':
2247 case 'r':
2248 op_alt[j].cl =
2249 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2250 break;
2252 default:
2253 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2255 op_alt[j].memory_ok = 1;
2256 break;
2258 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2260 op_alt[j].is_address = 1;
2261 op_alt[j].cl
2262 = (reg_class_subunion
2263 [(int) op_alt[j].cl]
2264 [(int) base_reg_class (VOIDmode, ADDRESS,
2265 SCRATCH)]);
2266 break;
2269 op_alt[j].cl
2270 = (reg_class_subunion
2271 [(int) op_alt[j].cl]
2272 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2273 break;
2275 p += CONSTRAINT_LEN (c, p);
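/* Example (illustrative): for an operand whose constraint string is
   "=r,m", alternative 0 ends up with cl == GENERAL_REGS and alternative
   1 with memory_ok == 1.  A string "0,r" would instead record
   matches == 0 in alternative 0, linking this operand to operand 0.  */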
2281 /* Check the operands of an insn against the insn's operand constraints
2282 and return 1 if they are valid.
2283 The information about the insn's operands, constraints, operand modes
2284 etc. is obtained from the global variables set up by extract_insn.
2286 WHICH_ALTERNATIVE is set to a number which indicates which
2287 alternative of constraints was matched: 0 for the first alternative,
2288 1 for the next, etc.
2290 In addition, when two operands are required to match
2291 and it happens that the output operand is (reg) while the
2292 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2293 make the output operand look like the input.
2294 This is because the output operand is the one the template will print.
2296 This is used in final, just before printing the assembler code and by
2297 the routines that determine an insn's attribute.
2299 If STRICT is a positive nonzero value, it means that we have been
2300 called after reload has been completed. In that case, we must
2301 do all checks strictly. If it is zero, it means that we have been called
2302 before reload has completed. In that case, we first try to see if we can
2303 find an alternative that matches strictly. If not, we try again, this
2304 time assuming that reload will fix up the insn. This provides a "best
2305 guess" for the alternative and is used to compute attributes of insns prior
2306 to reload. A negative value of STRICT is used for this internal call. */
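/* Worked example (illustrative): for an insn
   (set (reg 1) (plus (reg 1) (reg 2)))
   whose operands 0, 1 and 2 carry the constraints "=r", "0" and "r",
   the matching constraint "0" on operand 1 is satisfied because
   operand 1 is identical to operand 0, so the alternative wins and
   which_alternative is set to 0.  */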
2308 struct funny_match
2310 int this, other;
2314 constrain_operands (int strict)
2316 const char *constraints[MAX_RECOG_OPERANDS];
2317 int matching_operands[MAX_RECOG_OPERANDS];
2318 int earlyclobber[MAX_RECOG_OPERANDS];
2319 int c;
2321 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2322 int funny_match_index;
2324 which_alternative = 0;
2325 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2326 return 1;
2328 for (c = 0; c < recog_data.n_operands; c++)
2330 constraints[c] = recog_data.constraints[c];
2331 matching_operands[c] = -1;
2336 int seen_earlyclobber_at = -1;
2337 int opno;
2338 int lose = 0;
2339 funny_match_index = 0;
2341 for (opno = 0; opno < recog_data.n_operands; opno++)
2343 rtx op = recog_data.operand[opno];
2344 enum machine_mode mode = GET_MODE (op);
2345 const char *p = constraints[opno];
2346 int offset = 0;
2347 int win = 0;
2348 int val;
2349 int len;
2351 earlyclobber[opno] = 0;
2353 /* A unary operator may be accepted by the predicate, but it
2354 is irrelevant for matching constraints. */
2355 if (UNARY_P (op))
2356 op = XEXP (op, 0);
2358 if (GET_CODE (op) == SUBREG)
2360 if (REG_P (SUBREG_REG (op))
2361 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2362 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2363 GET_MODE (SUBREG_REG (op)),
2364 SUBREG_BYTE (op),
2365 GET_MODE (op));
2366 op = SUBREG_REG (op);
2369 /* An empty constraint or empty alternative
2370 allows anything which matched the pattern. */
2371 if (*p == 0 || *p == ',')
2372 win = 1;
2375 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2377 case '\0':
2378 len = 0;
2379 break;
2380 case ',':
2381 c = '\0';
2382 break;
2384 case '?': case '!': case '*': case '%':
2385 case '=': case '+':
2386 break;
2388 case '#':
2389 /* Ignore rest of this alternative as far as
2390 constraint checking is concerned. */
2392 p++;
2393 while (*p && *p != ',');
2394 len = 0;
2395 break;
2397 case '&':
2398 earlyclobber[opno] = 1;
2399 if (seen_earlyclobber_at < 0)
2400 seen_earlyclobber_at = opno;
2401 break;
2403 case '0': case '1': case '2': case '3': case '4':
2404 case '5': case '6': case '7': case '8': case '9':
2406 /* This operand must be the same as a previous one.
2407 This kind of constraint is used for instructions such
2408 as add when they take only two operands.
2410 Note that the lower-numbered operand is passed first.
2412 If we are not testing strictly, assume that this
2413 constraint will be satisfied. */
2415 char *end;
2416 int match;
2418 match = strtoul (p, &end, 10);
2419 p = end;
2421 if (strict < 0)
2422 val = 1;
2423 else
2425 rtx op1 = recog_data.operand[match];
2426 rtx op2 = recog_data.operand[opno];
2428 /* A unary operator may be accepted by the predicate,
2429 but it is irrelevant for matching constraints. */
2430 if (UNARY_P (op1))
2431 op1 = XEXP (op1, 0);
2432 if (UNARY_P (op2))
2433 op2 = XEXP (op2, 0);
2435 val = operands_match_p (op1, op2);
2438 matching_operands[opno] = match;
2439 matching_operands[match] = opno;
2441 if (val != 0)
2442 win = 1;
2444 /* If output is *x and input is *--x, arrange later
2445 to change the output to *--x as well, since the
2446 output op is the one that will be printed. */
2447 if (val == 2 && strict > 0)
2449 funny_match[funny_match_index].this = opno;
2450 funny_match[funny_match_index++].other = match;
2453 len = 0;
2454 break;
2456 case 'p':
2457 /* p is used for address_operands. When we are called by
2458 gen_reload, no one will have checked that the address is
2459 strictly valid, i.e., that all pseudos requiring hard regs
2460 have gotten them. */
2461 if (strict <= 0
2462 || (strict_memory_address_p (recog_data.operand_mode[opno],
2463 op)))
2464 win = 1;
2465 break;
2467 /* No need to check general_operand again;
2468 it was done in insn-recog.c. Well, except that reload
2469 doesn't check the validity of its replacements, but
2470 that should only matter when there's a bug. */
2471 case 'g':
2472 /* Anything goes unless it is a REG and really has a hard reg
2473 but the hard reg is not in the class GENERAL_REGS. */
2474 if (REG_P (op))
2476 if (strict < 0
2477 || GENERAL_REGS == ALL_REGS
2478 || (reload_in_progress
2479 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2480 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2481 win = 1;
2483 else if (strict < 0 || general_operand (op, mode))
2484 win = 1;
2485 break;
2487 case 'X':
2488 /* This is used for a MATCH_SCRATCH in the cases when
2489 we don't actually need anything. So anything goes
2490 any time. */
2491 win = 1;
2492 break;
2494 case 'm':
2495 /* Memory operands must be valid, to the extent
2496 required by STRICT. */
2497 if (MEM_P (op))
2499 if (strict > 0
2500 && !strict_memory_address_p (GET_MODE (op),
2501 XEXP (op, 0)))
2502 break;
2503 if (strict == 0
2504 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2505 break;
2506 win = 1;
2508 /* Before reload, accept what reload can turn into mem. */
2509 else if (strict < 0 && CONSTANT_P (op))
2510 win = 1;
2511 /* During reload, accept a pseudo. */
2512 else if (reload_in_progress && REG_P (op)
2513 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2514 win = 1;
2515 break;
2517 case '<':
2518 if (MEM_P (op)
2519 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2520 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2521 win = 1;
2522 break;
2524 case '>':
2525 if (MEM_P (op)
2526 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2527 || GET_CODE (XEXP (op, 0)) == POST_INC))
2528 win = 1;
2529 break;
2531 case 'E':
2532 case 'F':
2533 if (GET_CODE (op) == CONST_DOUBLE
2534 || (GET_CODE (op) == CONST_VECTOR
2535 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2536 win = 1;
2537 break;
2539 case 'G':
2540 case 'H':
2541 if (GET_CODE (op) == CONST_DOUBLE
2542 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2543 win = 1;
2544 break;
2546 case 's':
2547 if (GET_CODE (op) == CONST_INT
2548 || (GET_CODE (op) == CONST_DOUBLE
2549 && GET_MODE (op) == VOIDmode))
2550 break;
2551 case 'i':
2552 if (CONSTANT_P (op))
2553 win = 1;
2554 break;
2556 case 'n':
2557 if (GET_CODE (op) == CONST_INT
2558 || (GET_CODE (op) == CONST_DOUBLE
2559 && GET_MODE (op) == VOIDmode))
2560 win = 1;
2561 break;
2563 case 'I':
2564 case 'J':
2565 case 'K':
2566 case 'L':
2567 case 'M':
2568 case 'N':
2569 case 'O':
2570 case 'P':
2571 if (GET_CODE (op) == CONST_INT
2572 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2573 win = 1;
2574 break;
2576 case 'V':
2577 if (MEM_P (op)
2578 && ((strict > 0 && ! offsettable_memref_p (op))
2579 || (strict < 0
2580 && !(CONSTANT_P (op) || MEM_P (op)))
2581 || (reload_in_progress
2582 && !(REG_P (op)
2583 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2584 win = 1;
2585 break;
2587 case 'o':
2588 if ((strict > 0 && offsettable_memref_p (op))
2589 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2590 /* Before reload, accept what reload can handle. */
2591 || (strict < 0
2592 && (CONSTANT_P (op) || MEM_P (op)))
2594 /* During reload, accept a pseudo. */
2594 || (reload_in_progress && REG_P (op)
2595 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2596 win = 1;
2597 break;
2599 default:
2601 enum reg_class cl;
2603 cl = (c == 'r'
2604 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2605 if (cl != NO_REGS)
2607 if (strict < 0
2608 || (strict == 0
2609 && REG_P (op)
2610 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2611 || (strict == 0 && GET_CODE (op) == SCRATCH)
2612 || (REG_P (op)
2613 && reg_fits_class_p (op, cl, offset, mode)))
2614 win = 1;
2616 #ifdef EXTRA_CONSTRAINT_STR
2617 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2618 win = 1;
2620 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2621 /* Every memory operand can be reloaded to fit. */
2622 && ((strict < 0 && MEM_P (op))
2623 /* Before reload, accept what reload can turn
2624 into mem. */
2625 || (strict < 0 && CONSTANT_P (op))
2626 /* During reload, accept a pseudo. */
2627 || (reload_in_progress && REG_P (op)
2628 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2629 win = 1;
2630 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2631 /* Every address operand can be reloaded to fit. */
2632 && strict < 0)
2633 win = 1;
2634 #endif
2635 break;
2638 while (p += len, c);
2640 constraints[opno] = p;
2641 /* If this operand did not win somehow,
2642 this alternative loses. */
2643 if (! win)
2644 lose = 1;
2646 /* This alternative won; the operands are ok.
2647 Change whichever operands this alternative says to change. */
2648 if (! lose)
2650 int opno, eopno;
2652 /* See if any earlyclobber operand conflicts with some other
2653 operand. */
2655 if (strict > 0 && seen_earlyclobber_at >= 0)
2656 for (eopno = seen_earlyclobber_at;
2657 eopno < recog_data.n_operands;
2658 eopno++)
2659 /* Ignore earlyclobber operands now in memory,
2660 because we would often report failure when we have
2661 two memory operands, one of which was formerly a REG. */
2662 if (earlyclobber[eopno]
2663 && REG_P (recog_data.operand[eopno]))
2664 for (opno = 0; opno < recog_data.n_operands; opno++)
2665 if ((MEM_P (recog_data.operand[opno])
2666 || recog_data.operand_type[opno] != OP_OUT)
2667 && opno != eopno
2668 /* Ignore things like match_operator operands. */
2669 && *recog_data.constraints[opno] != 0
2670 && ! (matching_operands[opno] == eopno
2671 && operands_match_p (recog_data.operand[opno],
2672 recog_data.operand[eopno]))
2673 && ! safe_from_earlyclobber (recog_data.operand[opno],
2674 recog_data.operand[eopno]))
2675 lose = 1;
2677 if (! lose)
2679 while (--funny_match_index >= 0)
2681 recog_data.operand[funny_match[funny_match_index].other]
2682 = recog_data.operand[funny_match[funny_match_index].this];
2685 return 1;
2689 which_alternative++;
2691 while (which_alternative < recog_data.n_alternatives);
2693 which_alternative = -1;
2694 /* If we are about to reject this, but we are not to test strictly,
2695 try a very loose test. Only return failure if it fails also. */
2696 if (strict == 0)
2697 return constrain_operands (-1);
2698 else
2699 return 0;
2702 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2703 is a hard reg in class CLASS when its regno is offset by OFFSET
2704 and changed to mode MODE.
2705 If REG occupies multiple hard regs, all of them must be in CLASS. */
2708 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2709 enum machine_mode mode)
2711 int regno = REGNO (operand);
2713 if (cl == NO_REGS)
2714 return 0;
2716 if (regno < FIRST_PSEUDO_REGISTER
2717 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2718 regno + offset))
2720 int sr;
2721 regno += offset;
2722 for (sr = hard_regno_nregs[regno][mode] - 1;
2723 sr > 0; sr--)
2724 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2725 regno + sr))
2726 break;
2727 return sr == 0;
2730 return 0;
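/* Example (illustrative): on a 32-bit target where hard_regno_nregs
   gives 2 for DImode, a DImode OPERAND in hard reg 4 occupies regs 4
   and 5, and reg_fits_class_p returns 1 only if both are in CL.  */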
2733 /* Split single instruction. Helper function for split_all_insns and
2734 split_all_insns_noflow. Return last insn in the sequence if successful,
2735 or NULL if unsuccessful. */
2737 static rtx
2738 split_insn (rtx insn)
2740 /* Split insns here to get max fine-grain parallelism. */
2741 rtx first = PREV_INSN (insn);
2742 rtx last = try_split (PATTERN (insn), insn, 1);
2744 if (last == insn)
2745 return NULL_RTX;
2747 /* try_split returns the NOTE that INSN became. */
2748 SET_INSN_DELETED (insn);
2750 /* ??? Coddle to md files that generate subregs in post-reload
2751 splitters instead of computing the proper hard register. */
2752 if (reload_completed && first != last)
2754 first = NEXT_INSN (first);
2755 for (;;)
2757 if (INSN_P (first))
2758 cleanup_subreg_operands (first);
2759 if (first == last)
2760 break;
2761 first = NEXT_INSN (first);
2764 return last;
2767 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2769 void
2770 split_all_insns (int upd_life)
2772 sbitmap blocks;
2773 bool changed;
2774 basic_block bb;
2776 blocks = sbitmap_alloc (last_basic_block);
2777 sbitmap_zero (blocks);
2778 changed = false;
2780 FOR_EACH_BB_REVERSE (bb)
2782 rtx insn, next;
2783 bool finish = false;
2785 for (insn = BB_HEAD (bb); !finish ; insn = next)
2787 /* Can't use `next_real_insn' because that might go across
2788 CODE_LABELS and short-out basic blocks. */
2789 next = NEXT_INSN (insn);
2790 finish = (insn == BB_END (bb));
2791 if (INSN_P (insn))
2793 rtx set = single_set (insn);
2795 /* Don't split no-op move insns. These should silently
2796 disappear later in final. Splitting such insns would
2797 break the code that handles REG_NO_CONFLICT blocks. */
2798 if (set && set_noop_p (set))
2800 /* Nops get in the way while scheduling, so delete them
2801 now if register allocation has already been done. It
2802 is too risky to try to do this before register
2803 allocation, and there are unlikely to be very many
2804 nops then anyway. */
2805 if (reload_completed)
2807 /* If the no-op set has a REG_UNUSED note, we need
2808 to update liveness information. */
2809 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2811 SET_BIT (blocks, bb->index);
2812 changed = true;
2814 /* ??? Is life info affected by deleting edges? */
2815 delete_insn_and_edges (insn);
2818 else
2820 rtx last = split_insn (insn);
2821 if (last)
2823 /* The split sequence may include a barrier, but the
2824 BB boundary we are interested in will be set to the
2825 previous one. */
2827 while (BARRIER_P (last))
2828 last = PREV_INSN (last);
2829 SET_BIT (blocks, bb->index);
2830 changed = true;
2837 if (changed)
2839 int old_last_basic_block = last_basic_block;
2841 find_many_sub_basic_blocks (blocks);
2843 if (old_last_basic_block != last_basic_block && upd_life)
2844 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2847 if (changed && upd_life)
2848 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2849 PROP_DEATH_NOTES);
2851 #ifdef ENABLE_CHECKING
2852 verify_flow_info ();
2853 #endif
2855 sbitmap_free (blocks);
2858 /* Same as split_all_insns, but do not expect CFG to be available.
2859 Used by machine dependent reorg passes. */
2861 unsigned int
2862 split_all_insns_noflow (void)
2864 rtx next, insn;
2866 for (insn = get_insns (); insn; insn = next)
2868 next = NEXT_INSN (insn);
2869 if (INSN_P (insn))
2871 /* Don't split no-op move insns. These should silently
2872 disappear later in final. Splitting such insns would
2873 break the code that handles REG_NO_CONFLICT blocks. */
2874 rtx set = single_set (insn);
2875 if (set && set_noop_p (set))
2877 /* Nops get in the way while scheduling, so delete them
2878 now if register allocation has already been done. It
2879 is too risky to try to do this before register
2880 allocation, and there are unlikely to be very many
2881 nops then anyway.
2883 ??? Should we use delete_insn when the CFG isn't valid? */
2884 if (reload_completed)
2885 delete_insn_and_edges (insn);
2887 else
2888 split_insn (insn);
2891 return 0;
2894 #ifdef HAVE_peephole2
2895 struct peep2_insn_data
2897 rtx insn;
2898 regset live_before;
2901 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2902 static int peep2_current;
2903 /* The number of instructions available to match a peep2. */
2904 int peep2_current_count;
2906 /* A non-insn marker indicating the last insn of the block.
2907 The live_before regset for this element is correct, indicating
2908 global_live_at_end for the block. */
2909 #define PEEP2_EOB pc_rtx
2911 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2912 does not exist. Used by the recognizer to find the next insn to match
2913 in a multi-insn pattern. */
2916 peep2_next_insn (int n)
2918 gcc_assert (n <= peep2_current_count);
2920 n += peep2_current;
2921 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2922 n -= MAX_INSNS_PER_PEEP2 + 1;
2924 return peep2_insn_data[n].insn;
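/* Index arithmetic example (illustrative; suppose MAX_INSNS_PER_PEEP2
   is 5, so the buffer has 6 slots): if peep2_current == 4, then
   peep2_next_insn (3) inspects slot (4 + 3) - 6 == 1, wrapping around
   the circular buffer.  */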
2927 /* Return true if REGNO is dead before the Nth non-note insn
2928 after `current'. */
2931 peep2_regno_dead_p (int ofs, int regno)
2933 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2935 ofs += peep2_current;
2936 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2937 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2939 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2941 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2944 /* Similarly for a REG. */
2947 peep2_reg_dead_p (int ofs, rtx reg)
2949 int regno, n;
2951 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2953 ofs += peep2_current;
2954 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2955 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2957 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2959 regno = REGNO (reg);
2960 n = hard_regno_nregs[regno][GET_MODE (reg)];
2961 while (--n >= 0)
2962 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2963 return 0;
2964 return 1;
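/* For a multi-word REG (illustrative): if REG is a DImode value in hard
   reg 4 spanning regs 4 and 5, peep2_reg_dead_p returns 1 only when
   neither reg 4 nor reg 5 is live before the selected insn.  */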
2967 /* Try to find a hard register of mode MODE, matching the register class in
2968 CLASS_STR, which is available at the beginning of the FROM'th insn of the
2969 current peephole window and remains available until the end of the TO'th
2970 insn (FROM and TO are offsets from the current insn, as for
2971 peep2_next_insn).
2972 Registers that already have bits set in REG_SET will not be considered.
2974 If an appropriate register is available, it will be returned and the
2975 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2976 returned. */
2979 peep2_find_free_register (int from, int to, const char *class_str,
2980 enum machine_mode mode, HARD_REG_SET *reg_set)
2982 static int search_ofs;
2983 enum reg_class cl;
2984 HARD_REG_SET live;
2985 int i;
2987 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2988 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2990 from += peep2_current;
2991 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2992 from -= MAX_INSNS_PER_PEEP2 + 1;
2993 to += peep2_current;
2994 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2995 to -= MAX_INSNS_PER_PEEP2 + 1;
2997 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2998 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3000 while (from != to)
3002 HARD_REG_SET this_live;
3004 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3005 from = 0;
3006 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3007 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3008 IOR_HARD_REG_SET (live, this_live);
3011 cl = (class_str[0] == 'r' ? GENERAL_REGS
3012 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3014 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3016 int raw_regno, regno, success, j;
3018 /* Distribute the free registers as much as possible. */
3019 raw_regno = search_ofs + i;
3020 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3021 raw_regno -= FIRST_PSEUDO_REGISTER;
3022 #ifdef REG_ALLOC_ORDER
3023 regno = reg_alloc_order[raw_regno];
3024 #else
3025 regno = raw_regno;
3026 #endif
3028 /* Don't allocate fixed registers. */
3029 if (fixed_regs[regno])
3030 continue;
3031 /* Make sure the register is of the right class. */
3032 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3033 continue;
3034 /* And can support the mode we need. */
3035 if (! HARD_REGNO_MODE_OK (regno, mode))
3036 continue;
3037 /* And that using it won't force an extra save/restore: skip call-saved registers not already used in this function. */
3038 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3039 continue;
3040 /* And we don't clobber traceback for noreturn functions. */
3041 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3042 && (! reload_completed || frame_pointer_needed))
3043 continue;
3045 success = 1;
3046 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3048 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3049 || TEST_HARD_REG_BIT (live, regno + j))
3051 success = 0;
3052 break;
3055 if (success)
3057 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3058 SET_HARD_REG_BIT (*reg_set, regno + j);
3060 /* Start the next search with the next register. */
3061 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3062 raw_regno = 0;
3063 search_ofs = raw_regno;
3065 return gen_rtx_REG (mode, regno);
3069 search_ofs = 0;
3070 return NULL_RTX;
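/* A usage sketch (hypothetical; real callers live in generated peephole2
   code and target .md files): grab a free SImode scratch register that
   survives from the first to the second insn of the window.  */
#if 0
static rtx
example_find_scratch (void)
{
  HARD_REG_SET used;
  CLEAR_HARD_REG_SET (used);
  /* Offsets 0 and 1 name the first two insns of the peep2 window.  */
  return peep2_find_free_register (0, 1, "r", SImode, &used);
}
#endif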
3073 /* Perform the peephole2 optimization pass. */
3075 static void
3076 peephole2_optimize (void)
3078 rtx insn, prev;
3079 regset live;
3080 int i;
3081 basic_block bb;
3082 #ifdef HAVE_conditional_execution
3083 sbitmap blocks;
3084 bool changed;
3085 #endif
3086 bool do_cleanup_cfg = false;
3087 bool do_global_life_update = false;
3088 bool do_rebuild_jump_labels = false;
3090 /* Initialize the regsets we're going to use. */
3091 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3092 peep2_insn_data[i].live_before = ALLOC_REG_SET (&reg_obstack);
3093 live = ALLOC_REG_SET (&reg_obstack);
3095 #ifdef HAVE_conditional_execution
3096 blocks = sbitmap_alloc (last_basic_block);
3097 sbitmap_zero (blocks);
3098 changed = false;
3099 #else
3100 count_or_remove_death_notes (NULL, 1);
3101 #endif
3103 FOR_EACH_BB_REVERSE (bb)
3105 struct propagate_block_info *pbi;
3106 reg_set_iterator rsi;
3107 unsigned int j;
3109 /* Indicate that all slots except the last hold invalid data. */
3110 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3111 peep2_insn_data[i].insn = NULL_RTX;
3112 peep2_current_count = 0;
3114 /* Indicate that the last slot contains live_after data. */
3115 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3116 peep2_current = MAX_INSNS_PER_PEEP2;
3118 /* Start up propagation. */
3119 COPY_REG_SET (live, bb->il.rtl->global_live_at_end);
3120 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3122 #ifdef HAVE_conditional_execution
3123 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3124 #else
3125 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3126 #endif
3128 for (insn = BB_END (bb); ; insn = prev)
3130 prev = PREV_INSN (insn);
3131 if (INSN_P (insn))
3133 rtx try, before_try, x;
3134 int match_len;
3135 rtx note;
3136 bool was_call = false;
3138 /* Record this insn. */
3139 if (--peep2_current < 0)
3140 peep2_current = MAX_INSNS_PER_PEEP2;
3141 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3142 && peep2_insn_data[peep2_current].insn == NULL_RTX)
3143 peep2_current_count++;
3144 peep2_insn_data[peep2_current].insn = insn;
3145 propagate_one_insn (pbi, insn);
3146 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3148 if (RTX_FRAME_RELATED_P (insn))
3150 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3151 substitution would lose the
3152 REG_FRAME_RELATED_EXPR that is attached. */
3153 peep2_current_count = 0;
3154 try = NULL;
3156 else
3157 /* Match the peephole. */
3158 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3160 if (try != NULL)
3162 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3163 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3164 cfg-related call notes. */
3165 for (i = 0; i <= match_len; ++i)
3167 int j;
3168 rtx old_insn, new_insn, note;
3170 j = i + peep2_current;
3171 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3172 j -= MAX_INSNS_PER_PEEP2 + 1;
3173 old_insn = peep2_insn_data[j].insn;
3174 if (!CALL_P (old_insn))
3175 continue;
3176 was_call = true;
3178 new_insn = try;
3179 while (new_insn != NULL_RTX)
3181 if (CALL_P (new_insn))
3182 break;
3183 new_insn = NEXT_INSN (new_insn);
3186 gcc_assert (new_insn != NULL_RTX);
3188 CALL_INSN_FUNCTION_USAGE (new_insn)
3189 = CALL_INSN_FUNCTION_USAGE (old_insn);
3191 for (note = REG_NOTES (old_insn);
3192 note;
3193 note = XEXP (note, 1))
3194 switch (REG_NOTE_KIND (note))
3196 case REG_NORETURN:
3197 case REG_SETJMP:
3198 REG_NOTES (new_insn)
3199 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3200 XEXP (note, 0),
3201 REG_NOTES (new_insn));
3202 default:
3203 /* Discard all other reg notes. */
3204 break;
3207 /* Croak if there is another call in the sequence. */
3208 while (++i <= match_len)
3210 j = i + peep2_current;
3211 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3212 j -= MAX_INSNS_PER_PEEP2 + 1;
3213 old_insn = peep2_insn_data[j].insn;
3214 gcc_assert (!CALL_P (old_insn));
3216 break;
3219 i = match_len + peep2_current;
3220 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3221 i -= MAX_INSNS_PER_PEEP2 + 1;
3223 note = find_reg_note (peep2_insn_data[i].insn,
3224 REG_EH_REGION, NULL_RTX);
3226 /* Replace the old sequence with the new. */
3227 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3228 INSN_LOCATOR (peep2_insn_data[i].insn));
3229 before_try = PREV_INSN (insn);
3230 delete_insn_chain (insn, peep2_insn_data[i].insn);
3232 /* Re-insert the EH_REGION notes. */
3233 if (note || (was_call && nonlocal_goto_handler_labels))
3235 edge eh_edge;
3236 edge_iterator ei;
3238 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3239 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3240 break;
3242 for (x = try ; x != before_try ; x = PREV_INSN (x))
3243 if (CALL_P (x)
3244 || (flag_non_call_exceptions
3245 && may_trap_p (PATTERN (x))
3246 && !find_reg_note (x, REG_EH_REGION, NULL)))
3248 if (note)
3249 REG_NOTES (x)
3250 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3251 XEXP (note, 0),
3252 REG_NOTES (x));
3254 if (x != BB_END (bb) && eh_edge)
3256 edge nfte, nehe;
3257 int flags;
3259 nfte = split_block (bb, x);
3260 flags = (eh_edge->flags
3261 & (EDGE_EH | EDGE_ABNORMAL));
3262 if (CALL_P (x))
3263 flags |= EDGE_ABNORMAL_CALL;
3264 nehe = make_edge (nfte->src, eh_edge->dest,
3265 flags);
3267 nehe->probability = eh_edge->probability;
3268 nfte->probability
3269 = REG_BR_PROB_BASE - nehe->probability;
3271 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3272 #ifdef HAVE_conditional_execution
3273 SET_BIT (blocks, nfte->dest->index);
3274 changed = true;
3275 #endif
3276 bb = nfte->src;
3277 eh_edge = nehe;
3281 /* The substitution may have turned a possibly trapping insn
3282 into a non-trapping one.  Zap any dummy outgoing edges. */
3283 do_cleanup_cfg |= purge_dead_edges (bb);
3286 #ifdef HAVE_conditional_execution
3287 /* With conditional execution, we cannot back up the
3288 live information so easily, since the conditional
3289 death data structures are not so self-contained.
3290 So record that we've made a modification to this
3291 block and update life information at the end. */
3292 SET_BIT (blocks, bb->index);
3293 changed = true;
3295 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3296 peep2_insn_data[i].insn = NULL_RTX;
3297 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3298 peep2_current_count = 0;
3299 #else
3300 /* Back up lifetime information past the end of the
3301 newly created sequence. */
3302 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3303 i = 0;
3304 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3306 /* Update life information for the new sequence. */
3307 x = try;
3310 if (INSN_P (x))
3312 if (--i < 0)
3313 i = MAX_INSNS_PER_PEEP2;
3314 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3315 && peep2_insn_data[i].insn == NULL_RTX)
3316 peep2_current_count++;
3317 peep2_insn_data[i].insn = x;
3318 propagate_one_insn (pbi, x);
3319 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3321 x = PREV_INSN (x);
3323 while (x != prev);
3325 /* ??? Should verify that LIVE now matches what we
3326 had before the new sequence. */
3328 peep2_current = i;
3329 #endif
3331 /* If we generated a jump instruction, it won't have
3332 JUMP_LABEL set. Recompute after we're done. */
3333 for (x = try; x != before_try; x = PREV_INSN (x))
3334 if (JUMP_P (x))
3336 do_rebuild_jump_labels = true;
3337 break;
3342 if (insn == BB_HEAD (bb))
3343 break;
3346 /* Some peepholes can decide they don't need one or more of their
3347 inputs.  If this happens, local life update is not enough. */
3348 EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->il.rtl->global_live_at_start, live,
3349 0, j, rsi)
3351 do_global_life_update = true;
3352 break;
3355 free_propagate_block_info (pbi);
3358 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3359 FREE_REG_SET (peep2_insn_data[i].live_before);
3360 FREE_REG_SET (live);
3362 if (do_rebuild_jump_labels)
3363 rebuild_jump_labels (get_insns ());
3365 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3366 we've changed global life since exception handlers are no longer
3367 reachable. */
3368 if (do_cleanup_cfg)
3370 cleanup_cfg (0);
3371 do_global_life_update = true;
3373 if (do_global_life_update)
3374 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3375 #ifdef HAVE_conditional_execution
3376 else
3378 count_or_remove_death_notes (blocks, 1);
3379 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3381 sbitmap_free (blocks);
3382 #endif
3384 #endif /* HAVE_peephole2 */
3386 /* Common predicates for use with define_bypass. */
3388 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3389 data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
3390 must each be either a single_set or a PARALLEL with SETs inside. */
3393 store_data_bypass_p (rtx out_insn, rtx in_insn)
3395 rtx out_set, in_set;
3396 rtx out_pat, in_pat;
3397 rtx out_exp, in_exp;
3398 int i, j;
3400 in_set = single_set (in_insn);
3401 if (in_set)
3403 if (!MEM_P (SET_DEST (in_set)))
3404 return false;
3406 out_set = single_set (out_insn);
3407 if (out_set)
3409 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3410 return false;
3412 else
3414 out_pat = PATTERN (out_insn);
3416 if (GET_CODE (out_pat) != PARALLEL)
3417 return false;
3419 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3421 out_exp = XVECEXP (out_pat, 0, i);
3423 if (GET_CODE (out_exp) == CLOBBER)
3424 continue;
3426 gcc_assert (GET_CODE (out_exp) == SET);
3428 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3429 return false;
3433 else
3435 in_pat = PATTERN (in_insn);
3436 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3438 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3440 in_exp = XVECEXP (in_pat, 0, i);
3442 if (GET_CODE (in_exp) == CLOBBER)
3443 continue;
3445 gcc_assert (GET_CODE (in_exp) == SET);
3447 if (!MEM_P (SET_DEST (in_exp)))
3448 return false;
3450 out_set = single_set (out_insn);
3451 if (out_set)
3453 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3454 return false;
3456 else
3458 out_pat = PATTERN (out_insn);
3459 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3461 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3463 out_exp = XVECEXP (out_pat, 0, j);
3465 if (GET_CODE (out_exp) == CLOBBER)
3466 continue;
3468 gcc_assert (GET_CODE (out_exp) == SET);
3470 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3471 return false;
3477 return true;
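/* Example (illustrative): with
   OUT_INSN: (set (reg 1) (plus (reg 2) (reg 3)))
   IN_INSN:  (set (mem (reg 4)) (reg 1))
   the dependency is on the stored data, so the result is true.  If
   IN_INSN were instead (set (mem (reg 1)) (reg 5)), reg 1 would feed
   the store address and the result would be false.  */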
3480 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3481 condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
3482 set or a multiple-set PARALLEL; IN_INSN should be a single_set for a meaningful
3483 result, but for convenience of insn categorization it may be any JUMP or CALL insn. */
3486 if_test_bypass_p (rtx out_insn, rtx in_insn)
3488 rtx out_set, in_set;
3490 in_set = single_set (in_insn);
3491 if (! in_set)
3493 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3494 return false;
3497 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3498 return false;
3499 in_set = SET_SRC (in_set);
3501 out_set = single_set (out_insn);
3502 if (out_set)
3504 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3505 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3506 return false;
3508 else
3510 rtx out_pat;
3511 int i;
3513 out_pat = PATTERN (out_insn);
3514 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3516 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3518 rtx exp = XVECEXP (out_pat, 0, i);
3520 if (GET_CODE (exp) == CLOBBER)
3521 continue;
3523 gcc_assert (GET_CODE (exp) == SET);
3525 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3526 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3527 return false;
3531 return true;
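/* Example (illustrative): with
   IN_INSN: (set (reg 1) (if_then_else (eq (reg 2) (const_int 0))
                                       (reg 3) (reg 4)))
   an OUT_INSN setting reg 2 feeds only the condition, so the result is
   true; an OUT_INSN setting reg 3 feeds the THEN arm, so the result is
   false.  */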
3534 static bool
3535 gate_handle_peephole2 (void)
3537 return (optimize > 0 && flag_peephole2);
3540 static unsigned int
3541 rest_of_handle_peephole2 (void)
3543 #ifdef HAVE_peephole2
3544 peephole2_optimize ();
3545 #endif
3546 return 0;
3549 struct tree_opt_pass pass_peephole2 =
3551 "peephole2", /* name */
3552 gate_handle_peephole2, /* gate */
3553 rest_of_handle_peephole2, /* execute */
3554 NULL, /* sub */
3555 NULL, /* next */
3556 0, /* static_pass_number */
3557 TV_PEEPHOLE2, /* tv_id */
3558 0, /* properties_required */
3559 0, /* properties_provided */
3560 0, /* properties_destroyed */
3561 0, /* todo_flags_start */
3562 TODO_dump_func, /* todo_flags_finish */
3563 'z' /* letter */
3566 static unsigned int
3567 rest_of_handle_split_all_insns (void)
3569 split_all_insns (1);
3570 return 0;
3573 struct tree_opt_pass pass_split_all_insns =
3575 "split1", /* name */
3576 NULL, /* gate */
3577 rest_of_handle_split_all_insns, /* execute */
3578 NULL, /* sub */
3579 NULL, /* next */
3580 0, /* static_pass_number */
3581 0, /* tv_id */
3582 0, /* properties_required */
3583 0, /* properties_provided */
3584 0, /* properties_destroyed */
3585 0, /* todo_flags_start */
3586 TODO_dump_func, /* todo_flags_finish */
3587 0 /* letter */
3590 /* The placement of the splitting that we do for shorten_branches
3591 depends on whether regstack is used by the target or not. */
3592 static bool
3593 gate_do_final_split (void)
3595 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3596 return 1;
3597 #else
3598 return 0;
3599 #endif
3602 struct tree_opt_pass pass_split_for_shorten_branches =
3604 "split3", /* name */
3605 gate_do_final_split, /* gate */
3606 split_all_insns_noflow, /* execute */
3607 NULL, /* sub */
3608 NULL, /* next */
3609 0, /* static_pass_number */
3610 TV_SHORTEN_BRANCH, /* tv_id */
3611 0, /* properties_required */
3612 0, /* properties_provided */
3613 0, /* properties_destroyed */
3614 0, /* todo_flags_start */
3615 TODO_dump_func, /* todo_flags_finish */
3616 0 /* letter */
3620 static bool
3621 gate_handle_split_before_regstack (void)
3623 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3624 /* If flow2 creates new instructions which need splitting,
3625 and scheduling after reload is not done, they might not be
3626 split until final, which does not allow splitting
3627 if HAVE_ATTR_length is defined. */
3628 # ifdef INSN_SCHEDULING
3629 return (optimize && !flag_schedule_insns_after_reload);
3630 # else
3631 return (optimize);
3632 # endif
3633 #else
3634 return 0;
3635 #endif
3638 struct tree_opt_pass pass_split_before_regstack =
3640 "split2", /* name */
3641 gate_handle_split_before_regstack, /* gate */
3642 rest_of_handle_split_all_insns, /* execute */
3643 NULL, /* sub */
3644 NULL, /* next */
3645 0, /* static_pass_number */
3646 TV_SHORTEN_BRANCH, /* tv_id */
3647 0, /* properties_required */
3648 0, /* properties_provided */
3649 0, /* properties_destroyed */
3650 0, /* todo_flags_start */
3651 TODO_dump_func, /* todo_flags_finish */
3652 0 /* letter */