[official-gcc.git] / gcc / recog.c
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "timevar.h"
#include "tree-pass.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
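
/* For illustration, a minimal sketch of the common calling pattern,
   assuming the caller already has a single-SET insn and a replacement
   source expression; EXAMPLE_SWAP_SRC and its arguments are hypothetical
   and not used elsewhere in this file.  */

static int
example_swap_src (rtx insn, rtx new_src)
{
  rtx set = single_set (insn);

  /* Queue the change with IN_GROUP nonzero, then validate and commit
     (or undo) the whole group at once.  */
  validate_change (insn, &SET_SRC (set), new_src, 1);
  return apply_change_group ();
}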
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change and
   verified with verify_changes.  Update the BB_DIRTY flags of the affected
   blocks, and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  basic_block bb;

  for (i = 0; i < num_changes; i++)
    if (changes[i].object
        && INSN_P (changes[i].object)
        && (bb = BLOCK_FOR_INSN (changes[i].object)))
      bb->flags |= BB_DIRTY;

  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
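
/* A sketch of the lower-level protocol that validate_change,
   verify_changes, confirm_change_group and cancel_changes implement.
   It is equivalent to apply_change_group, but a caller can insert its
   own checks between verification and commit.  EXAMPLE_TRY_CHANGE and
   its arguments are illustrative only.  */

static int
example_try_change (rtx insn, rtx *loc, rtx new)
{
  /* Queue the change without committing it.  */
  validate_change (insn, loc, new, 1);

  /* Tentatively re-recognize; either keep everything or roll back.  */
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  cancel_changes (0);
  return 0;
}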
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
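
/* For illustration only: replacing every use of one pseudo with
   another in a single insn, keeping INSN valid or leaving it
   untouched.  EXAMPLE_SUBSTITUTE_REG and the registers involved are
   hypothetical, supplied by the caller.  */

static int
example_substitute_reg (rtx insn, rtx old_reg, rtx new_reg)
{
  /* Either all occurrences are replaced and INSN still matches its
     pattern, or no change is made at all.  */
  return validate_replace_rtx (old_reg, new_reg, insn);
}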
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
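
/* A sketch of how a combining pass typically consumes the routine
   above; EXAMPLE_NOTE_SINGLE_USE, DEF_INSN and DEST are hypothetical
   and stand for whatever the caller is working on.  */

static void
example_note_single_use (rtx def_insn, rtx dest)
{
  rtx use_insn;
  rtx *use_loc = find_single_use (dest, def_insn, &use_insn);
  rtx set = single_set (def_insn);

  if (use_loc != 0 && set != 0)
    /* DEST dies in USE_INSN and *USE_LOC is the only place it is read,
       so it is safe to try substituting DEST's value there.  */
    validate_replace_rtx (dest, SET_SRC (set), use_insn);
}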
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
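
/* Illustrative only: this is the shape of the call that generated
   insn-recog code makes for a machine-description pattern such as
   (match_operand:SI 1 "general_operand" "g").  The helper name is
   hypothetical.  */

static int
example_operand_is_general_si (rtx op)
{
  return general_operand (op, SImode);
}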
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
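
/* For illustration, when no PUSH_ROUNDING adjustment applies, the
   operand accepted above is a MEM whose address applies STACK_PUSH_CODE
   to the stack pointer, e.g. (mem:SI (pre_dec:SI (reg sp))) on a
   STACK_GROWS_DOWNWARD target.  This hypothetical helper builds such an
   operand and checks it.  */

static int
example_build_push (enum machine_mode mode)
{
  rtx addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);

  return push_operand (gen_rtx_MEM (mode, addr), mode);
}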
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
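
/* A sketch of the usual calling convention: the caller sizes the
   arrays with asm_noperands and lets decode_asm_operands fill them in.
   EXAMPLE_DECODE is hypothetical; BODY is assumed to be an asm insn
   body of one of the shapes handled above.  */

static const char *
example_decode (rtx body, rtx *operands, const char **constraints)
{
  int noperands = asm_noperands (body);

  if (noperands < 0)
    return 0;
  /* Passing NULL for the locations and modes skips that information,
     exactly as check_asm_operands does earlier in this file.  */
  return decode_asm_operands (body, operands, NULL, constraints, NULL);
}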
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
2004 /* Like extract_insn, but save insn extracted and don't extract again, when
2005 called again for the same insn expecting that recog_data still contain the
2006 valid information. This is used primary by gen_attr infrastructure that
2007 often does extract insn again and again. */
2008 void
2009 extract_insn_cached (rtx insn)
2011 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2012 return;
2013 extract_insn (insn);
2014 recog_data.insn = insn;
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
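
/* Illustrative sketch, not part of this file: how generated attribute
   code typically uses the cached variants before reading recog_data.
   The function name is invented for illustration.  */
#if 0
static int
example_operand0_is_mem (rtx insn)
{
  extract_constrain_insn_cached (insn);
  return MEM_P (recog_data.operand[0]);
}
#endif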
/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.insn = NULL;
  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  which_alternative = -1;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many
             operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* A VOIDmode match_operand gets its mode from the real
             operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
}
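
/* For example, given constraints "=r" (operand 0) and "r" (operand 1),
   the classification loop above marks operand 0 as OP_OUT and operand 1
   as OP_IN; a "+r" constraint would yield OP_INOUT instead.  */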
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
                                 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].cl = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[j].matches = strtoul (p, &end, 10);
                    recog_op_alt[op_alt[j].matches][j].matched = i;
                    p = end;
                  }
                  continue;

                case 'm':
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].is_address = 1;
                  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
                      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
                  break;

                case 'g':
                case 'r':
                  op_alt[j].cl =
                   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
                  break;

                default:
                  if (EXTRA_MEMORY_CONSTRAINT (c, p))
                    {
                      op_alt[j].memory_ok = 1;
                      break;
                    }
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
                    {
                      op_alt[j].is_address = 1;
                      op_alt[j].cl
                        = (reg_class_subunion
                           [(int) op_alt[j].cl]
                           [(int) base_reg_class (VOIDmode, ADDRESS,
                                                  SCRATCH)]);
                      break;
                    }

                  op_alt[j].cl
                    = (reg_class_subunion
                       [(int) op_alt[j].cl]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
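
/* For example, after preprocessing the two-alternative constraint string
   "=r,m" for operand I, recog_op_alt[I][0].cl is GENERAL_REGS and
   recog_op_alt[I][1].memory_ok is 1; the '=' is skipped as carrying no
   per-alternative information.  */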
/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code, and by
   the routines that determine an insn's attributes.

   If STRICT is positive, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this, other;
};

int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;
  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;
      for (opno = 0; opno < recog_data.n_operands; opno++)
        {
          rtx op = recog_data.operand[opno];
          enum machine_mode mode = GET_MODE (op);
          const char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;
          int len;

          earlyclobber[opno] = 0;

          /* A unary operator may be accepted by the predicate, but it
             is irrelevant for matching constraints.  */
          if (UNARY_P (op))
            op = XEXP (op, 0);

          if (GET_CODE (op) == SUBREG)
            {
              if (REG_P (SUBREG_REG (op))
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
                                              GET_MODE (SUBREG_REG (op)),
                                              SUBREG_BYTE (op),
                                              GET_MODE (op));
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;

          do
            switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
              {
              case '\0':
                len = 0;
                break;
              case ',':
                c = '\0';
                break;

              case '?': case '!': case '*': case '%':
              case '=': case '+':
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                do
                  p++;
                while (*p && *p != ',');
                len = 0;
                break;

              case '&':
                earlyclobber[opno] = 1;
                if (seen_earlyclobber_at < 0)
                  seen_earlyclobber_at = opno;
                break;

              case '0': case '1': case '2': case '3': case '4':
              case '5': case '6': case '7': case '8': case '9':
                {
                  /* This operand must be the same as a previous one.
                     This kind of constraint is used for instructions such
                     as add when they take only two operands.

                     Note that the lower-numbered operand is passed first.

                     If we are not testing strictly, assume that this
                     constraint will be satisfied.  */

                  char *end;
                  int match;

                  match = strtoul (p, &end, 10);
                  p = end;

                  if (strict < 0)
                    val = 1;
                  else
                    {
                      rtx op1 = recog_data.operand[match];
                      rtx op2 = recog_data.operand[opno];

                      /* A unary operator may be accepted by the predicate,
                         but it is irrelevant for matching constraints.  */
                      if (UNARY_P (op1))
                        op1 = XEXP (op1, 0);
                      if (UNARY_P (op2))
                        op2 = XEXP (op2, 0);

                      val = operands_match_p (op1, op2);
                    }

                  matching_operands[opno] = match;
                  matching_operands[match] = opno;

                  if (val != 0)
                    win = 1;

                  /* If output is *x and input is *--x, arrange later
                     to change the output to *--x as well, since the
                     output op is the one that will be printed.  */
                  if (val == 2 && strict > 0)
                    {
                      funny_match[funny_match_index].this = opno;
                      funny_match[funny_match_index++].other = match;
                    }
                }
                len = 0;
                break;
              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  */
                if (strict <= 0
                    || (strict_memory_address_p (recog_data.operand_mode[opno],
                                                 op)))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  Well, except that reload
                   doesn't check the validity of its replacements, but
                   that should only matter when there's a bug.  */
              case 'g':
                /* Anything goes unless it is a REG that really has a hard reg
                   and the hard reg is not in the class GENERAL_REGS.  */
                if (REG_P (op))
                  {
                    if (strict < 0
                        || GENERAL_REGS == ALL_REGS
                        || (reload_in_progress
                            && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                        || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                      win = 1;
                  }
                else if (strict < 0 || general_operand (op, mode))
                  win = 1;
                break;

              case 'X':
                /* This is used for a MATCH_SCRATCH in the cases when
                   we don't actually need anything.  So anything goes
                   any time.  */
                win = 1;
                break;

              case 'm':
                /* Memory operands must be valid, to the extent
                   required by STRICT.  */
                if (MEM_P (op))
                  {
                    if (strict > 0
                        && !strict_memory_address_p (GET_MODE (op),
                                                     XEXP (op, 0)))
                      break;
                    if (strict == 0
                        && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
                      break;
                    win = 1;
                  }
                /* Before reload, accept what reload can turn into mem.  */
                else if (strict < 0 && CONSTANT_P (op))
                  win = 1;
                /* During reload, accept a pseudo.  */
                else if (reload_in_progress && REG_P (op)
                         && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                  win = 1;
                break;

              case '<':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                        || GET_CODE (XEXP (op, 0)) == POST_DEC))
                  win = 1;
                break;

              case '>':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_INC
                        || GET_CODE (XEXP (op, 0)) == POST_INC))
                  win = 1;
                break;

              case 'E':
              case 'F':
                if (GET_CODE (op) == CONST_DOUBLE
                    || (GET_CODE (op) == CONST_VECTOR
                        && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
                  win = 1;
                break;

              case 'G':
              case 'H':
                if (GET_CODE (op) == CONST_DOUBLE
                    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
                  win = 1;
                break;

              case 's':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  break;
                /* Fall through to 'i': 's' accepts any constant that is
                   not an explicit integer.  */
              case 'i':
                if (CONSTANT_P (op))
                  win = 1;
                break;

              case 'n':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  win = 1;
                break;

              case 'I':
              case 'J':
              case 'K':
              case 'L':
              case 'M':
              case 'N':
              case 'O':
              case 'P':
                if (GET_CODE (op) == CONST_INT
                    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
                  win = 1;
                break;

              case 'V':
                if (MEM_P (op)
                    && ((strict > 0 && ! offsettable_memref_p (op))
                        || (strict < 0
                            && !(CONSTANT_P (op) || MEM_P (op)))
                        || (reload_in_progress
                            && !(REG_P (op)
                                 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
                  win = 1;
                break;

              case 'o':
                if ((strict > 0 && offsettable_memref_p (op))
                    || (strict == 0 && offsettable_nonstrict_memref_p (op))
                    /* Before reload, accept what reload can handle.  */
                    || (strict < 0
                        && (CONSTANT_P (op) || MEM_P (op)))
                    /* During reload, accept a pseudo.  */
                    || (reload_in_progress && REG_P (op)
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;
              default:
                {
                  enum reg_class cl;

                  cl = (c == 'r'
                        ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
                  if (cl != NO_REGS)
                    {
                      if (strict < 0
                          || (strict == 0
                              && REG_P (op)
                              && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                          || (strict == 0 && GET_CODE (op) == SCRATCH)
                          || (REG_P (op)
                              && reg_fits_class_p (op, cl, offset, mode)))
                        win = 1;
                    }
#ifdef EXTRA_CONSTRAINT_STR
                  else if (EXTRA_CONSTRAINT_STR (op, c, p))
                    win = 1;

                  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
                           /* Every memory operand can be reloaded to fit.  */
                           && ((strict < 0 && MEM_P (op))
                               /* Before reload, accept what reload can turn
                                  into mem.  */
                               || (strict < 0 && CONSTANT_P (op))
                               /* During reload, accept a pseudo.  */
                               || (reload_in_progress && REG_P (op)
                                   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
                    win = 1;
                  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
                           /* Every address operand can be reloaded to fit.  */
                           && strict < 0)
                    win = 1;
#endif
                  break;
                }
              }
          while (p += len, c);
          constraints[opno] = p;

          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;

          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0 && seen_earlyclobber_at >= 0)
            for (eopno = seen_earlyclobber_at;
                 eopno < recog_data.n_operands;
                 eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && REG_P (recog_data.operand[eopno]))
                for (opno = 0; opno < recog_data.n_operands; opno++)
                  if ((MEM_P (recog_data.operand[opno])
                       || recog_data.operand_type[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *recog_data.constraints[opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && operands_match_p (recog_data.operand[opno],
                                                 recog_data.operand[eopno]))
                      && ! safe_from_earlyclobber (recog_data.operand[opno],
                                                   recog_data.operand[eopno]))
                    lose = 1;

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_data.operand[funny_match[funny_match_index].other]
                    = recog_data.operand[funny_match[funny_match_index].this];
                }

              return 1;
            }
        }

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
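
/* Illustrative sketch, not part of this file: the canonical calling
   sequence, mirroring what final and the attribute routines do; STRICT
   is 1 after reload and 0 before it.  */
#if 0
  extract_insn (insn);
  if (! constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
  /* On success, which_alternative identifies the matching alternative.  */
#endif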
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CL when its regno is offset by OFFSET
   and changed to mode MODE.
   If OPERAND occupies multiple hard regs, all of them must be in CL.  */

int
reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
                  enum machine_mode mode)
{
  int regno = REGNO (operand);

  if (cl == NO_REGS)
    return 0;

  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
                            regno + offset))
    {
      int sr;
      regno += offset;
      for (sr = hard_regno_nregs[regno][mode] - 1;
           sr > 0; sr--)
        if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
                                 regno + sr))
          break;
      return sr == 0;
    }

  return 0;
}
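
/* For example, on a 32-bit target where a DImode value occupies two
   consecutive hard registers, the loop above checks that both halves of
   the pair are in CL, not merely the first one.  */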
/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);

  if (last == insn)
    return NULL_RTX;

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
        {
          if (INSN_P (first))
            cleanup_subreg_operands (first);
          if (first == last)
            break;
          first = NEXT_INSN (first);
        }
    }
  return last;
}
/* Split all insns in the function.  If UPD_LIFE, update life info after.  */

void
split_all_insns (int upd_life)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      for (insn = BB_HEAD (bb); !finish ; insn = next)
        {
          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          finish = (insn == BB_END (bb));
          if (INSN_P (insn))
            {
              rtx set = single_set (insn);

              /* Don't split no-op move insns.  These should silently
                 disappear later in final.  Splitting such insns would
                 break the code that handles REG_NO_CONFLICT blocks.  */
              if (set && set_noop_p (set))
                {
                  /* Nops get in the way while scheduling, so delete them
                     now if register allocation has already been done.  It
                     is too risky to try to do this before register
                     allocation, and there are unlikely to be very many
                     nops then anyway.  */
                  if (reload_completed)
                    {
                      /* If the no-op set has a REG_UNUSED note, we need
                         to update liveness information.  */
                      if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
                        {
                          SET_BIT (blocks, bb->index);
                          changed = true;
                        }
                      /* ??? Is life info affected by deleting edges?  */
                      delete_insn_and_edges (insn);
                    }
                }
              else
                {
                  rtx last = split_insn (insn);
                  if (last)
                    {
                      /* The split sequence may include a barrier, but the
                         BB boundary we are interested in will be set to
                         the previous one.  */

                      while (BARRIER_P (last))
                        last = PREV_INSN (last);
                      SET_BIT (blocks, bb->index);
                      changed = true;
                    }
                }
            }
        }
    }

  if (changed)
    {
      int old_last_basic_block = last_basic_block;

      find_many_sub_basic_blocks (blocks);

      if (old_last_basic_block != last_basic_block && upd_life)
        blocks = sbitmap_resize (blocks, last_basic_block, 1);
    }

  if (changed && upd_life)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
                      PROP_DEATH_NOTES);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}
/* Same as split_all_insns, but do not expect the CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
        {
          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles REG_NO_CONFLICT blocks.  */
          rtx set = single_set (insn);
          if (set && set_noop_p (set))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyway.

                 ??? Should we use delete_insn when the CFG isn't valid?  */
              if (reload_completed)
                delete_insn_and_edges (insn);
            }
          else
            split_insn (insn);
        }
    }
  return 0;
}
#ifdef HAVE_peephole2

struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;
/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   global_live_at_end for the block.  */
#define PEEP2_EOB pc_rtx

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n += peep2_current;
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;

  return peep2_insn_data[n].insn;
}
/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs += peep2_current;
  if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
    ofs -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}
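
/* Illustrative (hypothetical) machine-description fragment showing how
   these predicates are used: OFS counts insns from the start of the
   matched sequence, so for a two-insn match, offset 2 means "after the
   whole sequence".  A load through a dead temporary can then collapse
   to a direct load:

     (define_peephole2
       [(set (match_operand:SI 2 "register_operand" "")
             (match_operand:SI 1 "memory_operand" ""))
        (set (match_operand:SI 0 "register_operand" "")
             (match_dup 2))]
       "peep2_reg_dead_p (2, operands[2])"
       [(set (match_dup 0) (match_dup 1))])  */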
/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available from the beginning of the insn at offset
   FROM and remains available until the end of the insn at offset TO.
   TO may equal FROM, in which case the only condition is that the
   register must be available before the insn at offset FROM.

   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
                          enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class cl;
  HARD_REG_SET live;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from += peep2_current;
  if (from >= MAX_INSNS_PER_PEEP2 + 1)
    from -= MAX_INSNS_PER_PEEP2 + 1;
  to += peep2_current;
  if (to >= MAX_INSNS_PER_PEEP2 + 1)
    to -= MAX_INSNS_PER_PEEP2 + 1;

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      if (++from >= MAX_INSNS_PER_PEEP2 + 1)
        from = 0;
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
        : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
        raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
        continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
        continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
        continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! regs_ever_live[regno])
        continue;
      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
          && (! reload_completed || frame_pointer_needed))
        continue;

      success = 1;
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
        {
          if (TEST_HARD_REG_BIT (*reg_set, regno + j)
              || TEST_HARD_REG_BIT (live, regno + j))
            {
              success = 0;
              break;
            }
        }
      if (success)
        {
          for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
            SET_HARD_REG_BIT (*reg_set, regno + j);

          /* Start the next search with the next register.  */
          if (++raw_regno >= FIRST_PSEUDO_REGISTER)
            raw_regno = 0;
          search_ofs = raw_regno;

          return gen_rtx_REG (mode, regno);
        }
    }

  search_ofs = 0;
  return NULL_RTX;
}
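
/* Illustrative note: this function is normally reached via a
   (match_scratch ...) in a define_peephole2, as in this fragment
   (similar to the example in the GCC internals documentation), which
   asks for a free SImode general register to break a mem-to-mem move
   into two register moves:

     (define_peephole2
       [(match_scratch:SI 2 "r")
        (set (match_operand:SI 0 "memory_operand" "")
             (match_operand:SI 1 "memory_operand" ""))]
       "!rtx_equal_p (operands[0], operands[1])"
       [(set (match_dup 2) (match_dup 1))
        (set (match_dup 0) (match_dup 2))])  */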
/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx insn, prev;
  regset live;
  int i;
  basic_block bb;
#ifdef HAVE_conditional_execution
  sbitmap blocks;
  bool changed;
#endif
  bool do_cleanup_cfg = false;
  bool do_global_life_update = false;
  bool do_rebuild_jump_labels = false;

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = ALLOC_REG_SET (&reg_obstack);
  live = ALLOC_REG_SET (&reg_obstack);

#ifdef HAVE_conditional_execution
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;
#else
  count_or_remove_death_notes (NULL, 1);
#endif

  FOR_EACH_BB_REVERSE (bb)
    {
      struct propagate_block_info *pbi;
      reg_set_iterator rsi;
      unsigned int j;

      /* Indicate that all slots except the last hold invalid data.  */
      for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
        peep2_insn_data[i].insn = NULL_RTX;
      peep2_current_count = 0;

      /* Indicate that the last slot contains live_after data.  */
      peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
      peep2_current = MAX_INSNS_PER_PEEP2;

      /* Start up propagation.  */
      COPY_REG_SET (live, bb->il.rtl->global_live_at_end);
      COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);

#ifdef HAVE_conditional_execution
      pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
#else
      pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
#endif
      for (insn = BB_END (bb); ; insn = prev)
        {
          prev = PREV_INSN (insn);
          if (INSN_P (insn))
            {
              rtx try, before_try, x;
              int match_len;
              rtx note;
              bool was_call = false;

              /* Record this insn.  */
              if (--peep2_current < 0)
                peep2_current = MAX_INSNS_PER_PEEP2;
              if (peep2_current_count < MAX_INSNS_PER_PEEP2
                  && peep2_insn_data[peep2_current].insn == NULL_RTX)
                peep2_current_count++;
              peep2_insn_data[peep2_current].insn = insn;
              propagate_one_insn (pbi, insn);
              COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);

              if (RTX_FRAME_RELATED_P (insn))
                {
                  /* If an insn has RTX_FRAME_RELATED_P set, peephole
                     substitution would lose the
                     REG_FRAME_RELATED_EXPR that is attached.  */
                  peep2_current_count = 0;
                  try = NULL;
                }
              else
                /* Match the peephole.  */
                try = peephole2_insns (PATTERN (insn), insn, &match_len);

              if (try != NULL)
                {
                  /* If we are splitting a CALL_INSN, look for the CALL_INSN
                     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
                     cfg-related call notes.  */
                  for (i = 0; i <= match_len; ++i)
                    {
                      int j;
                      rtx old_insn, new_insn, note;

                      j = i + peep2_current;
                      if (j >= MAX_INSNS_PER_PEEP2 + 1)
                        j -= MAX_INSNS_PER_PEEP2 + 1;
                      old_insn = peep2_insn_data[j].insn;
                      if (!CALL_P (old_insn))
                        continue;
                      was_call = true;

                      new_insn = try;
                      while (new_insn != NULL_RTX)
                        {
                          if (CALL_P (new_insn))
                            break;
                          new_insn = NEXT_INSN (new_insn);
                        }

                      gcc_assert (new_insn != NULL_RTX);

                      CALL_INSN_FUNCTION_USAGE (new_insn)
                        = CALL_INSN_FUNCTION_USAGE (old_insn);

                      for (note = REG_NOTES (old_insn);
                           note;
                           note = XEXP (note, 1))
                        switch (REG_NOTE_KIND (note))
                          {
                          case REG_NORETURN:
                          case REG_SETJMP:
                            REG_NOTES (new_insn)
                              = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
                                                   XEXP (note, 0),
                                                   REG_NOTES (new_insn));
                          default:
                            /* Discard all other reg notes.  */
                            break;
                          }

                      /* Croak if there is another call in the sequence.  */
                      while (++i <= match_len)
                        {
                          j = i + peep2_current;
                          if (j >= MAX_INSNS_PER_PEEP2 + 1)
                            j -= MAX_INSNS_PER_PEEP2 + 1;
                          old_insn = peep2_insn_data[j].insn;
                          gcc_assert (!CALL_P (old_insn));
                        }
                      break;
                    }
                  i = match_len + peep2_current;
                  if (i >= MAX_INSNS_PER_PEEP2 + 1)
                    i -= MAX_INSNS_PER_PEEP2 + 1;

                  note = find_reg_note (peep2_insn_data[i].insn,
                                        REG_EH_REGION, NULL_RTX);

                  /* Replace the old sequence with the new.  */
                  try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
                                                INSN_LOCATOR (peep2_insn_data[i].insn));
                  before_try = PREV_INSN (insn);
                  delete_insn_chain (insn, peep2_insn_data[i].insn);

                  /* Re-insert the EH_REGION notes.  */
                  if (note || (was_call && nonlocal_goto_handler_labels))
                    {
                      edge eh_edge;
                      edge_iterator ei;

                      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
                        if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
                          break;

                      for (x = try ; x != before_try ; x = PREV_INSN (x))
                        if (CALL_P (x)
                            || (flag_non_call_exceptions
                                && may_trap_p (PATTERN (x))
                                && !find_reg_note (x, REG_EH_REGION, NULL)))
                          {
                            if (note)
                              REG_NOTES (x)
                                = gen_rtx_EXPR_LIST (REG_EH_REGION,
                                                     XEXP (note, 0),
                                                     REG_NOTES (x));

                            if (x != BB_END (bb) && eh_edge)
                              {
                                edge nfte, nehe;
                                int flags;

                                nfte = split_block (bb, x);
                                flags = (eh_edge->flags
                                         & (EDGE_EH | EDGE_ABNORMAL));
                                if (CALL_P (x))
                                  flags |= EDGE_ABNORMAL_CALL;
                                nehe = make_edge (nfte->src, eh_edge->dest,
                                                  flags);

                                nehe->probability = eh_edge->probability;
                                nfte->probability
                                  = REG_BR_PROB_BASE - nehe->probability;

                                do_cleanup_cfg |= purge_dead_edges (nfte->dest);
#ifdef HAVE_conditional_execution
                                SET_BIT (blocks, nfte->dest->index);
                                changed = true;
#endif
                                bb = nfte->src;
                                eh_edge = nehe;
                              }
                          }

                      /* We may have turned a possibly trapping insn into a
                         non-trapping one.  Zap any dummy outgoing edges.  */
                      do_cleanup_cfg |= purge_dead_edges (bb);
                    }

#ifdef HAVE_conditional_execution
                  /* With conditional execution, we cannot back up the
                     live information so easily, since the conditional
                     death data structures are not so self-contained.
                     So record that we've made a modification to this
                     block and update life information at the end.  */
                  SET_BIT (blocks, bb->index);
                  changed = true;

                  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
                    peep2_insn_data[i].insn = NULL_RTX;
                  peep2_insn_data[peep2_current].insn = PEEP2_EOB;
                  peep2_current_count = 0;
#else
                  /* Back up lifetime information past the end of the
                     newly created sequence.  */
                  if (++i >= MAX_INSNS_PER_PEEP2 + 1)
                    i = 0;
                  COPY_REG_SET (live, peep2_insn_data[i].live_before);

                  /* Update life information for the new sequence.  */
                  x = try;
                  do
                    {
                      if (INSN_P (x))
                        {
                          if (--i < 0)
                            i = MAX_INSNS_PER_PEEP2;
                          if (peep2_current_count < MAX_INSNS_PER_PEEP2
                              && peep2_insn_data[i].insn == NULL_RTX)
                            peep2_current_count++;
                          peep2_insn_data[i].insn = x;
                          propagate_one_insn (pbi, x);
                          COPY_REG_SET (peep2_insn_data[i].live_before, live);
                        }
                      x = PREV_INSN (x);
                    }
                  while (x != prev);

                  /* ??? Should verify that LIVE now matches what we
                     had before the new sequence.  */

                  peep2_current = i;
#endif

                  /* If we generated a jump instruction, it won't have
                     JUMP_LABEL set.  Recompute after we're done.  */
                  for (x = try; x != before_try; x = PREV_INSN (x))
                    if (JUMP_P (x))
                      {
                        do_rebuild_jump_labels = true;
                        break;
                      }
                }
            }
          if (insn == BB_HEAD (bb))
            break;
        }

      /* Some peepholes can decide they don't need one or more of their
         inputs.  If this happens, local life update is not enough.  */
      EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->il.rtl->global_live_at_start, live,
                                      0, j, rsi)
        {
          do_global_life_update = true;
          break;
        }

      free_propagate_block_info (pbi);
    }

  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    FREE_REG_SET (peep2_insn_data[i].live_before);
  FREE_REG_SET (live);

  if (do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());

  /* If we eliminated EH edges, we may be able to merge blocks.  Further,
     we've changed global life since exception handlers are no longer
     reachable.  */
  if (do_cleanup_cfg)
    {
      cleanup_cfg (0);
      do_global_life_update = true;
    }
  if (do_global_life_update)
    update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
#ifdef HAVE_conditional_execution
  else
    {
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }
  sbitmap_free (blocks);
#endif
}
#endif /* HAVE_peephole2 */
/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;
  rtx out_pat, in_pat;
  rtx out_exp, in_exp;
  int i, j;

  in_set = single_set (in_insn);
  if (in_set)
    {
      if (!MEM_P (SET_DEST (in_set)))
        return false;

      out_set = single_set (out_insn);
      if (out_set)
        {
          if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
            return false;
        }
      else
        {
          out_pat = PATTERN (out_insn);

          if (GET_CODE (out_pat) != PARALLEL)
            return false;

          for (i = 0; i < XVECLEN (out_pat, 0); i++)
            {
              out_exp = XVECEXP (out_pat, 0, i);

              if (GET_CODE (out_exp) == CLOBBER)
                continue;

              gcc_assert (GET_CODE (out_exp) == SET);

              if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
                return false;
            }
        }
    }
  else
    {
      in_pat = PATTERN (in_insn);
      gcc_assert (GET_CODE (in_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (in_pat, 0); i++)
        {
          in_exp = XVECEXP (in_pat, 0, i);

          if (GET_CODE (in_exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (in_exp) == SET);

          if (!MEM_P (SET_DEST (in_exp)))
            return false;

          out_set = single_set (out_insn);
          if (out_set)
            {
              if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
                return false;
            }
          else
            {
              out_pat = PATTERN (out_insn);
              gcc_assert (GET_CODE (out_pat) == PARALLEL);

              for (j = 0; j < XVECLEN (out_pat, 0); j++)
                {
                  out_exp = XVECEXP (out_pat, 0, j);

                  if (GET_CODE (out_exp) == CLOBBER)
                    continue;

                  gcc_assert (GET_CODE (out_exp) == SET);

                  if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
                    return false;
                }
            }
        }
    }

  return true;
}
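
/* Illustrative (hypothetical) machine-description fragment: a target
   whose producers can forward results into the store data port with a
   latency of 1 might declare

     (define_bypass 1 "hypothetical_alu" "hypothetical_store"
                    "store_data_bypass_p")

   where "hypothetical_alu" and "hypothetical_store" stand for insn
   reservation names defined elsewhere in that target's pipeline
   description; the guard restricts the bypass to dependencies on the
   store data rather than the store address.  */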
/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a
   single or multiple set; IN_INSN should be single_set for truth, but for
   convenience of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
          || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
        return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
        {
          rtx exp = XVECEXP (out_pat, 0, i);

          if (GET_CODE (exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (exp) == SET);

          /* Check the destination of this SET; out_set is null in this
             branch, so it must not be dereferenced here.  */
          if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
              || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
            return false;
        }
    }

  return true;
}
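
/* Illustrative (hypothetical) use: if a conditional-move consumer can
   accept the tested value early because only the IF_THEN_ELSE condition
   depends on it, a target might declare

     (define_bypass 1 "hypothetical_compare" "hypothetical_cmov"
                    "if_test_bypass_p")

   with both reservation names defined elsewhere in that target's
   pipeline description.  */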
static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}

static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}

struct tree_opt_pass pass_peephole2 =
{
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'z'                                   /* letter */
};

static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns (1);
  return 0;
}

struct tree_opt_pass pass_split_all_insns =
{
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

struct tree_opt_pass pass_split_for_shorten_branches =
{
  "split3",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_SHORTEN_BRANCH,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

struct tree_opt_pass pass_split_before_regstack =
{
  "split2",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_SHORTEN_BRANCH,                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};