/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "timevar.h"
#include "tree-pass.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
static rtx *find_single_use_1 (rtx, rtx *);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = alloca (noperands * sizeof (rtx));
  constraints = alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (rtx object, rtx *loc, rtx new, int in_group)
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change and
   verified with verify_changes.  Update the BB_DIRTY flags of the affected
   blocks, and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  basic_block bb;

  for (i = 0; i < num_changes; i++)
    if (changes[i].object
        && INSN_P (changes[i].object)
        && (bb = BLOCK_FOR_INSN (changes[i].object)))
      bb->flags |= BB_DIRTY;

  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
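
/* A minimal usage sketch (illustrative only, not part of the original file):
   queue two edits to INSN as one group so that apply_change_group either
   commits both or rolls both back.  Assumes INSN has a single SET whose
   source is a PLUS; the helper name is hypothetical.  */
#if 0
static int
example_swap_plus_operands (rtx insn)
{
  rtx set = single_set (insn);
  rtx src, op0, op1;

  if (!set || GET_CODE (SET_SRC (set)) != PLUS)
    return 0;

  src = SET_SRC (set);
  op0 = XEXP (src, 0);
  op1 = XEXP (src, 1);

  /* IN_GROUP is 1, so both changes are only queued here...  */
  validate_change (insn, &XEXP (src, 0), op1, 1);
  validate_change (insn, &XEXP (src, 1), op0, 1);

  /* ...and validated together; nonzero iff INSN still recognizes.  */
  return apply_change_group ();
}
#endif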
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
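
/* Sketch of the partial-rollback idiom built on num_validated_changes,
   verify_changes and cancel_changes: experiment with one more change on
   top of an already-verified group, keeping the earlier changes on
   failure.  Hypothetical helper, for illustration.  */
#if 0
static int
example_try_extra_change (rtx insn, rtx *loc, rtx new)
{
  int saved = num_validated_changes ();

  validate_change (insn, loc, new, 1);
  if (verify_changes (saved))
    return 1;

  /* Undo only the experiment; earlier queued changes survive.  */
  cancel_changes (saved);
  return 0;
}
#endif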
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
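
/* Sketch of a typical validate_replace_rtx caller: propagating a copy
   "(set DEST SRC)" into a later use.  Hypothetical helper; real
   copy-propagation passes do considerably more checking.  */
#if 0
static int
example_propagate_copy (rtx copy_insn, rtx use_insn)
{
  rtx set = single_set (copy_insn);

  if (!set || !REG_P (SET_DEST (set)))
    return 0;

  /* The replacement sticks only if USE_INSN is still recognizable.  */
  return validate_replace_rtx (SET_DEST (set), SET_SRC (set), use_insn);
}
#endif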
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || !REG_P (dest))
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && !LABEL_P (next);
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
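
/* Sketch: how a combine-style pass might ask whether the value set by
   INSN dies in exactly one later insn.  Hypothetical helper, for
   illustration.  */
#if 0
static rtx
example_single_user (rtx insn)
{
  rtx set = single_set (insn);
  rtx user;
  rtx *usep;

  if (!set || !REG_P (SET_DEST (set)))
    return NULL_RTX;

  usep = find_single_use (SET_DEST (set), insn, &user);
  return usep ? user : NULL_RTX;
}
#endif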
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
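
/* Sketch: predicates in this family compose.  For instance, a
   hypothetical "register or memory, but not constant" test is just
   general_operand plus a CONSTANT_P filter, which is exactly what
   nonimmediate_operand below does.  */
#if 0
static int
example_reg_or_mem_operand (rtx op, enum machine_mode mode)
{
  return general_operand (op, mode) && !CONSTANT_P (op);
}
#endif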
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
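
/* Sketch: the simplest shape push_operand accepts on a
   STACK_GROWS_DOWNWARD target without PUSH_ROUNDING is
   (mem:M (pre_dec (reg sp))).  Hypothetical construction, for
   illustration only; the mode choice is arbitrary.  */
#if 0
static rtx
example_push_mem (void)
{
  rtx addr = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  rtx mem = gen_rtx_MEM (SImode, addr);

  gcc_assert (push_operand (mem, SImode));
  return mem;
}
#endif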
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, enum machine_mode *modes)
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.
         Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
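
/* Sketch: walking an asm body's operands with asm_noperands and
   decode_asm_operands, much as check_asm_operands does above.
   Hypothetical debug helper.  */
#if 0
static void
example_dump_asm_constraints (rtx body)
{
  int n = asm_noperands (body);
  rtx *ops;
  const char **cons;
  int i;

  if (n <= 0)
    return;

  ops = alloca (n * sizeof (rtx));
  cons = alloca (n * sizeof (char *));
  decode_asm_operands (body, ops, NULL, cons, NULL);

  for (i = 0; i < n; i++)
    fprintf (stderr, "operand %d: constraint \"%s\"\n", i, cons[i]);
}
#endif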
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          do
            constraint++;
          while (ISDIGIT (*constraint));
          if (! result)
            result = -1;
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
                   /* Every memory operand can be reloaded to fit.  */
                   && memory_operand (op, VOIDmode))
            result = 1;
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
                   /* Every address operand can be reloaded to fit.  */
                   && address_operand (op, VOIDmode))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
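
/* Sketch: adjusting the constant term of an address in place, the same
   kind of temporary surgery offsettable_address_p performs below.
   Hypothetical helper, for illustration.  */
#if 0
static int
example_bump_constant_term (rtx *addr, HOST_WIDE_INT delta)
{
  rtx *constp = find_constant_term_loc (addr);

  if (constp == 0 || GET_CODE (*constp) != CONST_INT)
    return 0;

  *constp = GEN_INT (INTVAL (*constp) + delta);
  return 1;
}
#endif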
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */)
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Like extract_insn, but save the insn extracted and don't extract again
   when called again for the same insn, expecting that recog_data still
   contains valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
2007 /* Analyze INSN and fill in recog_data. */
2009 void
2010 extract_insn (rtx insn)
2012 int i;
2013 int icode;
2014 int noperands;
2015 rtx body = PATTERN (insn);
2017 recog_data.insn = NULL;
2018 recog_data.n_operands = 0;
2019 recog_data.n_alternatives = 0;
2020 recog_data.n_dups = 0;
2021 which_alternative = -1;
2023 switch (GET_CODE (body))
2025 case USE:
2026 case CLOBBER:
2027 case ASM_INPUT:
2028 case ADDR_VEC:
2029 case ADDR_DIFF_VEC:
2030 return;
2032 case SET:
2033 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2034 goto asm_insn;
2035 else
2036 goto normal_insn;
2037 case PARALLEL:
2038 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2039 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2040 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2041 goto asm_insn;
2042 else
2043 goto normal_insn;
2044 case ASM_OPERANDS:
2045 asm_insn:
2046 recog_data.n_operands = noperands = asm_noperands (body);
2047 if (noperands >= 0)
2049 /* This insn is an `asm' with operands. */
2051 /* expand_asm_operands makes sure there aren't too many operands. */
2052 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2054 /* Now get the operand values and constraints out of the insn. */
2055 decode_asm_operands (body, recog_data.operand,
2056 recog_data.operand_loc,
2057 recog_data.constraints,
2058 recog_data.operand_mode);
2059 if (noperands > 0)
2061 const char *p = recog_data.constraints[0];
2062 recog_data.n_alternatives = 1;
2063 while (*p)
2064 recog_data.n_alternatives += (*p++ == ',');
2066 break;
2068 fatal_insn_not_found (insn);
2070 default:
2071 normal_insn:
2072 /* Ordinary insn: recognize it, get the operands via insn_extract
2073 and get the constraints. */
2075 icode = recog_memoized (insn);
2076 if (icode < 0)
2077 fatal_insn_not_found (insn);
2079 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2080 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2081 recog_data.n_dups = insn_data[icode].n_dups;
2083 insn_extract (insn);
2085 for (i = 0; i < noperands; i++)
2087 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2088 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2089 /* VOIDmode match_operands get their mode from the real operand. */
2090 if (recog_data.operand_mode[i] == VOIDmode)
2091 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2094 for (i = 0; i < noperands; i++)
2095 recog_data.operand_type[i]
2096 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2097 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2098 : OP_IN);
2100 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
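/* Illustrative sketch (not part of the original source): a debugging
   helper showing how the recog_data fields filled in above can be
   consumed.  Assumes INSN is recognizable.  */
#if 0
static void
debug_recog_operands (rtx insn)
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    fprintf (stderr, "op %d: constraint \"%s\", mode %s, %s\n",
	     i, recog_data.constraints[i],
	     GET_MODE_NAME (recog_data.operand_mode[i]),
	     recog_data.operand_type[i] == OP_OUT ? "output"
	     : recog_data.operand_type[i] == OP_INOUT ? "in/out" : "input");
}
#endif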
2103 /* After calling extract_insn, you can use this function to extract some
2104 information from the constraint strings into a more usable form.
2105 The collected data is stored in recog_op_alt. */
2106 void
2107 preprocess_constraints (void)
2109 int i;
2111 for (i = 0; i < recog_data.n_operands; i++)
2112 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2113 * sizeof (struct operand_alternative)));
2115 for (i = 0; i < recog_data.n_operands; i++)
2117 int j;
2118 struct operand_alternative *op_alt;
2119 const char *p = recog_data.constraints[i];
2121 op_alt = recog_op_alt[i];
2123 for (j = 0; j < recog_data.n_alternatives; j++)
2125 op_alt[j].cl = NO_REGS;
2126 op_alt[j].constraint = p;
2127 op_alt[j].matches = -1;
2128 op_alt[j].matched = -1;
2130 if (*p == '\0' || *p == ',')
2132 op_alt[j].anything_ok = 1;
2133 continue;
2136 for (;;)
2138 char c = *p;
2139 if (c == '#')
2141 c = *++p;
2142 while (c != ',' && c != '\0');
2143 if (c == ',' || c == '\0')
2145 p++;
2146 break;
2149 switch (c)
2151 case '=': case '+': case '*': case '%':
2152 case 'E': case 'F': case 'G': case 'H':
2153 case 's': case 'i': case 'n':
2154 case 'I': case 'J': case 'K': case 'L':
2155 case 'M': case 'N': case 'O': case 'P':
2156 /* These don't say anything we care about. */
2157 break;
2159 case '?':
2160 op_alt[j].reject += 6;
2161 break;
2162 case '!':
2163 op_alt[j].reject += 600;
2164 break;
2165 case '&':
2166 op_alt[j].earlyclobber = 1;
2167 break;
2169 case '0': case '1': case '2': case '3': case '4':
2170 case '5': case '6': case '7': case '8': case '9':
2172 char *end;
2173 op_alt[j].matches = strtoul (p, &end, 10);
2174 recog_op_alt[op_alt[j].matches][j].matched = i;
2175 p = end;
2177 continue;
2179 case 'm':
2180 op_alt[j].memory_ok = 1;
2181 break;
2182 case '<':
2183 op_alt[j].decmem_ok = 1;
2184 break;
2185 case '>':
2186 op_alt[j].incmem_ok = 1;
2187 break;
2188 case 'V':
2189 op_alt[j].nonoffmem_ok = 1;
2190 break;
2191 case 'o':
2192 op_alt[j].offmem_ok = 1;
2193 break;
2194 case 'X':
2195 op_alt[j].anything_ok = 1;
2196 break;
2198 case 'p':
2199 op_alt[j].is_address = 1;
2200 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2201 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2202 break;
2204 case 'g':
2205 case 'r':
2206 op_alt[j].cl =
2207 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2208 break;
2210 default:
2211 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2213 op_alt[j].memory_ok = 1;
2214 break;
2216 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2218 op_alt[j].is_address = 1;
2219 op_alt[j].cl
2220 = (reg_class_subunion
2221 [(int) op_alt[j].cl]
2222 [(int) base_reg_class (VOIDmode, ADDRESS,
2223 SCRATCH)]);
2224 break;
2227 op_alt[j].cl
2228 = (reg_class_subunion
2229 [(int) op_alt[j].cl]
2230 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2231 break;
2233 p += CONSTRAINT_LEN (c, p);
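/* Illustrative sketch (not part of the original source): after
   extract_insn and preprocess_constraints, the summarized per-alternative
   data can be inspected like this.  */
#if 0
static void
debug_op_alt (void)
{
  int i, j;

  for (i = 0; i < recog_data.n_operands; i++)
    for (j = 0; j < recog_data.n_alternatives; j++)
      {
	struct operand_alternative *oa = &recog_op_alt[i][j];

	fprintf (stderr, "op %d alt %d: class %d%s%s%s\n", i, j,
		 (int) oa->cl,
		 oa->memory_ok ? ", mem ok" : "",
		 oa->earlyclobber ? ", earlyclobber" : "",
		 oa->anything_ok ? ", anything ok" : "");
      }
}
#endif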
2239 /* Check the operands of an insn against the insn's operand constraints
2240 and return 1 if they are valid.
2241 The information about the insn's operands, constraints, operand modes
2242 etc. is obtained from the global variables set up by extract_insn.
2244 WHICH_ALTERNATIVE is set to a number which indicates which
2245 alternative of constraints was matched: 0 for the first alternative,
2246 1 for the next, etc.
2248 In addition, when two operands are required to match
2249 and it happens that the output operand is (reg) while the
2250 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2251 make the output operand look like the input.
2252 This is because the output operand is the one the template will print.
2254 This is used in final, just before printing the assembler code and by
2255 the routines that determine an insn's attribute.
2257 If STRICT is positive, it means that we have been
2258 called after reload has completed. In that case, we must
2259 do all checks strictly. If it is zero, it means that we have been called
2260 before reload has completed. In that case, we first try to see if we can
2261 find an alternative that matches strictly. If not, we try again, this
2262 time assuming that reload will fix up the insn. This provides a "best
2263 guess" for the alternative and is used to compute attributes of insns prior
2264 to reload. A negative value of STRICT is used for this internal call. */
2266 struct funny_match
2268 int this, other;
2272 constrain_operands (int strict)
2274 const char *constraints[MAX_RECOG_OPERANDS];
2275 int matching_operands[MAX_RECOG_OPERANDS];
2276 int earlyclobber[MAX_RECOG_OPERANDS];
2277 int c;
2279 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2280 int funny_match_index;
2282 which_alternative = 0;
2283 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2284 return 1;
2286 for (c = 0; c < recog_data.n_operands; c++)
2288 constraints[c] = recog_data.constraints[c];
2289 matching_operands[c] = -1;
2294 int seen_earlyclobber_at = -1;
2295 int opno;
2296 int lose = 0;
2297 funny_match_index = 0;
2299 for (opno = 0; opno < recog_data.n_operands; opno++)
2301 rtx op = recog_data.operand[opno];
2302 enum machine_mode mode = GET_MODE (op);
2303 const char *p = constraints[opno];
2304 int offset = 0;
2305 int win = 0;
2306 int val;
2307 int len;
2309 earlyclobber[opno] = 0;
2311 /* A unary operator may be accepted by the predicate, but it
2312 is irrelevant for matching constraints. */
2313 if (UNARY_P (op))
2314 op = XEXP (op, 0);
2316 if (GET_CODE (op) == SUBREG)
2318 if (REG_P (SUBREG_REG (op))
2319 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2320 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2321 GET_MODE (SUBREG_REG (op)),
2322 SUBREG_BYTE (op),
2323 GET_MODE (op));
2324 op = SUBREG_REG (op);
2327 /* An empty constraint or empty alternative
2328 allows anything which matched the pattern. */
2329 if (*p == 0 || *p == ',')
2330 win = 1;
2333 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2335 case '\0':
2336 len = 0;
2337 break;
2338 case ',':
2339 c = '\0';
2340 break;
2342 case '?': case '!': case '*': case '%':
2343 case '=': case '+':
2344 break;
2346 case '#':
2347 /* Ignore rest of this alternative as far as
2348 constraint checking is concerned. */
2350 p++;
2351 while (*p && *p != ',');
2352 len = 0;
2353 break;
2355 case '&':
2356 earlyclobber[opno] = 1;
2357 if (seen_earlyclobber_at < 0)
2358 seen_earlyclobber_at = opno;
2359 break;
2361 case '0': case '1': case '2': case '3': case '4':
2362 case '5': case '6': case '7': case '8': case '9':
2364 /* This operand must be the same as a previous one.
2365 This kind of constraint is used for instructions such
2366 as add when they take only two operands.
2368 Note that the lower-numbered operand is passed first.
2370 If we are not testing strictly, assume that this
2371 constraint will be satisfied. */
2373 char *end;
2374 int match;
2376 match = strtoul (p, &end, 10);
2377 p = end;
2379 if (strict < 0)
2380 val = 1;
2381 else
2383 rtx op1 = recog_data.operand[match];
2384 rtx op2 = recog_data.operand[opno];
2386 /* A unary operator may be accepted by the predicate,
2387 but it is irrelevant for matching constraints. */
2388 if (UNARY_P (op1))
2389 op1 = XEXP (op1, 0);
2390 if (UNARY_P (op2))
2391 op2 = XEXP (op2, 0);
2393 val = operands_match_p (op1, op2);
2396 matching_operands[opno] = match;
2397 matching_operands[match] = opno;
2399 if (val != 0)
2400 win = 1;
2402 /* If output is *x and input is *--x, arrange later
2403 to change the output to *--x as well, since the
2404 output op is the one that will be printed. */
2405 if (val == 2 && strict > 0)
2407 funny_match[funny_match_index].this = opno;
2408 funny_match[funny_match_index++].other = match;
2411 len = 0;
2412 break;
2414 case 'p':
2415 /* p is used for address_operands. When we are called by
2416 gen_reload, no one will have checked that the address is
2417 strictly valid, i.e., that all pseudos requiring hard regs
2418 have gotten them. */
2419 if (strict <= 0
2420 || (strict_memory_address_p (recog_data.operand_mode[opno],
2421 op)))
2422 win = 1;
2423 break;
2425 /* No need to check general_operand again;
2426 it was done in insn-recog.c. Well, except that reload
2427 doesn't check the validity of its replacements, but
2428 that should only matter when there's a bug. */
2429 case 'g':
2430 /* Anything goes unless it is a REG and really has a hard reg
2431 but the hard reg is not in the class GENERAL_REGS. */
2432 if (REG_P (op))
2434 if (strict < 0
2435 || GENERAL_REGS == ALL_REGS
2436 || (reload_in_progress
2437 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2438 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2439 win = 1;
2441 else if (strict < 0 || general_operand (op, mode))
2442 win = 1;
2443 break;
2445 case 'X':
2446 /* This is used for a MATCH_SCRATCH in the cases when
2447 we don't actually need anything. So anything goes
2448 any time. */
2449 win = 1;
2450 break;
2452 case 'm':
2453 /* Memory operands must be valid, to the extent
2454 required by STRICT. */
2455 if (MEM_P (op))
2457 if (strict > 0
2458 && !strict_memory_address_p (GET_MODE (op),
2459 XEXP (op, 0)))
2460 break;
2461 if (strict == 0
2462 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2463 break;
2464 win = 1;
2466 /* Before reload, accept what reload can turn into mem. */
2467 else if (strict < 0 && CONSTANT_P (op))
2468 win = 1;
2469 /* During reload, accept a pseudo. */
2470 else if (reload_in_progress && REG_P (op)
2471 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2472 win = 1;
2473 break;
2475 case '<':
2476 if (MEM_P (op)
2477 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2478 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2479 win = 1;
2480 break;
2482 case '>':
2483 if (MEM_P (op)
2484 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2485 || GET_CODE (XEXP (op, 0)) == POST_INC))
2486 win = 1;
2487 break;
2489 case 'E':
2490 case 'F':
2491 if (GET_CODE (op) == CONST_DOUBLE
2492 || (GET_CODE (op) == CONST_VECTOR
2493 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2494 win = 1;
2495 break;
2497 case 'G':
2498 case 'H':
2499 if (GET_CODE (op) == CONST_DOUBLE
2500 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2501 win = 1;
2502 break;
2504 case 's':
2505 if (GET_CODE (op) == CONST_INT
2506 || (GET_CODE (op) == CONST_DOUBLE
2507 && GET_MODE (op) == VOIDmode))
2508 break;
2509 case 'i':
2510 if (CONSTANT_P (op))
2511 win = 1;
2512 break;
2514 case 'n':
2515 if (GET_CODE (op) == CONST_INT
2516 || (GET_CODE (op) == CONST_DOUBLE
2517 && GET_MODE (op) == VOIDmode))
2518 win = 1;
2519 break;
2521 case 'I':
2522 case 'J':
2523 case 'K':
2524 case 'L':
2525 case 'M':
2526 case 'N':
2527 case 'O':
2528 case 'P':
2529 if (GET_CODE (op) == CONST_INT
2530 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2531 win = 1;
2532 break;
2534 case 'V':
2535 if (MEM_P (op)
2536 && ((strict > 0 && ! offsettable_memref_p (op))
2537 || (strict < 0
2538 && !(CONSTANT_P (op) || MEM_P (op)))
2539 || (reload_in_progress
2540 && !(REG_P (op)
2541 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2542 win = 1;
2543 break;
2545 case 'o':
2546 if ((strict > 0 && offsettable_memref_p (op))
2547 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2548 /* Before reload, accept what reload can handle. */
2549 || (strict < 0
2550 && (CONSTANT_P (op) || MEM_P (op)))
2551 /* During reload, accept a pseudo. */
2552 || (reload_in_progress && REG_P (op)
2553 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2554 win = 1;
2555 break;
2557 default:
2559 enum reg_class cl;
2561 cl = (c == 'r'
2562 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2563 if (cl != NO_REGS)
2565 if (strict < 0
2566 || (strict == 0
2567 && REG_P (op)
2568 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2569 || (strict == 0 && GET_CODE (op) == SCRATCH)
2570 || (REG_P (op)
2571 && reg_fits_class_p (op, cl, offset, mode)))
2572 win = 1;
2574 #ifdef EXTRA_CONSTRAINT_STR
2575 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2576 win = 1;
2578 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2579 /* Every memory operand can be reloaded to fit. */
2580 && ((strict < 0 && MEM_P (op))
2581 /* Before reload, accept what reload can turn
2582 into mem. */
2583 || (strict < 0 && CONSTANT_P (op))
2584 /* During reload, accept a pseudo. */
2585 || (reload_in_progress && REG_P (op)
2586 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2587 win = 1;
2588 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2589 /* Every address operand can be reloaded to fit. */
2590 && strict < 0)
2591 win = 1;
2592 #endif
2593 break;
2596 while (p += len, c);
2598 constraints[opno] = p;
2599 /* If this operand did not win somehow,
2600 this alternative loses. */
2601 if (! win)
2602 lose = 1;
2604 /* This alternative won; the operands are ok.
2605 Change whichever operands this alternative says to change. */
2606 if (! lose)
2608 int opno, eopno;
2610 /* See if any earlyclobber operand conflicts with some other
2611 operand. */
2613 if (strict > 0 && seen_earlyclobber_at >= 0)
2614 for (eopno = seen_earlyclobber_at;
2615 eopno < recog_data.n_operands;
2616 eopno++)
2617 /* Ignore earlyclobber operands now in memory,
2618 because we would often report failure when we have
2619 two memory operands, one of which was formerly a REG. */
2620 if (earlyclobber[eopno]
2621 && REG_P (recog_data.operand[eopno]))
2622 for (opno = 0; opno < recog_data.n_operands; opno++)
2623 if ((MEM_P (recog_data.operand[opno])
2624 || recog_data.operand_type[opno] != OP_OUT)
2625 && opno != eopno
2626 /* Ignore things like match_operator operands. */
2627 && *recog_data.constraints[opno] != 0
2628 && ! (matching_operands[opno] == eopno
2629 && operands_match_p (recog_data.operand[opno],
2630 recog_data.operand[eopno]))
2631 && ! safe_from_earlyclobber (recog_data.operand[opno],
2632 recog_data.operand[eopno]))
2633 lose = 1;
2635 if (! lose)
2637 while (--funny_match_index >= 0)
2639 recog_data.operand[funny_match[funny_match_index].other]
2640 = recog_data.operand[funny_match[funny_match_index].this];
2643 return 1;
2647 which_alternative++;
2649 while (which_alternative < recog_data.n_alternatives);
2651 which_alternative = -1;
2652 /* If we are about to reject this, but we are not to test strictly,
2653 try a very loose test. Only return failure if it fails also. */
2654 if (strict == 0)
2655 return constrain_operands (-1);
2656 else
2657 return 0;
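/* Illustrative sketch (not part of the original source): the typical
   calling sequence, mirroring extract_constrain_insn_cached above --
   extract the operands, then check them strictly once reload is done.  */
#if 0
  extract_insn (insn);
  if (!constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
  /* On success, which_alternative records the alternative that matched.  */
#endif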
2660 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2661 is a hard reg in class CLASS when its regno is offset by OFFSET
2662 and changed to mode MODE.
2663 If REG occupies multiple hard regs, all of them must be in CLASS. */
2666 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2667 enum machine_mode mode)
2669 int regno = REGNO (operand);
2671 if (cl == NO_REGS)
2672 return 0;
2674 if (regno < FIRST_PSEUDO_REGISTER
2675 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2676 regno + offset))
2678 int sr;
2679 regno += offset;
2680 for (sr = hard_regno_nregs[regno][mode] - 1;
2681 sr > 0; sr--)
2682 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2683 regno + sr))
2684 break;
2685 return sr == 0;
2688 return 0;
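/* Illustrative sketch (hypothetical target): if DImode values occupy two
   hard registers, then

     reg_fits_class_p (gen_rtx_REG (DImode, 2), GENERAL_REGS, 0, DImode)

   is true only when hard regs 2 and 3 are both in GENERAL_REGS.  */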
2691 /* Split a single instruction. Helper function for split_all_insns and
2692 split_all_insns_noflow. Return last insn in the sequence if successful,
2693 or NULL if unsuccessful. */
2695 static rtx
2696 split_insn (rtx insn)
2698 /* Split insns here to get max fine-grain parallelism. */
2699 rtx first = PREV_INSN (insn);
2700 rtx last = try_split (PATTERN (insn), insn, 1);
2702 if (last == insn)
2703 return NULL_RTX;
2705 /* try_split returns the NOTE that INSN became. */
2706 SET_INSN_DELETED (insn);
2708 /* ??? Coddle to md files that generate subregs in post-reload
2709 splitters instead of computing the proper hard register. */
2710 if (reload_completed && first != last)
2712 first = NEXT_INSN (first);
2713 for (;;)
2715 if (INSN_P (first))
2716 cleanup_subreg_operands (first);
2717 if (first == last)
2718 break;
2719 first = NEXT_INSN (first);
2722 return last;
2725 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2727 void
2728 split_all_insns (int upd_life)
2730 sbitmap blocks;
2731 bool changed;
2732 basic_block bb;
2734 blocks = sbitmap_alloc (last_basic_block);
2735 sbitmap_zero (blocks);
2736 changed = false;
2738 FOR_EACH_BB_REVERSE (bb)
2740 rtx insn, next;
2741 bool finish = false;
2743 for (insn = BB_HEAD (bb); !finish ; insn = next)
2745 /* Can't use `next_real_insn' because that might go across
2746 CODE_LABELS and short-out basic blocks. */
2747 next = NEXT_INSN (insn);
2748 finish = (insn == BB_END (bb));
2749 if (INSN_P (insn))
2751 rtx set = single_set (insn);
2753 /* Don't split no-op move insns. These should silently
2754 disappear later in final. Splitting such insns would
2755 break the code that handles REG_NO_CONFLICT blocks. */
2756 if (set && set_noop_p (set))
2758 /* Nops get in the way while scheduling, so delete them
2759 now if register allocation has already been done. It
2760 is too risky to try to do this before register
2761 allocation, and there are unlikely to be very many
2762 nops then anyway. */
2763 if (reload_completed)
2765 /* If the no-op set has a REG_UNUSED note, we need
2766 to update liveness information. */
2767 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2769 SET_BIT (blocks, bb->index);
2770 changed = true;
2772 /* ??? Is life info affected by deleting edges? */
2773 delete_insn_and_edges (insn);
2776 else
2778 rtx last = split_insn (insn);
2779 if (last)
2781 /* The split sequence may include barrier, but the
2782 BB boundary we are interested in will be set to
2783 previous one. */
2785 while (BARRIER_P (last))
2786 last = PREV_INSN (last);
2787 SET_BIT (blocks, bb->index);
2788 changed = true;
2795 if (changed)
2797 int old_last_basic_block = last_basic_block;
2799 find_many_sub_basic_blocks (blocks);
2801 if (old_last_basic_block != last_basic_block && upd_life)
2802 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2805 if (changed && upd_life)
2806 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2807 PROP_DEATH_NOTES);
2809 #ifdef ENABLE_CHECKING
2810 verify_flow_info ();
2811 #endif
2813 sbitmap_free (blocks);
2816 /* Same as split_all_insns, but do not expect CFG to be available.
2817 Used by machine dependent reorg passes. */
2819 unsigned int
2820 split_all_insns_noflow (void)
2822 rtx next, insn;
2824 for (insn = get_insns (); insn; insn = next)
2826 next = NEXT_INSN (insn);
2827 if (INSN_P (insn))
2829 /* Don't split no-op move insns. These should silently
2830 disappear later in final. Splitting such insns would
2831 break the code that handles REG_NO_CONFLICT blocks. */
2832 rtx set = single_set (insn);
2833 if (set && set_noop_p (set))
2835 /* Nops get in the way while scheduling, so delete them
2836 now if register allocation has already been done. It
2837 is too risky to try to do this before register
2838 allocation, and there are unlikely to be very many
2839 nops then anyway.
2841 ??? Should we use delete_insn when the CFG isn't valid? */
2842 if (reload_completed)
2843 delete_insn_and_edges (insn);
2845 else
2846 split_insn (insn);
2849 return 0;
2852 #ifdef HAVE_peephole2
2853 struct peep2_insn_data
2855 rtx insn;
2856 regset live_before;
2859 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2860 static int peep2_current;
2861 /* The number of instructions available to match a peep2. */
2862 int peep2_current_count;
2864 /* A non-insn marker indicating the last insn of the block.
2865 The live_before regset for this element is correct, indicating
2866 global_live_at_end for the block. */
2867 #define PEEP2_EOB pc_rtx
2869 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2870 does not exist. Used by the recognizer to find the next insn to match
2871 in a multi-insn pattern. */
2874 peep2_next_insn (int n)
2876 gcc_assert (n <= peep2_current_count);
2878 n += peep2_current;
2879 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2880 n -= MAX_INSNS_PER_PEEP2 + 1;
2882 return peep2_insn_data[n].insn;
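/* peep2_insn_data is used as a ring buffer of MAX_INSNS_PER_PEEP2 + 1
   entries; the index arithmetic above, and in the functions below, wraps
   modulo the buffer size by explicit subtraction rather than '%'.  */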
2885 /* Return true if REGNO is dead before the Nth non-note insn
2886 after `current'. */
2889 peep2_regno_dead_p (int ofs, int regno)
2891 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2893 ofs += peep2_current;
2894 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2895 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2897 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2899 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2902 /* Similarly for a REG. */
2905 peep2_reg_dead_p (int ofs, rtx reg)
2907 int regno, n;
2909 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2911 ofs += peep2_current;
2912 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2913 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2915 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2917 regno = REGNO (reg);
2918 n = hard_regno_nregs[regno][GET_MODE (reg)];
2919 while (--n >= 0)
2920 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2921 return 0;
2922 return 1;
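/* Illustrative sketch (hypothetical pattern): a define_peephole2 in a
   machine description can call this from its condition, e.g.
   "peep2_reg_dead_p (2, operands[3])" to require that operand 3 is dead
   before the second insn after the match point.  */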
2925 /* Try to find a hard register of mode MODE, matching the register class in
2926 CLASS_STR, which is available at the beginning of insn FROM and remains
2927 available until the end of insn TO. FROM and TO are offsets from the
2928 current peephole2 position, as with peep2_next_insn, so the register
2929 must stay free across every matched insn in that range.
2930 Registers that already have bits set in REG_SET will not be considered.
2932 If an appropriate register is available, it will be returned and the
2933 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2934 returned. */
2937 peep2_find_free_register (int from, int to, const char *class_str,
2938 enum machine_mode mode, HARD_REG_SET *reg_set)
2940 static int search_ofs;
2941 enum reg_class cl;
2942 HARD_REG_SET live;
2943 int i;
2945 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2946 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2948 from += peep2_current;
2949 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2950 from -= MAX_INSNS_PER_PEEP2 + 1;
2951 to += peep2_current;
2952 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2953 to -= MAX_INSNS_PER_PEEP2 + 1;
2955 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2956 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2958 while (from != to)
2960 HARD_REG_SET this_live;
2962 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2963 from = 0;
2964 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2965 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2966 IOR_HARD_REG_SET (live, this_live);
2969 cl = (class_str[0] == 'r' ? GENERAL_REGS
2970 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2972 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2974 int raw_regno, regno, success, j;
2976 /* Distribute the free registers as much as possible. */
2977 raw_regno = search_ofs + i;
2978 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2979 raw_regno -= FIRST_PSEUDO_REGISTER;
2980 #ifdef REG_ALLOC_ORDER
2981 regno = reg_alloc_order[raw_regno];
2982 #else
2983 regno = raw_regno;
2984 #endif
2986 /* Don't allocate fixed registers. */
2987 if (fixed_regs[regno])
2988 continue;
2989 /* Make sure the register is of the right class. */
2990 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2991 continue;
2992 /* And can support the mode we need. */
2993 if (! HARD_REGNO_MODE_OK (regno, mode))
2994 continue;
2995 /* And that we don't create an extra save/restore. */
2996 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2997 continue;
2998 /* And we don't clobber traceback for noreturn functions. */
2999 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3000 && (! reload_completed || frame_pointer_needed))
3001 continue;
3003 success = 1;
3004 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3006 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3007 || TEST_HARD_REG_BIT (live, regno + j))
3009 success = 0;
3010 break;
3013 if (success)
3015 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3016 SET_HARD_REG_BIT (*reg_set, regno + j);
3018 /* Start the next search with the next register. */
3019 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3020 raw_regno = 0;
3021 search_ofs = raw_regno;
3023 return gen_rtx_REG (mode, regno);
3027 search_ofs = 0;
3028 return NULL_RTX;
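/* Illustrative sketch (hypothetical define_peephole2 preparation
   statements): grab a scratch register that stays free across the three
   matched insns, or abandon the match.  */
#if 0
  HARD_REG_SET scratch_regs;
  CLEAR_HARD_REG_SET (scratch_regs);
  operands[4] = peep2_find_free_register (0, 2, "r", SImode, &scratch_regs);
  if (operands[4] == NULL_RTX)
    FAIL;
#endif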
3031 /* Perform the peephole2 optimization pass. */
3033 static void
3034 peephole2_optimize (void)
3036 rtx insn, prev;
3037 regset live;
3038 int i;
3039 basic_block bb;
3040 #ifdef HAVE_conditional_execution
3041 sbitmap blocks;
3042 bool changed;
3043 #endif
3044 bool do_cleanup_cfg = false;
3045 bool do_global_life_update = false;
3046 bool do_rebuild_jump_labels = false;
3048 /* Initialize the regsets we're going to use. */
3049 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3050 peep2_insn_data[i].live_before = ALLOC_REG_SET (&reg_obstack);
3051 live = ALLOC_REG_SET (&reg_obstack);
3053 #ifdef HAVE_conditional_execution
3054 blocks = sbitmap_alloc (last_basic_block);
3055 sbitmap_zero (blocks);
3056 changed = false;
3057 #else
3058 count_or_remove_death_notes (NULL, 1);
3059 #endif
3061 FOR_EACH_BB_REVERSE (bb)
3063 struct propagate_block_info *pbi;
3064 reg_set_iterator rsi;
3065 unsigned int j;
3067 /* Indicate that all slots except the last hold invalid data. */
3068 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3069 peep2_insn_data[i].insn = NULL_RTX;
3070 peep2_current_count = 0;
3072 /* Indicate that the last slot contains live_after data. */
3073 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3074 peep2_current = MAX_INSNS_PER_PEEP2;
3076 /* Start up propagation. */
3077 COPY_REG_SET (live, bb->il.rtl->global_live_at_end);
3078 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3080 #ifdef HAVE_conditional_execution
3081 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3082 #else
3083 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3084 #endif
3086 for (insn = BB_END (bb); ; insn = prev)
3088 prev = PREV_INSN (insn);
3089 if (INSN_P (insn))
3091 rtx try, before_try, x;
3092 int match_len;
3093 rtx note;
3094 bool was_call = false;
3096 /* Record this insn. */
3097 if (--peep2_current < 0)
3098 peep2_current = MAX_INSNS_PER_PEEP2;
3099 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3100 && peep2_insn_data[peep2_current].insn == NULL_RTX)
3101 peep2_current_count++;
3102 peep2_insn_data[peep2_current].insn = insn;
3103 propagate_one_insn (pbi, insn);
3104 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3106 if (RTX_FRAME_RELATED_P (insn))
3108 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3109 substitution would lose the
3110 REG_FRAME_RELATED_EXPR that is attached. */
3111 peep2_current_count = 0;
3112 try = NULL;
3114 else
3115 /* Match the peephole. */
3116 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3118 if (try != NULL)
3120 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3121 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3122 cfg-related call notes. */
3123 for (i = 0; i <= match_len; ++i)
3125 int j;
3126 rtx old_insn, new_insn, note;
3128 j = i + peep2_current;
3129 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3130 j -= MAX_INSNS_PER_PEEP2 + 1;
3131 old_insn = peep2_insn_data[j].insn;
3132 if (!CALL_P (old_insn))
3133 continue;
3134 was_call = true;
3136 new_insn = try;
3137 while (new_insn != NULL_RTX)
3139 if (CALL_P (new_insn))
3140 break;
3141 new_insn = NEXT_INSN (new_insn);
3144 gcc_assert (new_insn != NULL_RTX);
3146 CALL_INSN_FUNCTION_USAGE (new_insn)
3147 = CALL_INSN_FUNCTION_USAGE (old_insn);
3149 for (note = REG_NOTES (old_insn);
3150 note;
3151 note = XEXP (note, 1))
3152 switch (REG_NOTE_KIND (note))
3154 case REG_NORETURN:
3155 case REG_SETJMP:
3156 REG_NOTES (new_insn)
3157 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3158 XEXP (note, 0),
3159 REG_NOTES (new_insn));
3160 default:
3161 /* Discard all other reg notes. */
3162 break;
3165 /* Croak if there is another call in the sequence. */
3166 while (++i <= match_len)
3168 j = i + peep2_current;
3169 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3170 j -= MAX_INSNS_PER_PEEP2 + 1;
3171 old_insn = peep2_insn_data[j].insn;
3172 gcc_assert (!CALL_P (old_insn));
3174 break;
3177 i = match_len + peep2_current;
3178 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3179 i -= MAX_INSNS_PER_PEEP2 + 1;
3181 note = find_reg_note (peep2_insn_data[i].insn,
3182 REG_EH_REGION, NULL_RTX);
3184 /* Replace the old sequence with the new. */
3185 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3186 INSN_LOCATOR (peep2_insn_data[i].insn));
3187 before_try = PREV_INSN (insn);
3188 delete_insn_chain (insn, peep2_insn_data[i].insn);
3190 /* Re-insert the EH_REGION notes. */
3191 if (note || (was_call && nonlocal_goto_handler_labels))
3193 edge eh_edge;
3194 edge_iterator ei;
3196 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3197 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3198 break;
3200 for (x = try ; x != before_try ; x = PREV_INSN (x))
3201 if (CALL_P (x)
3202 || (flag_non_call_exceptions
3203 && may_trap_p (PATTERN (x))
3204 && !find_reg_note (x, REG_EH_REGION, NULL)))
3206 if (note)
3207 REG_NOTES (x)
3208 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3209 XEXP (note, 0),
3210 REG_NOTES (x));
3212 if (x != BB_END (bb) && eh_edge)
3214 edge nfte, nehe;
3215 int flags;
3217 nfte = split_block (bb, x);
3218 flags = (eh_edge->flags
3219 & (EDGE_EH | EDGE_ABNORMAL));
3220 if (CALL_P (x))
3221 flags |= EDGE_ABNORMAL_CALL;
3222 nehe = make_edge (nfte->src, eh_edge->dest,
3223 flags);
3225 nehe->probability = eh_edge->probability;
3226 nfte->probability
3227 = REG_BR_PROB_BASE - nehe->probability;
3229 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3230 #ifdef HAVE_conditional_execution
3231 SET_BIT (blocks, nfte->dest->index);
3232 changed = true;
3233 #endif
3234 bb = nfte->src;
3235 eh_edge = nehe;
3239 /* A possibly trapping insn may have been converted to a
3240 non-trapping one; zap any now-dummy outgoing edges. */
3241 do_cleanup_cfg |= purge_dead_edges (bb);
3244 #ifdef HAVE_conditional_execution
3245 /* With conditional execution, we cannot back up the
3246 live information so easily, since the conditional
3247 death data structures are not so self-contained.
3248 So record that we've made a modification to this
3249 block and update life information at the end. */
3250 SET_BIT (blocks, bb->index);
3251 changed = true;
3253 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3254 peep2_insn_data[i].insn = NULL_RTX;
3255 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3256 peep2_current_count = 0;
3257 #else
3258 /* Back up lifetime information past the end of the
3259 newly created sequence. */
3260 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3261 i = 0;
3262 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3264 /* Update life information for the new sequence. */
3265 x = try;
3268 if (INSN_P (x))
3270 if (--i < 0)
3271 i = MAX_INSNS_PER_PEEP2;
3272 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3273 && peep2_insn_data[i].insn == NULL_RTX)
3274 peep2_current_count++;
3275 peep2_insn_data[i].insn = x;
3276 propagate_one_insn (pbi, x);
3277 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3279 x = PREV_INSN (x);
3281 while (x != prev);
3283 /* ??? Should verify that LIVE now matches what we
3284 had before the new sequence. */
3286 peep2_current = i;
3287 #endif
3289 /* If we generated a jump instruction, it won't have
3290 JUMP_LABEL set. Recompute after we're done. */
3291 for (x = try; x != before_try; x = PREV_INSN (x))
3292 if (JUMP_P (x))
3294 do_rebuild_jump_labels = true;
3295 break;
3300 if (insn == BB_HEAD (bb))
3301 break;
3304 /* Some peepholes can decide they don't need one or more of their
3305 inputs. If this happens, local life update is not enough. */
3306 EXECUTE_IF_AND_COMPL_IN_BITMAP (bb->il.rtl->global_live_at_start, live,
3307 0, j, rsi)
3309 do_global_life_update = true;
3310 break;
3313 free_propagate_block_info (pbi);
3316 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3317 FREE_REG_SET (peep2_insn_data[i].live_before);
3318 FREE_REG_SET (live);
3320 if (do_rebuild_jump_labels)
3321 rebuild_jump_labels (get_insns ());
3323 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3324 we've changed global life since exception handlers are no longer
3325 reachable. */
3326 if (do_cleanup_cfg)
3328 cleanup_cfg (0);
3329 do_global_life_update = true;
3331 if (do_global_life_update)
3332 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3333 #ifdef HAVE_conditional_execution
3334 else
3336 count_or_remove_death_notes (blocks, 1);
3337 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3339 sbitmap_free (blocks);
3340 #endif
3342 #endif /* HAVE_peephole2 */
3344 /* Common predicates for use with define_bypass. */
3346 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3347 data, not the address operand(s), of the store. IN_INSN must be
3348 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3349 SETs inside. */
3352 store_data_bypass_p (rtx out_insn, rtx in_insn)
3354 rtx out_set, in_set;
3356 in_set = single_set (in_insn);
3357 gcc_assert (in_set);
3359 if (!MEM_P (SET_DEST (in_set)))
3360 return false;
3362 out_set = single_set (out_insn);
3363 if (out_set)
3365 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3366 return false;
3368 else
3370 rtx out_pat;
3371 int i;
3373 out_pat = PATTERN (out_insn);
3374 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3376 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3378 rtx exp = XVECEXP (out_pat, 0, i);
3380 if (GET_CODE (exp) == CLOBBER)
3381 continue;
3383 gcc_assert (GET_CODE (exp) == SET);
3385 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3386 return false;
3390 return true;
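/* Illustrative sketch (hypothetical insn reservation names): this
   predicate is meant to be named as the guard of a define_bypass in a
   machine description, e.g.

     (define_bypass 1 "cpu_load" "cpu_store" "store_data_bypass_p")

   i.e. the loaded value may be forwarded to the store one cycle early,
   provided the store depends on it only for the data being stored, not
   for the address.  */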
3393 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3394 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3395 or multiple set; IN_INSN must be single_set for the result to be true, but for convenience
3396 of insn categorization may be any JUMP or CALL insn. */
3399 if_test_bypass_p (rtx out_insn, rtx in_insn)
3401 rtx out_set, in_set;
3403 in_set = single_set (in_insn);
3404 if (! in_set)
3406 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3407 return false;
3410 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3411 return false;
3412 in_set = SET_SRC (in_set);
3414 out_set = single_set (out_insn);
3415 if (out_set)
3417 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3418 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3419 return false;
3421 else
3423 rtx out_pat;
3424 int i;
3426 out_pat = PATTERN (out_insn);
3427 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3429 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3431 rtx exp = XVECEXP (out_pat, 0, i);
3433 if (GET_CODE (exp) == CLOBBER)
3434 continue;
3436 gcc_assert (GET_CODE (exp) == SET);
3438 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3439 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3440 return false;
3444 return true;
3447 static bool
3448 gate_handle_peephole2 (void)
3450 return (optimize > 0 && flag_peephole2);
3453 static unsigned int
3454 rest_of_handle_peephole2 (void)
3456 #ifdef HAVE_peephole2
3457 peephole2_optimize ();
3458 #endif
3459 return 0;
3462 struct tree_opt_pass pass_peephole2 =
3464 "peephole2", /* name */
3465 gate_handle_peephole2, /* gate */
3466 rest_of_handle_peephole2, /* execute */
3467 NULL, /* sub */
3468 NULL, /* next */
3469 0, /* static_pass_number */
3470 TV_PEEPHOLE2, /* tv_id */
3471 0, /* properties_required */
3472 0, /* properties_provided */
3473 0, /* properties_destroyed */
3474 0, /* todo_flags_start */
3475 TODO_dump_func, /* todo_flags_finish */
3476 'z' /* letter */
3479 static unsigned int
3480 rest_of_handle_split_all_insns (void)
3482 split_all_insns (1);
3483 return 0;
3486 struct tree_opt_pass pass_split_all_insns =
3488 "split1", /* name */
3489 NULL, /* gate */
3490 rest_of_handle_split_all_insns, /* execute */
3491 NULL, /* sub */
3492 NULL, /* next */
3493 0, /* static_pass_number */
3494 0, /* tv_id */
3495 0, /* properties_required */
3496 0, /* properties_provided */
3497 0, /* properties_destroyed */
3498 0, /* todo_flags_start */
3499 TODO_dump_func, /* todo_flags_finish */
3500 0 /* letter */
3503 /* The placement of the splitting that we do for shorten_branches
3504 depends on whether regstack is used by the target or not. */
3505 static bool
3506 gate_do_final_split (void)
3508 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3509 return 1;
3510 #else
3511 return 0;
3512 #endif
3515 struct tree_opt_pass pass_split_for_shorten_branches =
3517 "split3", /* name */
3518 gate_do_final_split, /* gate */
3519 split_all_insns_noflow, /* execute */
3520 NULL, /* sub */
3521 NULL, /* next */
3522 0, /* static_pass_number */
3523 TV_SHORTEN_BRANCH, /* tv_id */
3524 0, /* properties_required */
3525 0, /* properties_provided */
3526 0, /* properties_destroyed */
3527 0, /* todo_flags_start */
3528 TODO_dump_func, /* todo_flags_finish */
3529 0 /* letter */
3533 static bool
3534 gate_handle_split_before_regstack (void)
3536 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3537 /* If flow2 creates new instructions which need splitting,
3538 and scheduling after reload is not done, they might not be
3539 split until final, which does not allow splitting
3540 when HAVE_ATTR_length is defined. */
3541 # ifdef INSN_SCHEDULING
3542 return (optimize && !flag_schedule_insns_after_reload);
3543 # else
3544 return (optimize);
3545 # endif
3546 #else
3547 return 0;
3548 #endif
3551 struct tree_opt_pass pass_split_before_regstack =
3553 "split2", /* name */
3554 gate_handle_split_before_regstack, /* gate */
3555 rest_of_handle_split_all_insns, /* execute */
3556 NULL, /* sub */
3557 NULL, /* next */
3558 0, /* static_pass_number */
3559 TV_SHORTEN_BRANCH, /* tv_id */
3560 0, /* properties_required */
3561 0, /* properties_provided */
3562 0, /* properties_destroyed */
3563 0, /* todo_flags_start */
3564 TODO_dump_func, /* todo_flags_finish */
3565 0 /* letter */