[dragonfly.git] contrib/gcc-4.4/gcc/recog.c (GCC 4.4.7-RELEASE)
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
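
/* Typical usage of the change-group machinery above (illustrative
   sketch only; NEW_SRC and NEW_DEST stand for replacement rtxes
   supplied by the caller):

       rtx set = single_set (insn);
       validate_change (insn, &SET_SRC (set), new_src, 1);
       validate_change (insn, &SET_DEST (set), new_dest, 1);
       if (! apply_change_group ())
         return;

   With IN_GROUP == 1 both edits are queued; apply_change_group then
   re-recognizes INSN with all of them in place and either commits the
   whole group or, on failure, restores every *LOC before returning 0.
   Callers that need finer control can use verify_changes together with
   confirm_change_group or cancel_changes (below) instead.  */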
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
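
/* For example (illustrative sketch), a pass that has proved that pseudo
   97 can stand in for pseudo 120 within INSN might do:

       if (validate_replace_rtx (regno_reg_rtx[120], regno_reg_rtx[97],
                                 insn))
         ...

   Every occurrence is substituted as one change group, so INSN is never
   left partially rewritten if the result fails to re-recognize.  */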
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
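
/* general_operand is the workhorse match_operand predicate in machine
   descriptions.  An illustrative (hypothetical) .md pattern:

       (define_insn "*movsi_example"
         [(set (match_operand:SI 0 "general_operand" "=r,m")
               (match_operand:SI 1 "general_operand" "rm,r"))]
         ""
         "...")

   When recog matches such a pattern, each match_operand calls
   general_operand with the candidate rtx and SImode, accepting a
   register, a valid memory reference, or a legitimate constant.  */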
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
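
/* Concretely (illustrative; the exact rtl is target-dependent): on a
   STACK_GROWS_DOWNWARD target whose PUSH_ROUNDING leaves the size
   unchanged, push_operand accepts a destination such as

       (mem:SI (pre_dec:SI (reg:SI sp)))

   and pop_operand accepts a source such as

       (mem:SI (post_inc:SI (reg:SI sp))).

   The PRE_MODIFY form in push_operand covers targets where
   PUSH_ROUNDING pads the push beyond GET_MODE_SIZE (MODE).  */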
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
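
/* As an illustration, a GNU C statement like

       asm ("add %1,%0" : "=r" (x) : "r" (y));

   has a body of the form (set (reg) (asm_operands ...)) and yields 2
   here (one output plus one input).  A second output turns the body
   into a PARALLEL of SETs, possibly followed by CLOBBERs, and an asm
   with no outputs is a bare ASM_OPERANDS or [(asm_operands ...)
   (clobber ...)...].  */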
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;             /* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
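
/* For example (illustrative): asm_operand_ok (GEN_INT (42), "i", NULL)
   returns 1 in a non-PIC compilation, asm_operand_ok (GEN_INT (42),
   "r", NULL) returns 0, and checking a bare matching constraint such as
   "0" with a null CONSTRAINTS vector returns -1, since the operand
   being matched is not available.  */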
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
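
/* For example (illustrative), for 4-byte SImode the address
   (plus (reg) (const_int 20)) is offsettable exactly when
   (plus (reg) (const_int 23)) is still a valid QImode address, while
   (post_inc (reg)) is rejected outright by the RTX_AUTOINC check
   above.  */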
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
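
/* For example, a POST_INC address increments by the size of the mode of
   the memory reference containing it: by 4 inside (mem:SI ...) but by 2
   inside (mem:HI ...), so the same address rtx cannot safely be reused
   in a narrower mode.  That is precisely what this predicate reports.  */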
1946 /* Like extract_insn, but save insn extracted and don't extract again, when
1947 called again for the same insn expecting that recog_data still contain the
1948 valid information. This is used primary by gen_attr infrastructure that
1949 often does extract insn again and again. */
1950 void
1951 extract_insn_cached (rtx insn)
1953 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1954 return;
1955 extract_insn (insn);
1956 recog_data.insn = insn;
1959 /* Do cached extract_insn, constrain_operands and complain about failures.
1960 Used by insn_attrtab. */
1961 void
1962 extract_constrain_insn_cached (rtx insn)
1964 extract_insn_cached (insn);
1965 if (which_alternative == -1
1966 && !constrain_operands (reload_completed))
1967 fatal_insn_not_found (insn);
1970 /* Do cached constrain_operands and complain about failures. */
1972 constrain_operands_cached (int strict)
1974 if (which_alternative == -1)
1975 return constrain_operands (strict);
1976 else
1977 return 1;
1980 /* Analyze INSN and fill in recog_data. */
1982 void
1983 extract_insn (rtx insn)
1985 int i;
1986 int icode;
1987 int noperands;
1988 rtx body = PATTERN (insn);
1990 recog_data.n_operands = 0;
1991 recog_data.n_alternatives = 0;
1992 recog_data.n_dups = 0;
1994 switch (GET_CODE (body))
1996 case USE:
1997 case CLOBBER:
1998 case ASM_INPUT:
1999 case ADDR_VEC:
2000 case ADDR_DIFF_VEC:
2001 return;
2003 case SET:
2004 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2005 goto asm_insn;
2006 else
2007 goto normal_insn;
2008 case PARALLEL:
2009 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2010 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2011 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2012 goto asm_insn;
2013 else
2014 goto normal_insn;
2015 case ASM_OPERANDS:
2016 asm_insn:
2017 recog_data.n_operands = noperands = asm_noperands (body);
2018 if (noperands >= 0)
2020 /* This insn is an `asm' with operands. */
2022 /* expand_asm_operands makes sure there aren't too many operands. */
2023 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2025 /* Now get the operand values and constraints out of the insn. */
2026 decode_asm_operands (body, recog_data.operand,
2027 recog_data.operand_loc,
2028 recog_data.constraints,
2029 recog_data.operand_mode, NULL);
2030 if (noperands > 0)
2032 const char *p = recog_data.constraints[0];
2033 recog_data.n_alternatives = 1;
2034 while (*p)
2035 recog_data.n_alternatives += (*p++ == ',');
2037 break;
2039 fatal_insn_not_found (insn);
2041 default:
2042 normal_insn:
2043 /* Ordinary insn: recognize it, get the operands via insn_extract
2044 and get the constraints. */
2046 icode = recog_memoized (insn);
2047 if (icode < 0)
2048 fatal_insn_not_found (insn);
2050 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2051 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2052 recog_data.n_dups = insn_data[icode].n_dups;
2054 insn_extract (insn);
2056 for (i = 0; i < noperands; i++)
2058 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2059 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2060 /* A VOIDmode match_operand gets its mode from the real operand. */
2061 if (recog_data.operand_mode[i] == VOIDmode)
2062 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2065 for (i = 0; i < noperands; i++)
2066 recog_data.operand_type[i]
2067 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2068 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2069 : OP_IN);
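/* Worked example (illustrative): constraints "=r", "+m" and "r" yield
   OP_OUT, OP_INOUT and OP_IN respectively for their operands.  */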
2071 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2073 if (INSN_CODE (insn) < 0)
2074 for (i = 0; i < recog_data.n_alternatives; i++)
2075 recog_data.alternative_enabled_p[i] = true;
2076 else
2078 recog_data.insn = insn;
2079 for (i = 0; i < recog_data.n_alternatives; i++)
2081 which_alternative = i;
2082 recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
2086 recog_data.insn = NULL;
2087 which_alternative = -1;
2090 /* After calling extract_insn, you can use this function to extract some
2091 information from the constraint strings into a more usable form.
2092 The collected data is stored in recog_op_alt. */
2093 void
2094 preprocess_constraints (void)
2096 int i;
2098 for (i = 0; i < recog_data.n_operands; i++)
2099 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2100 * sizeof (struct operand_alternative)));
2102 for (i = 0; i < recog_data.n_operands; i++)
2104 int j;
2105 struct operand_alternative *op_alt;
2106 const char *p = recog_data.constraints[i];
2108 op_alt = recog_op_alt[i];
2110 for (j = 0; j < recog_data.n_alternatives; j++)
2112 op_alt[j].cl = NO_REGS;
2113 op_alt[j].constraint = p;
2114 op_alt[j].matches = -1;
2115 op_alt[j].matched = -1;
2117 if (!recog_data.alternative_enabled_p[j])
2119 p = skip_alternative (p);
2120 continue;
2123 if (*p == '\0' || *p == ',')
2125 op_alt[j].anything_ok = 1;
2126 continue;
2129 for (;;)
2131 char c = *p;
2132 if (c == '#')
2133 do
2134 c = *++p;
2135 while (c != ',' && c != '\0');
2136 if (c == ',' || c == '\0')
2138 p++;
2139 break;
2142 switch (c)
2144 case '=': case '+': case '*': case '%':
2145 case 'E': case 'F': case 'G': case 'H':
2146 case 's': case 'i': case 'n':
2147 case 'I': case 'J': case 'K': case 'L':
2148 case 'M': case 'N': case 'O': case 'P':
2149 /* These don't say anything we care about. */
2150 break;
2152 case '?':
2153 op_alt[j].reject += 6;
2154 break;
2155 case '!':
2156 op_alt[j].reject += 600;
2157 break;
2158 case '&':
2159 op_alt[j].earlyclobber = 1;
2160 break;
2162 case '0': case '1': case '2': case '3': case '4':
2163 case '5': case '6': case '7': case '8': case '9':
2165 char *end;
2166 op_alt[j].matches = strtoul (p, &end, 10);
2167 recog_op_alt[op_alt[j].matches][j].matched = i;
2168 p = end;
2170 continue;
2172 case TARGET_MEM_CONSTRAINT:
2173 op_alt[j].memory_ok = 1;
2174 break;
2175 case '<':
2176 op_alt[j].decmem_ok = 1;
2177 break;
2178 case '>':
2179 op_alt[j].incmem_ok = 1;
2180 break;
2181 case 'V':
2182 op_alt[j].nonoffmem_ok = 1;
2183 break;
2184 case 'o':
2185 op_alt[j].offmem_ok = 1;
2186 break;
2187 case 'X':
2188 op_alt[j].anything_ok = 1;
2189 break;
2191 case 'p':
2192 op_alt[j].is_address = 1;
2193 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2194 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2195 break;
2197 case 'g':
2198 case 'r':
2199 op_alt[j].cl =
2200 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2201 break;
2203 default:
2204 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2206 op_alt[j].memory_ok = 1;
2207 break;
2209 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2211 op_alt[j].is_address = 1;
2212 op_alt[j].cl
2213 = (reg_class_subunion
2214 [(int) op_alt[j].cl]
2215 [(int) base_reg_class (VOIDmode, ADDRESS,
2216 SCRATCH)]);
2217 break;
2220 op_alt[j].cl
2221 = (reg_class_subunion
2222 [(int) op_alt[j].cl]
2223 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2224 break;
2226 p += CONSTRAINT_LEN (c, p);
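/* Illustrative sketch (not part of GCC): after the two calls below, a
   pass can read the parsed constraint data instead of re-scanning the
   constraint strings.  */

static enum reg_class
sketch_operand_class (rtx insn, int opno, int alt)
{
  extract_insn (insn);
  preprocess_constraints ();
  /* recog_op_alt also exposes matches, earlyclobber, memory_ok, etc.  */
  return recog_op_alt[opno][alt].cl;
}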
2232 /* Check the operands of an insn against the insn's operand constraints
2233 and return 1 if they are valid.
2234 The information about the insn's operands, constraints, operand modes
2235 etc. is obtained from the global variables set up by extract_insn.
2237 WHICH_ALTERNATIVE is set to a number which indicates which
2238 alternative of constraints was matched: 0 for the first alternative,
2239 1 for the next, etc.
2241 In addition, when two operands are required to match
2242 and it happens that the output operand is (reg) while the
2243 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2244 make the output operand look like the input.
2245 This is because the output operand is the one the template will print.
2247 This is used in final, just before printing the assembler code, and by
2248 the routines that determine an insn's attribute.
2250 If STRICT is positive, it means that we have been
2251 called after reload has been completed. In that case, we must
2252 do all checks strictly. If it is zero, it means that we have been called
2253 before reload has completed. In that case, we first try to see if we can
2254 find an alternative that matches strictly. If not, we try again, this
2255 time assuming that reload will fix up the insn. This provides a "best
2256 guess" for the alternative and is used to compute attributes of insns prior
2257 to reload. A negative value of STRICT is used for this internal call. */
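/* Worked example (illustrative): with operand constraints "=r,m" and
   "r,r", an output operand that is a valid MEM and an input operand
   that is a hard register in GENERAL_REGS, alternative 0 fails on the
   output's `r', alternative 1 satisfies both operands, and
   which_alternative is left at 1 on successful return.  */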
2259 struct funny_match
2261 int this_op, other;
2264 int
2265 constrain_operands (int strict)
2267 const char *constraints[MAX_RECOG_OPERANDS];
2268 int matching_operands[MAX_RECOG_OPERANDS];
2269 int earlyclobber[MAX_RECOG_OPERANDS];
2270 int c;
2272 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2273 int funny_match_index;
2275 which_alternative = 0;
2276 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2277 return 1;
2279 for (c = 0; c < recog_data.n_operands; c++)
2281 constraints[c] = recog_data.constraints[c];
2282 matching_operands[c] = -1;
2285 do
2287 int seen_earlyclobber_at = -1;
2288 int opno;
2289 int lose = 0;
2290 funny_match_index = 0;
2292 if (!recog_data.alternative_enabled_p[which_alternative])
2294 int i;
2296 for (i = 0; i < recog_data.n_operands; i++)
2297 constraints[i] = skip_alternative (constraints[i]);
2299 which_alternative++;
2300 continue;
2303 for (opno = 0; opno < recog_data.n_operands; opno++)
2305 rtx op = recog_data.operand[opno];
2306 enum machine_mode mode = GET_MODE (op);
2307 const char *p = constraints[opno];
2308 int offset = 0;
2309 int win = 0;
2310 int val;
2311 int len;
2313 earlyclobber[opno] = 0;
2315 /* A unary operator may be accepted by the predicate, but it
2316 is irrelevant for matching constraints. */
2317 if (UNARY_P (op))
2318 op = XEXP (op, 0);
2320 if (GET_CODE (op) == SUBREG)
2322 if (REG_P (SUBREG_REG (op))
2323 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2324 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2325 GET_MODE (SUBREG_REG (op)),
2326 SUBREG_BYTE (op),
2327 GET_MODE (op));
2328 op = SUBREG_REG (op);
2331 /* An empty constraint or empty alternative
2332 allows anything which matched the pattern. */
2333 if (*p == 0 || *p == ',')
2334 win = 1;
2336 do
2337 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2339 case '\0':
2340 len = 0;
2341 break;
2342 case ',':
2343 c = '\0';
2344 break;
2346 case '?': case '!': case '*': case '%':
2347 case '=': case '+':
2348 break;
2350 case '#':
2351 /* Ignore rest of this alternative as far as
2352 constraint checking is concerned. */
2353 do
2354 p++;
2355 while (*p && *p != ',');
2356 len = 0;
2357 break;
2359 case '&':
2360 earlyclobber[opno] = 1;
2361 if (seen_earlyclobber_at < 0)
2362 seen_earlyclobber_at = opno;
2363 break;
2365 case '0': case '1': case '2': case '3': case '4':
2366 case '5': case '6': case '7': case '8': case '9':
2368 /* This operand must be the same as a previous one.
2369 This kind of constraint is used for instructions such
2370 as add when they take only two operands.
2372 Note that the lower-numbered operand is passed first.
2374 If we are not testing strictly, assume that this
2375 constraint will be satisfied. */
2377 char *end;
2378 int match;
2380 match = strtoul (p, &end, 10);
2381 p = end;
2383 if (strict < 0)
2384 val = 1;
2385 else
2387 rtx op1 = recog_data.operand[match];
2388 rtx op2 = recog_data.operand[opno];
2390 /* A unary operator may be accepted by the predicate,
2391 but it is irrelevant for matching constraints. */
2392 if (UNARY_P (op1))
2393 op1 = XEXP (op1, 0);
2394 if (UNARY_P (op2))
2395 op2 = XEXP (op2, 0);
2397 val = operands_match_p (op1, op2);
2400 matching_operands[opno] = match;
2401 matching_operands[match] = opno;
2403 if (val != 0)
2404 win = 1;
2406 /* If output is *x and input is *--x, arrange later
2407 to change the output to *--x as well, since the
2408 output op is the one that will be printed. */
2409 if (val == 2 && strict > 0)
2411 funny_match[funny_match_index].this_op = opno;
2412 funny_match[funny_match_index++].other = match;
2415 len = 0;
2416 break;
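/* Worked example (illustrative): if operand 0 (the output) is
   (mem (reg A)) and operand 1, constrained "0", is
   (mem (pre_dec (reg A))), operands_match_p returns 2; the
   funny_match fixup at the end of this function then copies the
   pre_dec form into operand 0 so that the template prints it.  */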
2418 case 'p':
2419 /* p is used for address_operands. When we are called by
2420 gen_reload, no one will have checked that the address is
2421 strictly valid, i.e., that all pseudos requiring hard regs
2422 have gotten them. */
2423 if (strict <= 0
2424 || (strict_memory_address_p (recog_data.operand_mode[opno],
2425 op)))
2426 win = 1;
2427 break;
2429 /* No need to check general_operand again;
2430 it was done in insn-recog.c. Well, except that reload
2431 doesn't check the validity of its replacements, but
2432 that should only matter when there's a bug. */
2433 case 'g':
2434 /* Anything goes unless it is a REG and really has a hard reg
2435 but the hard reg is not in the class GENERAL_REGS. */
2436 if (REG_P (op))
2438 if (strict < 0
2439 || GENERAL_REGS == ALL_REGS
2440 || (reload_in_progress
2441 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2442 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2443 win = 1;
2445 else if (strict < 0 || general_operand (op, mode))
2446 win = 1;
2447 break;
2449 case 'X':
2450 /* This is used for a MATCH_SCRATCH in the cases when
2451 we don't actually need anything. So anything goes
2452 any time. */
2453 win = 1;
2454 break;
2456 case TARGET_MEM_CONSTRAINT:
2457 /* Memory operands must be valid, to the extent
2458 required by STRICT. */
2459 if (MEM_P (op))
2461 if (strict > 0
2462 && !strict_memory_address_p (GET_MODE (op),
2463 XEXP (op, 0)))
2464 break;
2465 if (strict == 0
2466 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2467 break;
2468 win = 1;
2470 /* Before reload, accept what reload can turn into mem. */
2471 else if (strict < 0 && CONSTANT_P (op))
2472 win = 1;
2473 /* During reload, accept a pseudo. */
2474 else if (reload_in_progress && REG_P (op)
2475 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2476 win = 1;
2477 break;
2479 case '<':
2480 if (MEM_P (op)
2481 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2482 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2483 win = 1;
2484 break;
2486 case '>':
2487 if (MEM_P (op)
2488 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2489 || GET_CODE (XEXP (op, 0)) == POST_INC))
2490 win = 1;
2491 break;
2493 case 'E':
2494 case 'F':
2495 if (GET_CODE (op) == CONST_DOUBLE
2496 || (GET_CODE (op) == CONST_VECTOR
2497 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2498 win = 1;
2499 break;
2501 case 'G':
2502 case 'H':
2503 if (GET_CODE (op) == CONST_DOUBLE
2504 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2505 win = 1;
2506 break;
2508 case 's':
2509 if (GET_CODE (op) == CONST_INT
2510 || (GET_CODE (op) == CONST_DOUBLE
2511 && GET_MODE (op) == VOIDmode))
2512 break;
2513 case 'i':
2514 if (CONSTANT_P (op))
2515 win = 1;
2516 break;
2518 case 'n':
2519 if (GET_CODE (op) == CONST_INT
2520 || (GET_CODE (op) == CONST_DOUBLE
2521 && GET_MODE (op) == VOIDmode))
2522 win = 1;
2523 break;
2525 case 'I':
2526 case 'J':
2527 case 'K':
2528 case 'L':
2529 case 'M':
2530 case 'N':
2531 case 'O':
2532 case 'P':
2533 if (GET_CODE (op) == CONST_INT
2534 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2535 win = 1;
2536 break;
2538 case 'V':
2539 if (MEM_P (op)
2540 && ((strict > 0 && ! offsettable_memref_p (op))
2541 || (strict < 0
2542 && !(CONSTANT_P (op) || MEM_P (op)))
2543 || (reload_in_progress
2544 && !(REG_P (op)
2545 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2546 win = 1;
2547 break;
2549 case 'o':
2550 if ((strict > 0 && offsettable_memref_p (op))
2551 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2552 /* Before reload, accept what reload can handle. */
2553 || (strict < 0
2554 && (CONSTANT_P (op) || MEM_P (op)))
2556 /* During reload, accept a pseudo. */
2556 || (reload_in_progress && REG_P (op)
2557 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2558 win = 1;
2559 break;
2561 default:
2563 enum reg_class cl;
2565 cl = (c == 'r'
2566 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2567 if (cl != NO_REGS)
2569 if (strict < 0
2570 || (strict == 0
2571 && REG_P (op)
2572 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2573 || (strict == 0 && GET_CODE (op) == SCRATCH)
2574 || (REG_P (op)
2575 && reg_fits_class_p (op, cl, offset, mode)))
2576 win = 1;
2578 #ifdef EXTRA_CONSTRAINT_STR
2579 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2580 win = 1;
2582 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2583 /* Every memory operand can be reloaded to fit. */
2584 && ((strict < 0 && MEM_P (op))
2585 /* Before reload, accept what reload can turn
2586 into mem. */
2587 || (strict < 0 && CONSTANT_P (op))
2588 /* During reload, accept a pseudo. */
2589 || (reload_in_progress && REG_P (op)
2590 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2591 win = 1;
2592 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2593 /* Every address operand can be reloaded to fit. */
2594 && strict < 0)
2595 win = 1;
2596 #endif
2597 break;
2600 while (p += len, c);
2602 constraints[opno] = p;
2603 /* If this operand did not win somehow,
2604 this alternative loses. */
2605 if (! win)
2606 lose = 1;
2608 /* This alternative won; the operands are ok.
2609 Change whichever operands this alternative says to change. */
2610 if (! lose)
2612 int opno, eopno;
2614 /* See if any earlyclobber operand conflicts with some other
2615 operand. */
2617 if (strict > 0 && seen_earlyclobber_at >= 0)
2618 for (eopno = seen_earlyclobber_at;
2619 eopno < recog_data.n_operands;
2620 eopno++)
2621 /* Ignore earlyclobber operands now in memory,
2622 because we would often report failure when we have
2623 two memory operands, one of which was formerly a REG. */
2624 if (earlyclobber[eopno]
2625 && REG_P (recog_data.operand[eopno]))
2626 for (opno = 0; opno < recog_data.n_operands; opno++)
2627 if ((MEM_P (recog_data.operand[opno])
2628 || recog_data.operand_type[opno] != OP_OUT)
2629 && opno != eopno
2630 /* Ignore things like match_operator operands. */
2631 && *recog_data.constraints[opno] != 0
2632 && ! (matching_operands[opno] == eopno
2633 && operands_match_p (recog_data.operand[opno],
2634 recog_data.operand[eopno]))
2635 && ! safe_from_earlyclobber (recog_data.operand[opno],
2636 recog_data.operand[eopno]))
2637 lose = 1;
2639 if (! lose)
2641 while (--funny_match_index >= 0)
2643 recog_data.operand[funny_match[funny_match_index].other]
2644 = recog_data.operand[funny_match[funny_match_index].this_op];
2647 return 1;
2651 which_alternative++;
2653 while (which_alternative < recog_data.n_alternatives);
2655 which_alternative = -1;
2656 /* If we are about to reject this, but we are not to test strictly,
2657 try a very loose test. Only return failure if it fails also. */
2658 if (strict == 0)
2659 return constrain_operands (-1);
2660 else
2661 return 0;
2664 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2665 is a hard reg in class CLASS when its regno is offset by OFFSET
2666 and changed to mode MODE.
2667 If REG occupies multiple hard regs, all of them must be in CLASS. */
2669 int
2670 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2671 enum machine_mode mode)
2673 int regno = REGNO (operand);
2675 if (cl == NO_REGS)
2676 return 0;
2678 return (regno < FIRST_PSEUDO_REGISTER
2679 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2680 mode, regno + offset));
2683 /* Split single instruction. Helper function for split_all_insns and
2684 split_all_insns_noflow. Return last insn in the sequence if successful,
2685 or NULL if unsuccessful. */
2687 static rtx
2688 split_insn (rtx insn)
2690 /* Split insns here to get max fine-grain parallelism. */
2691 rtx first = PREV_INSN (insn);
2692 rtx last = try_split (PATTERN (insn), insn, 1);
2693 rtx insn_set, last_set, note;
2695 if (last == insn)
2696 return NULL_RTX;
2698 /* If the original instruction was a single set that was known to be
2699 equivalent to a constant, see if we can say the same about the last
2700 instruction in the split sequence. The two instructions must set
2701 the same destination. */
2702 insn_set = single_set (insn);
2703 if (insn_set)
2705 last_set = single_set (last);
2706 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2708 note = find_reg_equal_equiv_note (insn);
2709 if (note && CONSTANT_P (XEXP (note, 0)))
2710 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2711 else if (CONSTANT_P (SET_SRC (insn_set)))
2712 set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
2716 /* try_split returns the NOTE that INSN became. */
2717 SET_INSN_DELETED (insn);
2719 /* ??? Coddle to md files that generate subregs in post-reload
2720 splitters instead of computing the proper hard register. */
2721 if (reload_completed && first != last)
2723 first = NEXT_INSN (first);
2724 for (;;)
2726 if (INSN_P (first))
2727 cleanup_subreg_operands (first);
2728 if (first == last)
2729 break;
2730 first = NEXT_INSN (first);
2734 return last;
2737 /* Split all insns in the function. */
2739 void
2740 split_all_insns (void)
2742 sbitmap blocks;
2743 bool changed;
2744 basic_block bb;
2746 blocks = sbitmap_alloc (last_basic_block);
2747 sbitmap_zero (blocks);
2748 changed = false;
2750 FOR_EACH_BB_REVERSE (bb)
2752 rtx insn, next;
2753 bool finish = false;
2755 rtl_profile_for_bb (bb);
2756 for (insn = BB_HEAD (bb); !finish ; insn = next)
2758 /* Can't use `next_real_insn' because that might skip over
2759 CODE_LABELs and out of the current basic block. */
2760 next = NEXT_INSN (insn);
2761 finish = (insn == BB_END (bb));
2762 if (INSN_P (insn))
2764 rtx set = single_set (insn);
2766 /* Don't split no-op move insns. These should silently
2767 disappear later in final. Splitting such insns would
2768 break the code that handles LIBCALL blocks. */
2769 if (set && set_noop_p (set))
2771 /* Nops get in the way while scheduling, so delete them
2772 now if register allocation has already been done. It
2773 is too risky to try to do this before register
2774 allocation, and there are unlikely to be very many
2775 nops then anyway. */
2776 if (reload_completed)
2777 delete_insn_and_edges (insn);
2779 else
2781 rtx last = split_insn (insn);
2782 if (last)
2784 /* The split sequence may include a barrier, but the
2785 BB boundary we are interested in will be set to the
2786 previous one. */
2788 while (BARRIER_P (last))
2789 last = PREV_INSN (last);
2790 SET_BIT (blocks, bb->index);
2791 changed = true;
2798 default_rtl_profile ();
2799 if (changed)
2800 find_many_sub_basic_blocks (blocks);
2802 #ifdef ENABLE_CHECKING
2803 verify_flow_info ();
2804 #endif
2806 sbitmap_free (blocks);
2809 /* Same as split_all_insns, but do not expect CFG to be available.
2810 Used by machine dependent reorg passes. */
2812 unsigned int
2813 split_all_insns_noflow (void)
2815 rtx next, insn;
2817 for (insn = get_insns (); insn; insn = next)
2819 next = NEXT_INSN (insn);
2820 if (INSN_P (insn))
2822 /* Don't split no-op move insns. These should silently
2823 disappear later in final. Splitting such insns would
2824 break the code that handles LIBCALL blocks. */
2825 rtx set = single_set (insn);
2826 if (set && set_noop_p (set))
2828 /* Nops get in the way while scheduling, so delete them
2829 now if register allocation has already been done. It
2830 is too risky to try to do this before register
2831 allocation, and there are unlikely to be very many
2832 nops then anyway.
2834 ??? Should we use delete_insn when the CFG isn't valid? */
2835 if (reload_completed)
2836 delete_insn_and_edges (insn);
2838 else
2839 split_insn (insn);
2842 return 0;
2845 #ifdef HAVE_peephole2
2846 struct peep2_insn_data
2848 rtx insn;
2849 regset live_before;
2852 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2853 static int peep2_current;
2854 /* The number of instructions available to match a peep2. */
2855 int peep2_current_count;
2857 /* A non-insn marker indicating the last insn of the block.
2858 The live_before regset for this element is correct, indicating
2859 DF_LIVE_OUT for the block. */
2860 #define PEEP2_EOB pc_rtx
2862 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2863 does not exist. Used by the recognizer to find the next insn to match
2864 in a multi-insn pattern. */
2866 rtx
2867 peep2_next_insn (int n)
2869 gcc_assert (n <= peep2_current_count);
2871 n += peep2_current;
2872 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2873 n -= MAX_INSNS_PER_PEEP2 + 1;
2875 return peep2_insn_data[n].insn;
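/* Indexing note (illustrative): the buffer holds MAX_INSNS_PER_PEEP2 + 1
   entries and indices wrap, so the arithmetic above is equivalent to
     n = (peep2_current + n) % (MAX_INSNS_PER_PEEP2 + 1);
   written as a compare-and-subtract because N is at most one period.  */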
2878 /* Return true if REGNO is dead before the Nth non-note insn
2879 after `current'. */
2881 int
2882 peep2_regno_dead_p (int ofs, int regno)
2884 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2886 ofs += peep2_current;
2887 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2888 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2890 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2892 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2895 /* Similarly for a REG. */
2897 int
2898 peep2_reg_dead_p (int ofs, rtx reg)
2900 int regno, n;
2902 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2904 ofs += peep2_current;
2905 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2906 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2908 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2910 regno = REGNO (reg);
2911 n = hard_regno_nregs[regno][GET_MODE (reg)];
2912 while (--n >= 0)
2913 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2914 return 0;
2915 return 1;
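/* Worked example (illustrative): on a target whose hard registers hold
   32 bits, a DImode REG has hard_regno_nregs[regno][DImode] == 2, and
   the loop above requires both constituent hard registers to be dead.  */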
2918 /* Try to find a hard register of mode MODE, matching the register class in
2919 CLASS_STR, which is available at the beginning of the insn at peephole
2920 position FROM and remains available until the end of the insn at
2921 position TO. Positions are offsets from the current peephole window,
2922 as in peep2_next_insn.
2923 Registers that already have bits set in REG_SET will not be considered.
2925 If an appropriate register is available, it will be returned and the
2926 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2927 returned. */
2929 rtx
2930 peep2_find_free_register (int from, int to, const char *class_str,
2931 enum machine_mode mode, HARD_REG_SET *reg_set)
2933 static int search_ofs;
2934 enum reg_class cl;
2935 HARD_REG_SET live;
2936 df_ref *def_rec;
2937 int i;
2939 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2940 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2942 from += peep2_current;
2943 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2944 from -= MAX_INSNS_PER_PEEP2 + 1;
2945 to += peep2_current;
2946 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2947 to -= MAX_INSNS_PER_PEEP2 + 1;
2949 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2950 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2952 while (from != to)
2954 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2956 /* Don't use registers set or clobbered by the insn. */
2957 for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
2958 *def_rec; def_rec++)
2959 SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
2961 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2962 from = 0;
2965 cl = (class_str[0] == 'r' ? GENERAL_REGS
2966 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2968 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2970 int raw_regno, regno, success, j;
2972 /* Distribute the free registers as much as possible. */
2973 raw_regno = search_ofs + i;
2974 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2975 raw_regno -= FIRST_PSEUDO_REGISTER;
2976 #ifdef REG_ALLOC_ORDER
2977 regno = reg_alloc_order[raw_regno];
2978 #else
2979 regno = raw_regno;
2980 #endif
2982 /* Don't allocate fixed registers. */
2983 if (fixed_regs[regno])
2984 continue;
2985 /* Don't allocate global registers. */
2986 if (global_regs[regno])
2987 continue;
2988 /* Make sure the register is of the right class. */
2989 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2990 continue;
2991 /* And can support the mode we need. */
2992 if (! HARD_REGNO_MODE_OK (regno, mode))
2993 continue;
2994 /* And that we don't create an extra save/restore. */
2995 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
2996 continue;
2997 if (! targetm.hard_regno_scratch_ok (regno))
2998 continue;
3000 /* And we don't clobber traceback for noreturn functions. */
3001 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3002 && (! reload_completed || frame_pointer_needed))
3003 continue;
3005 success = 1;
3006 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3008 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3009 || TEST_HARD_REG_BIT (live, regno + j))
3011 success = 0;
3012 break;
3015 if (success)
3017 add_to_hard_reg_set (reg_set, mode, regno);
3019 /* Start the next search with the next register. */
3020 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3021 raw_regno = 0;
3022 search_ofs = raw_regno;
3024 return gen_rtx_REG (mode, regno);
3028 search_ofs = 0;
3029 return NULL_RTX;
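/* Illustrative sketch (hypothetical md fragment, not part of GCC): a
   target's define_peephole2 preparation statements typically allocate a
   scratch like this, failing the match when no register is free:

     rtx scratch = peep2_find_free_register (0, 1, "r", SImode,
                                             &scratch_regs);
     if (scratch == NULL_RTX)
       FAIL;

   where scratch_regs is a HARD_REG_SET the caller cleared beforehand.  */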
3032 /* Perform the peephole2 optimization pass. */
3034 static void
3035 peephole2_optimize (void)
3037 rtx insn, prev;
3038 bitmap live;
3039 int i;
3040 basic_block bb;
3041 bool do_cleanup_cfg = false;
3042 bool do_rebuild_jump_labels = false;
3044 df_set_flags (DF_LR_RUN_DCE);
3045 df_analyze ();
3047 /* Initialize the regsets we're going to use. */
3048 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3049 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3050 live = BITMAP_ALLOC (&reg_obstack);
3052 FOR_EACH_BB_REVERSE (bb)
3054 rtl_profile_for_bb (bb);
3055 /* Indicate that all slots except the last hold invalid data. */
3056 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3057 peep2_insn_data[i].insn = NULL_RTX;
3058 peep2_current_count = 0;
3060 /* Indicate that the last slot contains live_after data. */
3061 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3062 peep2_current = MAX_INSNS_PER_PEEP2;
3064 /* Start up propagation. */
3065 bitmap_copy (live, DF_LR_OUT (bb));
3066 df_simulate_initialize_backwards (bb, live);
3067 bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3069 for (insn = BB_END (bb); ; insn = prev)
3071 prev = PREV_INSN (insn);
3072 if (INSN_P (insn))
3074 rtx attempt, before_try, x;
3075 int match_len;
3076 rtx note;
3077 bool was_call = false;
3079 /* Record this insn. */
3080 if (--peep2_current < 0)
3081 peep2_current = MAX_INSNS_PER_PEEP2;
3082 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3083 && peep2_insn_data[peep2_current].insn == NULL_RTX)
3084 peep2_current_count++;
3085 peep2_insn_data[peep2_current].insn = insn;
3086 df_simulate_one_insn_backwards (bb, insn, live);
3087 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3089 if (RTX_FRAME_RELATED_P (insn))
3091 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3092 substitution would lose the
3093 REG_FRAME_RELATED_EXPR that is attached. */
3094 peep2_current_count = 0;
3095 attempt = NULL;
3097 else
3098 /* Match the peephole. */
3099 attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
3101 if (attempt != NULL)
3103 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3104 in ATTEMPT and copy our CALL_INSN_FUNCTION_USAGE and other
3105 cfg-related call notes to it. */
3106 for (i = 0; i <= match_len; ++i)
3108 int j;
3109 rtx old_insn, new_insn, note;
3111 j = i + peep2_current;
3112 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3113 j -= MAX_INSNS_PER_PEEP2 + 1;
3114 old_insn = peep2_insn_data[j].insn;
3115 if (!CALL_P (old_insn))
3116 continue;
3117 was_call = true;
3119 new_insn = attempt;
3120 while (new_insn != NULL_RTX)
3122 if (CALL_P (new_insn))
3123 break;
3124 new_insn = NEXT_INSN (new_insn);
3127 gcc_assert (new_insn != NULL_RTX);
3129 CALL_INSN_FUNCTION_USAGE (new_insn)
3130 = CALL_INSN_FUNCTION_USAGE (old_insn);
3132 for (note = REG_NOTES (old_insn);
3133 note;
3134 note = XEXP (note, 1))
3135 switch (REG_NOTE_KIND (note))
3137 case REG_NORETURN:
3138 case REG_SETJMP:
3139 add_reg_note (new_insn, REG_NOTE_KIND (note),
3140 XEXP (note, 0));
3141 break;
3142 default:
3143 /* Discard all other reg notes. */
3144 break;
3147 /* Croak if there is another call in the sequence. */
3148 while (++i <= match_len)
3150 j = i + peep2_current;
3151 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3152 j -= MAX_INSNS_PER_PEEP2 + 1;
3153 old_insn = peep2_insn_data[j].insn;
3154 gcc_assert (!CALL_P (old_insn));
3156 break;
3159 i = match_len + peep2_current;
3160 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3161 i -= MAX_INSNS_PER_PEEP2 + 1;
3163 note = find_reg_note (peep2_insn_data[i].insn,
3164 REG_EH_REGION, NULL_RTX);
3166 /* Replace the old sequence with the new. */
3167 attempt = emit_insn_after_setloc (attempt,
3168 peep2_insn_data[i].insn,
3169 INSN_LOCATOR (peep2_insn_data[i].insn));
3170 before_try = PREV_INSN (insn);
3171 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3173 /* Re-insert the EH_REGION notes. */
3174 if (note || (was_call && nonlocal_goto_handler_labels))
3176 edge eh_edge;
3177 edge_iterator ei;
3179 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3180 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3181 break;
3183 for (x = attempt ; x != before_try ; x = PREV_INSN (x))
3184 if (CALL_P (x)
3185 || (flag_non_call_exceptions
3186 && may_trap_p (PATTERN (x))
3187 && !find_reg_note (x, REG_EH_REGION, NULL)))
3189 if (note)
3190 add_reg_note (x, REG_EH_REGION, XEXP (note, 0));
3192 if (x != BB_END (bb) && eh_edge)
3194 edge nfte, nehe;
3195 int flags;
3197 nfte = split_block (bb, x);
3198 flags = (eh_edge->flags
3199 & (EDGE_EH | EDGE_ABNORMAL));
3200 if (CALL_P (x))
3201 flags |= EDGE_ABNORMAL_CALL;
3202 nehe = make_edge (nfte->src, eh_edge->dest,
3203 flags);
3205 nehe->probability = eh_edge->probability;
3206 nfte->probability
3207 = REG_BR_PROB_BASE - nehe->probability;
3209 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3210 bb = nfte->src;
3211 eh_edge = nehe;
3215 /* A possibly trapping insn may have been converted to a
3216 non-trapping one; zap the now-dummy outgoing edges. */
3217 do_cleanup_cfg |= purge_dead_edges (bb);
3220 #ifdef HAVE_conditional_execution
3221 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3222 peep2_insn_data[i].insn = NULL_RTX;
3223 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3224 peep2_current_count = 0;
3225 #else
3226 /* Back up lifetime information past the end of the
3227 newly created sequence. */
3228 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3229 i = 0;
3230 bitmap_copy (live, peep2_insn_data[i].live_before);
3232 /* Update life information for the new sequence. */
3233 x = attempt;
3234 do
3236 if (INSN_P (x))
3238 if (--i < 0)
3239 i = MAX_INSNS_PER_PEEP2;
3240 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3241 && peep2_insn_data[i].insn == NULL_RTX)
3242 peep2_current_count++;
3243 peep2_insn_data[i].insn = x;
3244 df_insn_rescan (x);
3245 df_simulate_one_insn_backwards (bb, x, live);
3246 bitmap_copy (peep2_insn_data[i].live_before, live);
3248 x = PREV_INSN (x);
3250 while (x != prev);
3252 peep2_current = i;
3253 #endif
3255 /* If we generated a jump instruction, it won't have
3256 JUMP_LABEL set. Recompute after we're done. */
3257 for (x = attempt; x != before_try; x = PREV_INSN (x))
3258 if (JUMP_P (x))
3260 do_rebuild_jump_labels = true;
3261 break;
3266 if (insn == BB_HEAD (bb))
3267 break;
3271 default_rtl_profile ();
3272 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3273 BITMAP_FREE (peep2_insn_data[i].live_before);
3274 BITMAP_FREE (live);
3275 if (do_rebuild_jump_labels)
3276 rebuild_jump_labels (get_insns ());
3278 #endif /* HAVE_peephole2 */
3280 /* Common predicates for use with define_bypass. */
3282 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3283 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3284 must be either a single_set or a PARALLEL with SETs inside. */
3286 int
3287 store_data_bypass_p (rtx out_insn, rtx in_insn)
3289 rtx out_set, in_set;
3290 rtx out_pat, in_pat;
3291 rtx out_exp, in_exp;
3292 int i, j;
3294 in_set = single_set (in_insn);
3295 if (in_set)
3297 if (!MEM_P (SET_DEST (in_set)))
3298 return false;
3300 out_set = single_set (out_insn);
3301 if (out_set)
3303 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3304 return false;
3306 else
3308 out_pat = PATTERN (out_insn);
3310 if (GET_CODE (out_pat) != PARALLEL)
3311 return false;
3313 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3315 out_exp = XVECEXP (out_pat, 0, i);
3317 if (GET_CODE (out_exp) == CLOBBER)
3318 continue;
3320 gcc_assert (GET_CODE (out_exp) == SET);
3322 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3323 return false;
3327 else
3329 in_pat = PATTERN (in_insn);
3330 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3332 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3334 in_exp = XVECEXP (in_pat, 0, i);
3336 if (GET_CODE (in_exp) == CLOBBER)
3337 continue;
3339 gcc_assert (GET_CODE (in_exp) == SET);
3341 if (!MEM_P (SET_DEST (in_exp)))
3342 return false;
3344 out_set = single_set (out_insn);
3345 if (out_set)
3347 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3348 return false;
3350 else
3352 out_pat = PATTERN (out_insn);
3353 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3355 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3357 out_exp = XVECEXP (out_pat, 0, j);
3359 if (GET_CODE (out_exp) == CLOBBER)
3360 continue;
3362 gcc_assert (GET_CODE (out_exp) == SET);
3364 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3365 return false;
3371 return true;
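/* Illustrative sketch (hypothetical md fragment): a scheduler
   description grants a shortened latency when only the stored data
   depends on the producing instruction:

     (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")

   where the insn reservation names are made up for the example.  */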
3374 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3375 condition, and not the THEN or ELSE branch. OUT_INSN may be either a
3376 single set or multiple sets; IN_INSN should properly be a single_set, but
3377 for convenience of insn categorization may be any JUMP or CALL insn. */
3379 int
3380 if_test_bypass_p (rtx out_insn, rtx in_insn)
3382 rtx out_set, in_set;
3384 in_set = single_set (in_insn);
3385 if (! in_set)
3387 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3388 return false;
3391 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3392 return false;
3393 in_set = SET_SRC (in_set);
3395 out_set = single_set (out_insn);
3396 if (out_set)
3398 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3399 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3400 return false;
3402 else
3404 rtx out_pat;
3405 int i;
3407 out_pat = PATTERN (out_insn);
3408 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3410 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3412 rtx exp = XVECEXP (out_pat, 0, i);
3414 if (GET_CODE (exp) == CLOBBER)
3415 continue;
3417 gcc_assert (GET_CODE (exp) == SET);
3419 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3420 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3421 return false;
3425 return true;
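/* Illustrative sketch (hypothetical md fragment):

     (define_bypass 1 "cmp_insn" "cmove_insn" "if_test_bypass_p")

   applies the bypass only when the producer feeds the IF_THEN_ELSE
   condition rather than one of its arms.  */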
3428 static bool
3429 gate_handle_peephole2 (void)
3431 return (optimize > 0 && flag_peephole2);
3434 static unsigned int
3435 rest_of_handle_peephole2 (void)
3437 #ifdef HAVE_peephole2
3438 peephole2_optimize ();
3439 #endif
3440 return 0;
3443 struct rtl_opt_pass pass_peephole2 =
3446 RTL_PASS,
3447 "peephole2", /* name */
3448 gate_handle_peephole2, /* gate */
3449 rest_of_handle_peephole2, /* execute */
3450 NULL, /* sub */
3451 NULL, /* next */
3452 0, /* static_pass_number */
3453 TV_PEEPHOLE2, /* tv_id */
3454 0, /* properties_required */
3455 0, /* properties_provided */
3456 0, /* properties_destroyed */
3457 0, /* todo_flags_start */
3458 TODO_df_finish | TODO_verify_rtl_sharing |
3459 TODO_dump_func /* todo_flags_finish */
3463 static unsigned int
3464 rest_of_handle_split_all_insns (void)
3466 split_all_insns ();
3467 return 0;
3470 struct rtl_opt_pass pass_split_all_insns =
3473 RTL_PASS,
3474 "split1", /* name */
3475 NULL, /* gate */
3476 rest_of_handle_split_all_insns, /* execute */
3477 NULL, /* sub */
3478 NULL, /* next */
3479 0, /* static_pass_number */
3480 0, /* tv_id */
3481 0, /* properties_required */
3482 0, /* properties_provided */
3483 0, /* properties_destroyed */
3484 0, /* todo_flags_start */
3485 TODO_dump_func /* todo_flags_finish */
3489 static unsigned int
3490 rest_of_handle_split_after_reload (void)
3492 /* If optimizing, then go ahead and split insns now. */
3493 #ifndef STACK_REGS
3494 if (optimize > 0)
3495 #endif
3496 split_all_insns ();
3497 return 0;
3500 struct rtl_opt_pass pass_split_after_reload =
3503 RTL_PASS,
3504 "split2", /* name */
3505 NULL, /* gate */
3506 rest_of_handle_split_after_reload, /* execute */
3507 NULL, /* sub */
3508 NULL, /* next */
3509 0, /* static_pass_number */
3510 0, /* tv_id */
3511 0, /* properties_required */
3512 0, /* properties_provided */
3513 0, /* properties_destroyed */
3514 0, /* todo_flags_start */
3515 TODO_dump_func /* todo_flags_finish */
3519 static bool
3520 gate_handle_split_before_regstack (void)
3522 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3523 /* If flow2 creates new instructions which need splitting, and
3524 scheduling after reload is not done, they might not be split
3525 until final, which doesn't allow splitting when
3526 HAVE_ATTR_length is defined. */
3527 # ifdef INSN_SCHEDULING
3528 return (optimize && !flag_schedule_insns_after_reload);
3529 # else
3530 return (optimize);
3531 # endif
3532 #else
3533 return 0;
3534 #endif
3537 static unsigned int
3538 rest_of_handle_split_before_regstack (void)
3540 split_all_insns ();
3541 return 0;
3544 struct rtl_opt_pass pass_split_before_regstack =
3547 RTL_PASS,
3548 "split3", /* name */
3549 gate_handle_split_before_regstack, /* gate */
3550 rest_of_handle_split_before_regstack, /* execute */
3551 NULL, /* sub */
3552 NULL, /* next */
3553 0, /* static_pass_number */
3554 0, /* tv_id */
3555 0, /* properties_required */
3556 0, /* properties_provided */
3557 0, /* properties_destroyed */
3558 0, /* todo_flags_start */
3559 TODO_dump_func /* todo_flags_finish */
3563 static bool
3564 gate_handle_split_before_sched2 (void)
3566 #ifdef INSN_SCHEDULING
3567 return optimize > 0 && flag_schedule_insns_after_reload;
3568 #else
3569 return 0;
3570 #endif
3573 static unsigned int
3574 rest_of_handle_split_before_sched2 (void)
3576 #ifdef INSN_SCHEDULING
3577 split_all_insns ();
3578 #endif
3579 return 0;
3582 struct rtl_opt_pass pass_split_before_sched2 =
3585 RTL_PASS,
3586 "split4", /* name */
3587 gate_handle_split_before_sched2, /* gate */
3588 rest_of_handle_split_before_sched2, /* execute */
3589 NULL, /* sub */
3590 NULL, /* next */
3591 0, /* static_pass_number */
3592 0, /* tv_id */
3593 0, /* properties_required */
3594 0, /* properties_provided */
3595 0, /* properties_destroyed */
3596 0, /* todo_flags_start */
3597 TODO_verify_flow |
3598 TODO_dump_func /* todo_flags_finish */
3602 /* The placement of the splitting that we do for shorten_branches
3603 depends on whether regstack is used by the target or not. */
3604 static bool
3605 gate_do_final_split (void)
3607 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3608 return 1;
3609 #else
3610 return 0;
3611 #endif
3614 struct rtl_opt_pass pass_split_for_shorten_branches =
3617 RTL_PASS,
3618 "split5", /* name */
3619 gate_do_final_split, /* gate */
3620 split_all_insns_noflow, /* execute */
3621 NULL, /* sub */
3622 NULL, /* next */
3623 0, /* static_pass_number */
3624 0, /* tv_id */
3625 0, /* properties_required */
3626 0, /* properties_provided */
3627 0, /* properties_destroyed */
3628 0, /* todo_flags_start */
3629 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */