/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1, with UNSHARE defaulting to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1, with UNSHARE defaulting to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
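
/* Usage sketch (illustrative only, not part of recog.c): with
   IN_GROUP == 0, validate_change applies or rejects a single change
   immediately.  NEW_SRC stands for a hypothetical replacement rtx
   computed by the caller:

     if (validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0))
       ...		INSN was re-recognized with NEW_SRC in place
     else
       ...		the change was suppressed; INSN is unmodified

   With IN_GROUP == 1 the change is only queued; see the grouped-change
   sketch after cancel_changes below.  */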
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
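
/* Grouped-change sketch (illustrative only, not part of recog.c):
   queue several changes with IN_GROUP == 1 and validate them all at
   once.  OP0 and OP1 are hypothetical replacement rtxen.

     validate_change (insn, &XEXP (x, 0), op0, 1);
     validate_change (insn, &XEXP (x, 1), op1, 1);
     if (apply_change_group ())
       ...		both changes kept, df info rescanned
     else
       ...		cancel_changes restored every touched location

   Callers that need finer control can call verify_changes and then
   confirm_change_group or cancel_changes themselves, which is exactly
   what apply_change_group does above.  */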
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_INT_P (XEXP (x, 1))
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */

int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
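
/* Usage sketch (illustrative only, not part of recog.c): replacing one
   pseudo by another everywhere in INSN while keeping INSN valid.
   REG_A and REG_B are hypothetical registers of the same mode.

     if (validate_replace_rtx (reg_a, reg_b, insn))
       ...		every occurrence replaced, INSN re-recognized
     else
       ...		all replacements were backed out

   The _part variants restrict the walk to one subexpression, and the
   _nosimplify variant skips simplify_while_replacing.  */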
/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
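
/* Machine-description sketch (illustrative only, not from any real
   target): a move pattern typically names this predicate, e.g.

     (define_insn "..."
       [(set (match_operand:SI 0 "nonimmediate_operand" "...")
             (match_operand:SI 1 "general_operand" "..."))]
       ...)

   Operand 1 then accepts a REG, a MEM with a valid address, or a
   legitimate constant, exactly as checked above.  */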
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
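
/* RTL shape sketch (illustrative only): with STACK_GROWS_DOWNWARD and
   an identity PUSH_ROUNDING, a push of an SImode value matches

     (mem:SI (pre_dec (reg sp)))

   whereas a target that rounds the push size must use the PRE_MODIFY
   form tested in the else branch above:

     (mem:SI (pre_modify (reg sp) (plus (reg sp) (const_int -N))))

   where N is the rounded size.  */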
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
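
/* Example (illustrative only): for a source-level statement such as

     asm ("..." : "=r" (a), "=r" (b) : "g" (c));

   the body is a PARALLEL of two SETs (plus any CLOBBERs), so n_sets
   is 2, ASM_OPERANDS_INPUT_LENGTH is 1, there are no label operands,
   and asm_noperands returns 3.  */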
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;		/* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
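
/* Usage sketch (illustrative only): check_asm_operands above shows the
   full calling convention; a pass that only needs the constraints can
   pass NULL for the vectors it does not care about:

     noperands = asm_noperands (body);
     constraints = XALLOCAVEC (const char *, noperands);
     decode_asm_operands (body, NULL, NULL, constraints, NULL, NULL);
*/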
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
     : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
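
/* Example (illustrative only): if Y is (plus (reg BASE) (const_int 4))
   and MODE is SImode, the constant term is temporarily rewritten to
   (const_int 4 + 3) and the result checked as a QImode address.  This
   is what makes an "o"-constraint operand safe to access piecewise at
   any byte offset within the object.  */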
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr);
}
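
/* Example (illustrative only): (post_inc (reg X)) is mode-dependent
   because the increment applied to X equals the size of the mode of
   the enclosing MEM, so the same address rtx means different things
   inside (mem:QI ...) and (mem:SI ...).  */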
/* Like extract_insn, but save the insn extracted and don't extract again
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
2041 /* Analyze INSN and fill in recog_data. */
2043 void
2044 extract_insn (rtx insn)
2046 int i;
2047 int icode;
2048 int noperands;
2049 rtx body = PATTERN (insn);
2051 recog_data.n_operands = 0;
2052 recog_data.n_alternatives = 0;
2053 recog_data.n_dups = 0;
2054 recog_data.is_asm = false;
2056 switch (GET_CODE (body))
2058 case USE:
2059 case CLOBBER:
2060 case ASM_INPUT:
2061 case ADDR_VEC:
2062 case ADDR_DIFF_VEC:
2063 case VAR_LOCATION:
2064 return;
2066 case SET:
2067 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2068 goto asm_insn;
2069 else
2070 goto normal_insn;
2071 case PARALLEL:
2072 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2073 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2074 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2075 goto asm_insn;
2076 else
2077 goto normal_insn;
2078 case ASM_OPERANDS:
2079 asm_insn:
2080 recog_data.n_operands = noperands = asm_noperands (body);
2081 if (noperands >= 0)
2083 /* This insn is an `asm' with operands. */
2085 /* expand_asm_operands makes sure there aren't too many operands. */
2086 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2088 /* Now get the operand values and constraints out of the insn. */
2089 decode_asm_operands (body, recog_data.operand,
2090 recog_data.operand_loc,
2091 recog_data.constraints,
2092 recog_data.operand_mode, NULL);
2093 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2094 if (noperands > 0)
2096 const char *p = recog_data.constraints[0];
2097 recog_data.n_alternatives = 1;
2098 while (*p)
2099 recog_data.n_alternatives += (*p++ == ',');
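/* For example, the asm constraint string "=r,m" contains one comma
and therefore describes two alternatives; every operand of the asm
must list the same number of alternatives. */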
2101 recog_data.is_asm = true;
2102 break;
2104 fatal_insn_not_found (insn);
2106 default:
2107 normal_insn:
2108 /* Ordinary insn: recognize it, get the operands via insn_extract
2109 and get the constraints. */
2111 icode = recog_memoized (insn);
2112 if (icode < 0)
2113 fatal_insn_not_found (insn);
2115 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2116 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2117 recog_data.n_dups = insn_data[icode].n_dups;
2119 insn_extract (insn);
2121 for (i = 0; i < noperands; i++)
2123 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2124 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2125 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2126 /* A VOIDmode match_operand gets its mode from its real operand. */
2127 if (recog_data.operand_mode[i] == VOIDmode)
2128 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2131 for (i = 0; i < noperands; i++)
2132 recog_data.operand_type[i]
2133 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2134 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2135 : OP_IN);
2137 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2139 if (INSN_CODE (insn) < 0)
2140 for (i = 0; i < recog_data.n_alternatives; i++)
2141 recog_data.alternative_enabled_p[i] = true;
2142 else
2144 recog_data.insn = insn;
2145 for (i = 0; i < recog_data.n_alternatives; i++)
2147 which_alternative = i;
2148 recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
2152 recog_data.insn = NULL;
2153 which_alternative = -1;
2156 /* After calling extract_insn, you can use this function to extract some
2157 information from the constraint strings into a more usable form.
2158 The collected data is stored in recog_op_alt. */
2159 void
2160 preprocess_constraints (void)
2162 int i;
2164 for (i = 0; i < recog_data.n_operands; i++)
2165 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2166 * sizeof (struct operand_alternative)));
2168 for (i = 0; i < recog_data.n_operands; i++)
2170 int j;
2171 struct operand_alternative *op_alt;
2172 const char *p = recog_data.constraints[i];
2174 op_alt = recog_op_alt[i];
2176 for (j = 0; j < recog_data.n_alternatives; j++)
2178 op_alt[j].cl = NO_REGS;
2179 op_alt[j].constraint = p;
2180 op_alt[j].matches = -1;
2181 op_alt[j].matched = -1;
2183 if (!recog_data.alternative_enabled_p[j])
2185 p = skip_alternative (p);
2186 continue;
2189 if (*p == '\0' || *p == ',')
2191 op_alt[j].anything_ok = 1;
2192 continue;
2195 for (;;)
2197 char c = *p;
2198 if (c == '#')
2199 do
2200 c = *++p;
2201 while (c != ',' && c != '\0');
2202 if (c == ',' || c == '\0')
2204 p++;
2205 break;
2208 switch (c)
2210 case '=': case '+': case '*': case '%':
2211 case 'E': case 'F': case 'G': case 'H':
2212 case 's': case 'i': case 'n':
2213 case 'I': case 'J': case 'K': case 'L':
2214 case 'M': case 'N': case 'O': case 'P':
2215 /* These don't say anything we care about. */
2216 break;
2218 case '?':
2219 op_alt[j].reject += 6;
2220 break;
2221 case '!':
2222 op_alt[j].reject += 600;
2223 break;
2224 case '&':
2225 op_alt[j].earlyclobber = 1;
2226 break;
2228 case '0': case '1': case '2': case '3': case '4':
2229 case '5': case '6': case '7': case '8': case '9':
2231 char *end;
2232 op_alt[j].matches = strtoul (p, &end, 10);
2233 recog_op_alt[op_alt[j].matches][j].matched = i;
2234 p = end;
2236 continue;
2238 case TARGET_MEM_CONSTRAINT:
2239 op_alt[j].memory_ok = 1;
2240 break;
2241 case '<':
2242 op_alt[j].decmem_ok = 1;
2243 break;
2244 case '>':
2245 op_alt[j].incmem_ok = 1;
2246 break;
2247 case 'V':
2248 op_alt[j].nonoffmem_ok = 1;
2249 break;
2250 case 'o':
2251 op_alt[j].offmem_ok = 1;
2252 break;
2253 case 'X':
2254 op_alt[j].anything_ok = 1;
2255 break;
2257 case 'p':
2258 op_alt[j].is_address = 1;
2259 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2260 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2261 break;
2263 case 'g':
2264 case 'r':
2265 op_alt[j].cl =
2266 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2267 break;
2269 default:
2270 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2272 op_alt[j].memory_ok = 1;
2273 break;
2275 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2277 op_alt[j].is_address = 1;
2278 op_alt[j].cl
2279 = (reg_class_subunion
2280 [(int) op_alt[j].cl]
2281 [(int) base_reg_class (VOIDmode, ADDRESS,
2282 SCRATCH)]);
2283 break;
2286 op_alt[j].cl
2287 = (reg_class_subunion
2288 [(int) op_alt[j].cl]
2289 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2290 break;
2292 p += CONSTRAINT_LEN (c, p);
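/* Worked example for preprocess_constraints (illustrative): for an
operand whose constraint is "=r,m", alternative 0 ends up with
cl == GENERAL_REGS and memory_ok == 0, while alternative 1 keeps
cl == NO_REGS and gets memory_ok == 1; '=' carries no
per-alternative information and is skipped. */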
2298 /* Check the operands of an insn against the insn's operand constraints
2299 and return 1 if they are valid.
2300 The information about the insn's operands, constraints, operand modes
2301 etc. is obtained from the global variables set up by extract_insn.
2303 WHICH_ALTERNATIVE is set to a number which indicates which
2304 alternative of constraints was matched: 0 for the first alternative,
2305 1 for the next, etc.
2307 In addition, when two operands are required to match
2308 and it happens that the output operand is (reg) while the
2309 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2310 make the output operand look like the input.
2311 This is because the output operand is the one the template will print.
2313 This is used in final, just before printing the assembler code and by
2314 the routines that determine an insn's attribute.
2316 If STRICT is a positive nonzero value, it means that we have been
2317 called after reload has been completed. In that case, we must
2318 do all checks strictly. If it is zero, it means that we have been called
2319 before reload has completed. In that case, we first try to see if we can
2320 find an alternative that matches strictly. If not, we try again, this
2321 time assuming that reload will fix up the insn. This provides a "best
2322 guess" for the alternative and is used to compute attributes of insns prior
2323 to reload. A negative value of STRICT is used for this internal call. */
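/* A minimal usage sketch, mirroring callers such as final and
extract_constrain_insn_cached above (not a new entry point):

extract_insn (insn);
if (!constrain_operands (reload_completed))
fatal_insn_not_found (insn);

On success, which_alternative identifies the matching alternative. */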
2325 struct funny_match
2327 int this_op, other;
2330 int
2331 constrain_operands (int strict)
2333 const char *constraints[MAX_RECOG_OPERANDS];
2334 int matching_operands[MAX_RECOG_OPERANDS];
2335 int earlyclobber[MAX_RECOG_OPERANDS];
2336 int c;
2338 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2339 int funny_match_index;
2341 which_alternative = 0;
2342 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2343 return 1;
2345 for (c = 0; c < recog_data.n_operands; c++)
2347 constraints[c] = recog_data.constraints[c];
2348 matching_operands[c] = -1;
2351 do
2353 int seen_earlyclobber_at = -1;
2354 int opno;
2355 int lose = 0;
2356 funny_match_index = 0;
2358 if (!recog_data.alternative_enabled_p[which_alternative])
2360 int i;
2362 for (i = 0; i < recog_data.n_operands; i++)
2363 constraints[i] = skip_alternative (constraints[i]);
2365 which_alternative++;
2366 continue;
2369 for (opno = 0; opno < recog_data.n_operands; opno++)
2371 rtx op = recog_data.operand[opno];
2372 enum machine_mode mode = GET_MODE (op);
2373 const char *p = constraints[opno];
2374 int offset = 0;
2375 int win = 0;
2376 int val;
2377 int len;
2379 earlyclobber[opno] = 0;
2381 /* A unary operator may be accepted by the predicate, but it
2382 is irrelevant for matching constraints. */
2383 if (UNARY_P (op))
2384 op = XEXP (op, 0);
2386 if (GET_CODE (op) == SUBREG)
2388 if (REG_P (SUBREG_REG (op))
2389 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2390 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2391 GET_MODE (SUBREG_REG (op)),
2392 SUBREG_BYTE (op),
2393 GET_MODE (op));
2394 op = SUBREG_REG (op);
2397 /* An empty constraint or empty alternative
2398 allows anything which matched the pattern. */
2399 if (*p == 0 || *p == ',')
2400 win = 1;
2402 do
2403 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2405 case '\0':
2406 len = 0;
2407 break;
2408 case ',':
2409 c = '\0';
2410 break;
2412 case '?': case '!': case '*': case '%':
2413 case '=': case '+':
2414 break;
2416 case '#':
2417 /* Ignore rest of this alternative as far as
2418 constraint checking is concerned. */
2419 do
2420 p++;
2421 while (*p && *p != ',');
2422 len = 0;
2423 break;
2425 case '&':
2426 earlyclobber[opno] = 1;
2427 if (seen_earlyclobber_at < 0)
2428 seen_earlyclobber_at = opno;
2429 break;
2431 case '0': case '1': case '2': case '3': case '4':
2432 case '5': case '6': case '7': case '8': case '9':
2434 /* This operand must be the same as a previous one.
2435 This kind of constraint is used for instructions such
2436 as add when they take only two operands.
2438 Note that the lower-numbered operand is passed first.
2440 If we are not testing strictly, assume that this
2441 constraint will be satisfied. */
2443 char *end;
2444 int match;
2446 match = strtoul (p, &end, 10);
2447 p = end;
2449 if (strict < 0)
2450 val = 1;
2451 else
2453 rtx op1 = recog_data.operand[match];
2454 rtx op2 = recog_data.operand[opno];
2456 /* A unary operator may be accepted by the predicate,
2457 but it is irrelevant for matching constraints. */
2458 if (UNARY_P (op1))
2459 op1 = XEXP (op1, 0);
2460 if (UNARY_P (op2))
2461 op2 = XEXP (op2, 0);
2463 val = operands_match_p (op1, op2);
2466 matching_operands[opno] = match;
2467 matching_operands[match] = opno;
2469 if (val != 0)
2470 win = 1;
2472 /* If output is *x and input is *--x, arrange later
2473 to change the output to *--x as well, since the
2474 output op is the one that will be printed. */
2475 if (val == 2 && strict > 0)
2477 funny_match[funny_match_index].this_op = opno;
2478 funny_match[funny_match_index++].other = match;
2481 len = 0;
2482 break;
2484 case 'p':
2485 /* p is used for address_operands. When we are called by
2486 gen_reload, no one will have checked that the address is
2487 strictly valid, i.e., that all pseudos requiring hard regs
2488 have gotten them. */
2489 if (strict <= 0
2490 || (strict_memory_address_p (recog_data.operand_mode[opno],
2491 op)))
2492 win = 1;
2493 break;
2495 /* No need to check general_operand again;
2496 it was done in insn-recog.c. Well, except that reload
2497 doesn't check the validity of its replacements, but
2498 that should only matter when there's a bug. */
2499 case 'g':
2500 /* Anything goes unless it is a REG and really has a hard reg
2501 but the hard reg is not in the class GENERAL_REGS. */
2502 if (REG_P (op))
2504 if (strict < 0
2505 || GENERAL_REGS == ALL_REGS
2506 || (reload_in_progress
2507 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2508 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2509 win = 1;
2511 else if (strict < 0 || general_operand (op, mode))
2512 win = 1;
2513 break;
2515 case 'X':
2516 /* This is used for a MATCH_SCRATCH in the cases when
2517 we don't actually need anything. So anything goes
2518 any time. */
2519 win = 1;
2520 break;
2522 case TARGET_MEM_CONSTRAINT:
2523 /* Memory operands must be valid, to the extent
2524 required by STRICT. */
2525 if (MEM_P (op))
2527 if (strict > 0
2528 && !strict_memory_address_addr_space_p
2529 (GET_MODE (op), XEXP (op, 0),
2530 MEM_ADDR_SPACE (op)))
2531 break;
2532 if (strict == 0
2533 && !memory_address_addr_space_p
2534 (GET_MODE (op), XEXP (op, 0),
2535 MEM_ADDR_SPACE (op)))
2536 break;
2537 win = 1;
2539 /* Before reload, accept what reload can turn into mem. */
2540 else if (strict < 0 && CONSTANT_P (op))
2541 win = 1;
2542 /* During reload, accept a pseudo */
2543 else if (reload_in_progress && REG_P (op)
2544 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2545 win = 1;
2546 break;
2548 case '<':
2549 if (MEM_P (op)
2550 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2551 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2552 win = 1;
2553 break;
2555 case '>':
2556 if (MEM_P (op)
2557 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2558 || GET_CODE (XEXP (op, 0)) == POST_INC))
2559 win = 1;
2560 break;
2562 case 'E':
2563 case 'F':
2564 if (GET_CODE (op) == CONST_DOUBLE
2565 || (GET_CODE (op) == CONST_VECTOR
2566 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2567 win = 1;
2568 break;
2570 case 'G':
2571 case 'H':
2572 if (GET_CODE (op) == CONST_DOUBLE
2573 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2574 win = 1;
2575 break;
2577 case 's':
2578 if (CONST_INT_P (op)
2579 || (GET_CODE (op) == CONST_DOUBLE
2580 && GET_MODE (op) == VOIDmode))
2581 break;
2582 case 'i':
2583 if (CONSTANT_P (op))
2584 win = 1;
2585 break;
2587 case 'n':
2588 if (CONST_INT_P (op)
2589 || (GET_CODE (op) == CONST_DOUBLE
2590 && GET_MODE (op) == VOIDmode))
2591 win = 1;
2592 break;
2594 case 'I':
2595 case 'J':
2596 case 'K':
2597 case 'L':
2598 case 'M':
2599 case 'N':
2600 case 'O':
2601 case 'P':
2602 if (CONST_INT_P (op)
2603 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2604 win = 1;
2605 break;
2607 case 'V':
2608 if (MEM_P (op)
2609 && ((strict > 0 && ! offsettable_memref_p (op))
2610 || (strict < 0
2611 && !(CONSTANT_P (op) || MEM_P (op)))
2612 || (reload_in_progress
2613 && !(REG_P (op)
2614 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2615 win = 1;
2616 break;
2618 case 'o':
2619 if ((strict > 0 && offsettable_memref_p (op))
2620 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2621 /* Before reload, accept what reload can handle. */
2622 || (strict < 0
2623 && (CONSTANT_P (op) || MEM_P (op)))
2624 /* During reload, accept a pseudo */
2625 || (reload_in_progress && REG_P (op)
2626 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2627 win = 1;
2628 break;
2630 default:
2632 enum reg_class cl;
2634 cl = (c == 'r'
2635 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2636 if (cl != NO_REGS)
2638 if (strict < 0
2639 || (strict == 0
2640 && REG_P (op)
2641 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2642 || (strict == 0 && GET_CODE (op) == SCRATCH)
2643 || (REG_P (op)
2644 && reg_fits_class_p (op, cl, offset, mode)))
2645 win = 1;
2647 #ifdef EXTRA_CONSTRAINT_STR
2648 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2649 win = 1;
2651 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2652 /* Every memory operand can be reloaded to fit. */
2653 && ((strict < 0 && MEM_P (op))
2654 /* Before reload, accept what reload can turn
2655 into mem. */
2656 || (strict < 0 && CONSTANT_P (op))
2657 /* During reload, accept a pseudo */
2658 || (reload_in_progress && REG_P (op)
2659 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2660 win = 1;
2661 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2662 /* Every address operand can be reloaded to fit. */
2663 && strict < 0)
2664 win = 1;
2665 #endif
2666 break;
2669 while (p += len, c);
2671 constraints[opno] = p;
2672 /* If this operand did not win somehow,
2673 this alternative loses. */
2674 if (! win)
2675 lose = 1;
2677 /* This alternative won; the operands are ok.
2678 Change whichever operands this alternative says to change. */
2679 if (! lose)
2681 int opno, eopno;
2683 /* See if any earlyclobber operand conflicts with some other
2684 operand. */
2686 if (strict > 0 && seen_earlyclobber_at >= 0)
2687 for (eopno = seen_earlyclobber_at;
2688 eopno < recog_data.n_operands;
2689 eopno++)
2690 /* Ignore earlyclobber operands now in memory,
2691 because we would often report failure when we have
2692 two memory operands, one of which was formerly a REG. */
2693 if (earlyclobber[eopno]
2694 && REG_P (recog_data.operand[eopno]))
2695 for (opno = 0; opno < recog_data.n_operands; opno++)
2696 if ((MEM_P (recog_data.operand[opno])
2697 || recog_data.operand_type[opno] != OP_OUT)
2698 && opno != eopno
2699 /* Ignore things like match_operator operands. */
2700 && *recog_data.constraints[opno] != 0
2701 && ! (matching_operands[opno] == eopno
2702 && operands_match_p (recog_data.operand[opno],
2703 recog_data.operand[eopno]))
2704 && ! safe_from_earlyclobber (recog_data.operand[opno],
2705 recog_data.operand[eopno]))
2706 lose = 1;
2708 if (! lose)
2710 while (--funny_match_index >= 0)
2712 recog_data.operand[funny_match[funny_match_index].other]
2713 = recog_data.operand[funny_match[funny_match_index].this_op];
2716 #ifdef AUTO_INC_DEC
2717 /* For operands without < or > constraints reject side-effects. */
2718 if (recog_data.is_asm)
2720 for (opno = 0; opno < recog_data.n_operands; opno++)
2721 if (MEM_P (recog_data.operand[opno]))
2722 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2724 case PRE_INC:
2725 case POST_INC:
2726 case PRE_DEC:
2727 case POST_DEC:
2728 case PRE_MODIFY:
2729 case POST_MODIFY:
2730 if (strchr (recog_data.constraints[opno], '<') == NULL
2731 && strchr (recog_data.constraints[opno], '>')
2732 == NULL)
2733 return 0;
2734 break;
2735 default:
2736 break;
2739 #endif
2740 return 1;
2744 which_alternative++;
2746 while (which_alternative < recog_data.n_alternatives);
2748 which_alternative = -1;
2749 /* If we are about to reject this, but we are not to test strictly,
2750 try a very loose test. Only return failure if it fails also. */
2751 if (strict == 0)
2752 return constrain_operands (-1);
2753 else
2754 return 0;
2757 /* Return true iff OPERAND (assumed to be a REG rtx)
2758 is a hard reg in class CLASS when its regno is offset by OFFSET
2759 and changed to mode MODE.
2760 If REG occupies multiple hard regs, all of them must be in CLASS. */
2762 bool
2763 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2764 enum machine_mode mode)
2766 int regno = REGNO (operand);
2768 if (cl == NO_REGS)
2769 return false;
2771 return (HARD_REGISTER_NUM_P (regno)
2772 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2773 mode, regno + offset));
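/* Illustrative call (hard register numbering is target-specific):
reg_fits_class_p (operand, GENERAL_REGS, 0, DImode) checks that
every hard register occupied by a DImode value starting at
REGNO (operand) is in GENERAL_REGS, not just the first one. */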
2776 /* Split single instruction. Helper function for split_all_insns and
2777 split_all_insns_noflow. Return last insn in the sequence if successful,
2778 or NULL if unsuccessful. */
2780 static rtx
2781 split_insn (rtx insn)
2783 /* Split insns here to get max fine-grain parallelism. */
2784 rtx first = PREV_INSN (insn);
2785 rtx last = try_split (PATTERN (insn), insn, 1);
2786 rtx insn_set, last_set, note;
2788 if (last == insn)
2789 return NULL_RTX;
2791 /* If the original instruction was a single set that was known to be
2792 equivalent to a constant, see if we can say the same about the last
2793 instruction in the split sequence. The two instructions must set
2794 the same destination. */
2795 insn_set = single_set (insn);
2796 if (insn_set)
2798 last_set = single_set (last);
2799 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2801 note = find_reg_equal_equiv_note (insn);
2802 if (note && CONSTANT_P (XEXP (note, 0)))
2803 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2804 else if (CONSTANT_P (SET_SRC (insn_set)))
2805 set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
2809 /* try_split returns the NOTE that INSN became. */
2810 SET_INSN_DELETED (insn);
2812 /* ??? Coddle to md files that generate subregs in post-reload
2813 splitters instead of computing the proper hard register. */
2814 if (reload_completed && first != last)
2816 first = NEXT_INSN (first);
2817 for (;;)
2819 if (INSN_P (first))
2820 cleanup_subreg_operands (first);
2821 if (first == last)
2822 break;
2823 first = NEXT_INSN (first);
2827 return last;
2830 /* Split all insns in the function. */
2832 void
2833 split_all_insns (void)
2835 sbitmap blocks;
2836 bool changed;
2837 basic_block bb;
2839 blocks = sbitmap_alloc (last_basic_block);
2840 sbitmap_zero (blocks);
2841 changed = false;
2843 FOR_EACH_BB_REVERSE (bb)
2845 rtx insn, next;
2846 bool finish = false;
2848 rtl_profile_for_bb (bb);
2849 for (insn = BB_HEAD (bb); !finish ; insn = next)
2851 /* Can't use `next_real_insn' because that might go across
2852 CODE_LABELS and short-out basic blocks. */
2853 next = NEXT_INSN (insn);
2854 finish = (insn == BB_END (bb));
2855 if (INSN_P (insn))
2857 rtx set = single_set (insn);
2859 /* Don't split no-op move insns. These should silently
2860 disappear later in final. Splitting such insns would
2861 break the code that handles LIBCALL blocks. */
2862 if (set && set_noop_p (set))
2864 /* Nops get in the way while scheduling, so delete them
2865 now if register allocation has already been done. It
2866 is too risky to try to do this before register
2867 allocation, and there are unlikely to be very many
2868 nops then anyway. */
2869 if (reload_completed)
2870 delete_insn_and_edges (insn);
2872 else
2874 if (split_insn (insn))
2876 SET_BIT (blocks, bb->index);
2877 changed = true;
2884 default_rtl_profile ();
2885 if (changed)
2886 find_many_sub_basic_blocks (blocks);
2888 #ifdef ENABLE_CHECKING
2889 verify_flow_info ();
2890 #endif
2892 sbitmap_free (blocks);
2895 /* Same as split_all_insns, but do not expect CFG to be available.
2896 Used by machine dependent reorg passes. */
2898 unsigned int
2899 split_all_insns_noflow (void)
2901 rtx next, insn;
2903 for (insn = get_insns (); insn; insn = next)
2905 next = NEXT_INSN (insn);
2906 if (INSN_P (insn))
2908 /* Don't split no-op move insns. These should silently
2909 disappear later in final. Splitting such insns would
2910 break the code that handles LIBCALL blocks. */
2911 rtx set = single_set (insn);
2912 if (set && set_noop_p (set))
2914 /* Nops get in the way while scheduling, so delete them
2915 now if register allocation has already been done. It
2916 is too risky to try to do this before register
2917 allocation, and there are unlikely to be very many
2918 nops then anyway.
2920 ??? Should we use delete_insn when the CFG isn't valid? */
2921 if (reload_completed)
2922 delete_insn_and_edges (insn);
2924 else
2925 split_insn (insn);
2928 return 0;
2931 #ifdef HAVE_peephole2
2932 struct peep2_insn_data
2934 rtx insn;
2935 regset live_before;
2938 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2939 static int peep2_current;
2941 static bool peep2_do_rebuild_jump_labels;
2942 static bool peep2_do_cleanup_cfg;
2944 /* The number of instructions available to match a peep2. */
2945 int peep2_current_count;
2947 /* A non-insn marker indicating the last insn of the block.
2948 The live_before regset for this element is correct, indicating
2949 DF_LIVE_OUT for the block. */
2950 #define PEEP2_EOB pc_rtx
2952 /* Wrap N to fit into the peep2_insn_data buffer. */
2954 static int
2955 peep2_buf_position (int n)
2957 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2958 n -= MAX_INSNS_PER_PEEP2 + 1;
2959 return n;
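/* E.g. with MAX_INSNS_PER_PEEP2 == 5 the buffer has 6 slots and
peep2_buf_position (7) == 1. A single subtraction suffices because
callers never pass an index as large as twice the buffer size. */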
2962 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2963 does not exist. Used by the recognizer to find the next insn to match
2964 in a multi-insn pattern. */
2966 rtx
2967 peep2_next_insn (int n)
2969 gcc_assert (n <= peep2_current_count);
2971 n = peep2_buf_position (peep2_current + n);
2973 return peep2_insn_data[n].insn;
2976 /* Return true if REGNO is dead before the Nth non-note insn
2977 after `current'. */
2979 int
2980 peep2_regno_dead_p (int ofs, int regno)
2982 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2984 ofs = peep2_buf_position (peep2_current + ofs);
2986 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2988 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2991 /* Similarly for a REG. */
2993 int
2994 peep2_reg_dead_p (int ofs, rtx reg)
2996 int regno, n;
2998 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3000 ofs = peep2_buf_position (peep2_current + ofs);
3002 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3004 regno = REGNO (reg);
3005 n = hard_regno_nregs[regno][GET_MODE (reg)];
3006 while (--n >= 0)
3007 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3008 return 0;
3009 return 1;
3012 /* Try to find a hard register of mode MODE, matching the register class in
3013 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3014 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3015 in which case the only condition is that the register must be available
3016 before CURRENT_INSN.
3017 Registers that already have bits set in REG_SET will not be considered.
3019 If an appropriate register is available, it will be returned and the
3020 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3021 returned. */
3023 rtx
3024 peep2_find_free_register (int from, int to, const char *class_str,
3025 enum machine_mode mode, HARD_REG_SET *reg_set)
3027 static int search_ofs;
3028 enum reg_class cl;
3029 HARD_REG_SET live;
3030 int i;
3032 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3033 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3035 from = peep2_buf_position (peep2_current + from);
3036 to = peep2_buf_position (peep2_current + to);
3038 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3039 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3041 while (from != to)
3043 HARD_REG_SET this_live;
3045 from = peep2_buf_position (from + 1);
3046 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3047 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3048 IOR_HARD_REG_SET (live, this_live);
3051 cl = (class_str[0] == 'r' ? GENERAL_REGS
3052 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3054 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3056 int raw_regno, regno, success, j;
3058 /* Distribute the free registers as much as possible. */
3059 raw_regno = search_ofs + i;
3060 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3061 raw_regno -= FIRST_PSEUDO_REGISTER;
3062 #ifdef REG_ALLOC_ORDER
3063 regno = reg_alloc_order[raw_regno];
3064 #else
3065 regno = raw_regno;
3066 #endif
3068 /* Don't allocate fixed registers. */
3069 if (fixed_regs[regno])
3070 continue;
3071 /* Don't allocate global registers. */
3072 if (global_regs[regno])
3073 continue;
3074 /* Make sure the register is of the right class. */
3075 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3076 continue;
3077 /* And can support the mode we need. */
3078 if (! HARD_REGNO_MODE_OK (regno, mode))
3079 continue;
3080 /* And that we don't create an extra save/restore. */
3081 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
3082 continue;
3083 if (! targetm.hard_regno_scratch_ok (regno))
3084 continue;
3086 /* And we don't clobber traceback for noreturn functions. */
3087 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3088 && (! reload_completed || frame_pointer_needed))
3089 continue;
3091 success = 1;
3092 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3094 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3095 || TEST_HARD_REG_BIT (live, regno + j))
3097 success = 0;
3098 break;
3101 if (success)
3103 add_to_hard_reg_set (reg_set, mode, regno);
3105 /* Start the next search with the next register. */
3106 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3107 raw_regno = 0;
3108 search_ofs = raw_regno;
3110 return gen_rtx_REG (mode, regno);
3114 search_ofs = 0;
3115 return NULL_RTX;
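/* Sketch of a typical call, of the kind a define_peephole2 with a
match_scratch generates (the concrete arguments are illustrative):

HARD_REG_SET used;
rtx scratch;

CLEAR_HARD_REG_SET (used);
scratch = peep2_find_free_register (0, 1, "r", SImode, &used);

A NULL_RTX result means no suitable register survives from insn 0
through insn 1, and the peephole must fail. */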
3118 /* Forget all currently tracked instructions, only remember current
3119 LIVE regset. */
3121 static void
3122 peep2_reinit_state (regset live)
3124 int i;
3126 /* Indicate that all slots except the last hold invalid data. */
3127 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3128 peep2_insn_data[i].insn = NULL_RTX;
3129 peep2_current_count = 0;
3131 /* Indicate that the last slot contains live_after data. */
3132 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3133 peep2_current = MAX_INSNS_PER_PEEP2;
3135 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3138 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3139 starting at INSN. Perform the replacement, removing the old insns and
3140 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3141 if the replacement is rejected. */
3143 static rtx
3144 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3146 int i;
3147 rtx last, note, before_try, x;
3148 rtx old_insn, new_insn;
3149 bool was_call = false;
3151 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3152 match more than one insn, or to be split into more than one insn. */
3153 old_insn = peep2_insn_data[peep2_current].insn;
3154 if (RTX_FRAME_RELATED_P (old_insn))
3156 bool any_note = false;
3158 if (match_len != 0)
3159 return NULL;
3161 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3162 may be in the stream for the purpose of register allocation. */
3163 if (active_insn_p (attempt))
3164 new_insn = attempt;
3165 else
3166 new_insn = next_active_insn (attempt);
3167 if (next_active_insn (new_insn))
3168 return NULL;
3170 /* We have a 1-1 replacement. Copy over any frame-related info. */
3171 RTX_FRAME_RELATED_P (new_insn) = 1;
3173 /* Allow the backend to fill in a note during the split. */
3174 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3175 switch (REG_NOTE_KIND (note))
3177 case REG_FRAME_RELATED_EXPR:
3178 case REG_CFA_DEF_CFA:
3179 case REG_CFA_ADJUST_CFA:
3180 case REG_CFA_OFFSET:
3181 case REG_CFA_REGISTER:
3182 case REG_CFA_EXPRESSION:
3183 case REG_CFA_RESTORE:
3184 case REG_CFA_SET_VDRAP:
3185 any_note = true;
3186 break;
3187 default:
3188 break;
3191 /* If the backend didn't supply a note, copy one over. */
3192 if (!any_note)
3193 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3194 switch (REG_NOTE_KIND (note))
3196 case REG_FRAME_RELATED_EXPR:
3197 case REG_CFA_DEF_CFA:
3198 case REG_CFA_ADJUST_CFA:
3199 case REG_CFA_OFFSET:
3200 case REG_CFA_REGISTER:
3201 case REG_CFA_EXPRESSION:
3202 case REG_CFA_RESTORE:
3203 case REG_CFA_SET_VDRAP:
3204 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3205 any_note = true;
3206 break;
3207 default:
3208 break;
3211 /* If there still isn't a note, make sure the unwind info sees the
3212 same expression as before the split. */
3213 if (!any_note)
3215 rtx old_set, new_set;
3217 /* The old insn had better have been simple, or annotated. */
3218 old_set = single_set (old_insn);
3219 gcc_assert (old_set != NULL);
3221 new_set = single_set (new_insn);
3222 if (!new_set || !rtx_equal_p (new_set, old_set))
3223 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3226 /* Copy prologue/epilogue status. This is required in order to keep
3227 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3228 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3231 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3232 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3233 cfg-related call notes. */
3234 for (i = 0; i <= match_len; ++i)
3236 int j;
3238 j = peep2_buf_position (peep2_current + i);
3239 old_insn = peep2_insn_data[j].insn;
3240 if (!CALL_P (old_insn))
3241 continue;
3242 was_call = true;
3244 new_insn = attempt;
3245 while (new_insn != NULL_RTX)
3247 if (CALL_P (new_insn))
3248 break;
3249 new_insn = NEXT_INSN (new_insn);
3252 gcc_assert (new_insn != NULL_RTX);
3254 CALL_INSN_FUNCTION_USAGE (new_insn)
3255 = CALL_INSN_FUNCTION_USAGE (old_insn);
3257 for (note = REG_NOTES (old_insn);
3258 note;
3259 note = XEXP (note, 1))
3260 switch (REG_NOTE_KIND (note))
3262 case REG_NORETURN:
3263 case REG_SETJMP:
3264 add_reg_note (new_insn, REG_NOTE_KIND (note),
3265 XEXP (note, 0));
3266 break;
3267 default:
3268 /* Discard all other reg notes. */
3269 break;
3272 /* Croak if there is another call in the sequence. */
3273 while (++i <= match_len)
3275 j = peep2_buf_position (peep2_current + i);
3276 old_insn = peep2_insn_data[j].insn;
3277 gcc_assert (!CALL_P (old_insn));
3279 break;
3282 i = peep2_buf_position (peep2_current + match_len);
3284 note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3286 /* Replace the old sequence with the new. */
3287 last = emit_insn_after_setloc (attempt,
3288 peep2_insn_data[i].insn,
3289 INSN_LOCATOR (peep2_insn_data[i].insn));
3290 before_try = PREV_INSN (insn);
3291 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3293 /* Re-insert the EH_REGION notes. */
3294 if (note || (was_call && nonlocal_goto_handler_labels))
3296 edge eh_edge;
3297 edge_iterator ei;
3299 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3300 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3301 break;
3303 if (note)
3304 copy_reg_eh_region_note_backward (note, last, before_try);
3306 if (eh_edge)
3307 for (x = last; x != before_try; x = PREV_INSN (x))
3308 if (x != BB_END (bb)
3309 && (can_throw_internal (x)
3310 || can_nonlocal_goto (x)))
3312 edge nfte, nehe;
3313 int flags;
3315 nfte = split_block (bb, x);
3316 flags = (eh_edge->flags
3317 & (EDGE_EH | EDGE_ABNORMAL));
3318 if (CALL_P (x))
3319 flags |= EDGE_ABNORMAL_CALL;
3320 nehe = make_edge (nfte->src, eh_edge->dest,
3321 flags);
3323 nehe->probability = eh_edge->probability;
3324 nfte->probability
3325 = REG_BR_PROB_BASE - nehe->probability;
3327 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3328 bb = nfte->src;
3329 eh_edge = nehe;
3332 /* The replacement may have turned a possibly trapping insn into a
3333 non-trapping one; zap any dummy outgoing edges that are now dead. */
3334 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3337 /* If we generated a jump instruction, it won't have
3338 JUMP_LABEL set. Recompute after we're done. */
3339 for (x = last; x != before_try; x = PREV_INSN (x))
3340 if (JUMP_P (x))
3342 peep2_do_rebuild_jump_labels = true;
3343 break;
3346 return last;
3349 /* After performing a replacement in basic block BB, fix up the life
3350 information in our buffer. LAST is the last of the insns that we
3351 emitted as a replacement. PREV is the insn before the start of
3352 the replacement. MATCH_LEN is the number of instructions that were
3353 matched, and which now need to be replaced in the buffer. */
3355 static void
3356 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3358 int i = peep2_buf_position (peep2_current + match_len + 1);
3359 rtx x;
3360 regset_head live;
3362 INIT_REG_SET (&live);
3363 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3365 gcc_assert (peep2_current_count >= match_len + 1);
3366 peep2_current_count -= match_len + 1;
3368 x = last;
3369 do
3371 if (INSN_P (x))
3373 df_insn_rescan (x);
3374 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3376 peep2_current_count++;
3377 if (--i < 0)
3378 i = MAX_INSNS_PER_PEEP2;
3379 peep2_insn_data[i].insn = x;
3380 df_simulate_one_insn_backwards (bb, x, &live);
3381 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3384 x = PREV_INSN (x);
3386 while (x != prev);
3387 CLEAR_REG_SET (&live);
3389 peep2_current = i;
3392 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3393 Return true if we added it, false otherwise. The caller will try to match
3394 peepholes against the buffer if we return false; otherwise it will try to
3395 add more instructions to the buffer. */
3397 static bool
3398 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3400 int pos;
3402 /* Once we have filled the maximum number of insns the buffer can hold,
3403 allow the caller to match the insns against peepholes. We wait until
3404 the buffer is full in case the target has similar peepholes of different
3405 length; we always want to match the longest if possible. */
3406 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3407 return false;
3409 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3410 any other pattern, lest it change the semantics of the frame info. */
3411 if (RTX_FRAME_RELATED_P (insn))
3413 /* Let the buffer drain first. */
3414 if (peep2_current_count > 0)
3415 return false;
3416 /* Now the insn will be the only thing in the buffer. */
3419 pos = peep2_buf_position (peep2_current + peep2_current_count);
3420 peep2_insn_data[pos].insn = insn;
3421 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3422 peep2_current_count++;
3424 df_simulate_one_insn_forwards (bb, insn, live);
3425 return true;
3428 /* Perform the peephole2 optimization pass. */
3430 static void
3431 peephole2_optimize (void)
3433 rtx insn;
3434 bitmap live;
3435 int i;
3436 basic_block bb;
3438 peep2_do_cleanup_cfg = false;
3439 peep2_do_rebuild_jump_labels = false;
3441 df_set_flags (DF_LR_RUN_DCE);
3442 df_note_add_problem ();
3443 df_analyze ();
3445 /* Initialize the regsets we're going to use. */
3446 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3447 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3448 live = BITMAP_ALLOC (&reg_obstack);
3450 FOR_EACH_BB_REVERSE (bb)
3452 bool past_end = false;
3453 int pos;
3455 rtl_profile_for_bb (bb);
3457 /* Start up propagation. */
3458 bitmap_copy (live, DF_LR_IN (bb));
3459 df_simulate_initialize_forwards (bb, live);
3460 peep2_reinit_state (live);
3462 insn = BB_HEAD (bb);
3463 for (;;)
3465 rtx attempt, head;
3466 int match_len;
3468 if (!past_end && !NONDEBUG_INSN_P (insn))
3470 next_insn:
3471 insn = NEXT_INSN (insn);
3472 if (insn == NEXT_INSN (BB_END (bb)))
3473 past_end = true;
3474 continue;
3476 if (!past_end && peep2_fill_buffer (bb, insn, live))
3477 goto next_insn;
3479 /* If we did not fill an empty buffer, it signals the end of the
3480 block. */
3481 if (peep2_current_count == 0)
3482 break;
3484 /* The buffer filled to the current maximum, so try to match. */
3486 pos = peep2_buf_position (peep2_current + peep2_current_count);
3487 peep2_insn_data[pos].insn = PEEP2_EOB;
3488 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3490 /* Match the peephole. */
3491 head = peep2_insn_data[peep2_current].insn;
3492 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3493 if (attempt != NULL)
3495 rtx last = peep2_attempt (bb, head, match_len, attempt);
3496 if (last)
3498 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3499 continue;
3503 /* No match: advance the buffer by one insn. */
3504 peep2_current = peep2_buf_position (peep2_current + 1);
3505 peep2_current_count--;
3509 default_rtl_profile ();
3510 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3511 BITMAP_FREE (peep2_insn_data[i].live_before);
3512 BITMAP_FREE (live);
3513 if (peep2_do_rebuild_jump_labels)
3514 rebuild_jump_labels (get_insns ());
3516 #endif /* HAVE_peephole2 */
3518 /* Common predicates for use with define_bypass. */
3520 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3521 data, not the address operand(s), of the store. IN_INSN and OUT_INSN
3522 must be either a single_set or a PARALLEL with SETs inside. */
3524 int
3525 store_data_bypass_p (rtx out_insn, rtx in_insn)
3527 rtx out_set, in_set;
3528 rtx out_pat, in_pat;
3529 rtx out_exp, in_exp;
3530 int i, j;
3532 in_set = single_set (in_insn);
3533 if (in_set)
3535 if (!MEM_P (SET_DEST (in_set)))
3536 return false;
3538 out_set = single_set (out_insn);
3539 if (out_set)
3541 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3542 return false;
3544 else
3546 out_pat = PATTERN (out_insn);
3548 if (GET_CODE (out_pat) != PARALLEL)
3549 return false;
3551 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3553 out_exp = XVECEXP (out_pat, 0, i);
3555 if (GET_CODE (out_exp) == CLOBBER)
3556 continue;
3558 gcc_assert (GET_CODE (out_exp) == SET);
3560 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3561 return false;
3565 else
3567 in_pat = PATTERN (in_insn);
3568 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3570 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3572 in_exp = XVECEXP (in_pat, 0, i);
3574 if (GET_CODE (in_exp) == CLOBBER)
3575 continue;
3577 gcc_assert (GET_CODE (in_exp) == SET);
3579 if (!MEM_P (SET_DEST (in_exp)))
3580 return false;
3582 out_set = single_set (out_insn);
3583 if (out_set)
3585 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3586 return false;
3588 else
3590 out_pat = PATTERN (out_insn);
3591 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3593 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3595 out_exp = XVECEXP (out_pat, 0, j);
3597 if (GET_CODE (out_exp) == CLOBBER)
3598 continue;
3600 gcc_assert (GET_CODE (out_exp) == SET);
3602 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3603 return false;
3609 return true;
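/* Example of the intended use from a machine description (the
reservation names are made up for illustration):

(define_bypass 1 "my_store" "my_load" "store_data_bypass_p")

The reduced latency then applies only when IN_INSN depends on the
data being stored, not on the registers forming the store address. */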
3612 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3613 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3614 or multiple set; IN_INSN should be single_set for truth, but for convenience
3615 of insn categorization may be any JUMP or CALL insn. */
3617 int
3618 if_test_bypass_p (rtx out_insn, rtx in_insn)
3620 rtx out_set, in_set;
3622 in_set = single_set (in_insn);
3623 if (! in_set)
3625 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3626 return false;
3629 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3630 return false;
3631 in_set = SET_SRC (in_set);
3633 out_set = single_set (out_insn);
3634 if (out_set)
3636 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3637 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3638 return false;
3640 else
3642 rtx out_pat;
3643 int i;
3645 out_pat = PATTERN (out_insn);
3646 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3648 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3650 rtx exp = XVECEXP (out_pat, 0, i);
3652 if (GET_CODE (exp) == CLOBBER)
3653 continue;
3655 gcc_assert (GET_CODE (exp) == SET);
3657 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3658 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3659 return false;
3663 return true;
3666 static bool
3667 gate_handle_peephole2 (void)
3669 return (optimize > 0 && flag_peephole2);
3672 static unsigned int
3673 rest_of_handle_peephole2 (void)
3675 #ifdef HAVE_peephole2
3676 peephole2_optimize ();
3677 #endif
3678 return 0;
3681 struct rtl_opt_pass pass_peephole2 =
3684 RTL_PASS,
3685 "peephole2", /* name */
3686 gate_handle_peephole2, /* gate */
3687 rest_of_handle_peephole2, /* execute */
3688 NULL, /* sub */
3689 NULL, /* next */
3690 0, /* static_pass_number */
3691 TV_PEEPHOLE2, /* tv_id */
3692 0, /* properties_required */
3693 0, /* properties_provided */
3694 0, /* properties_destroyed */
3695 0, /* todo_flags_start */
3696 TODO_df_finish | TODO_verify_rtl_sharing |
3697 TODO_dump_func /* todo_flags_finish */
3701 static unsigned int
3702 rest_of_handle_split_all_insns (void)
3704 split_all_insns ();
3705 return 0;
3708 struct rtl_opt_pass pass_split_all_insns =
3711 RTL_PASS,
3712 "split1", /* name */
3713 NULL, /* gate */
3714 rest_of_handle_split_all_insns, /* execute */
3715 NULL, /* sub */
3716 NULL, /* next */
3717 0, /* static_pass_number */
3718 TV_NONE, /* tv_id */
3719 0, /* properties_required */
3720 0, /* properties_provided */
3721 0, /* properties_destroyed */
3722 0, /* todo_flags_start */
3723 TODO_dump_func /* todo_flags_finish */
3727 static unsigned int
3728 rest_of_handle_split_after_reload (void)
3730 /* If optimizing, then go ahead and split insns now. */
3731 #ifndef STACK_REGS
3732 if (optimize > 0)
3733 #endif
3734 split_all_insns ();
3735 return 0;
3738 struct rtl_opt_pass pass_split_after_reload =
3741 RTL_PASS,
3742 "split2", /* name */
3743 NULL, /* gate */
3744 rest_of_handle_split_after_reload, /* execute */
3745 NULL, /* sub */
3746 NULL, /* next */
3747 0, /* static_pass_number */
3748 TV_NONE, /* tv_id */
3749 0, /* properties_required */
3750 0, /* properties_provided */
3751 0, /* properties_destroyed */
3752 0, /* todo_flags_start */
3753 TODO_dump_func /* todo_flags_finish */
3757 static bool
3758 gate_handle_split_before_regstack (void)
3760 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3761 /* If flow2 creates new instructions which need splitting
3762 and scheduling after reload is not done, they might not be
3763 split until final which doesn't allow splitting
3764 if HAVE_ATTR_length. */
3765 # ifdef INSN_SCHEDULING
3766 return (optimize && !flag_schedule_insns_after_reload);
3767 # else
3768 return (optimize);
3769 # endif
3770 #else
3771 return 0;
3772 #endif
3775 static unsigned int
3776 rest_of_handle_split_before_regstack (void)
3778 split_all_insns ();
3779 return 0;
3782 struct rtl_opt_pass pass_split_before_regstack =
3785 RTL_PASS,
3786 "split3", /* name */
3787 gate_handle_split_before_regstack, /* gate */
3788 rest_of_handle_split_before_regstack, /* execute */
3789 NULL, /* sub */
3790 NULL, /* next */
3791 0, /* static_pass_number */
3792 TV_NONE, /* tv_id */
3793 0, /* properties_required */
3794 0, /* properties_provided */
3795 0, /* properties_destroyed */
3796 0, /* todo_flags_start */
3797 TODO_dump_func /* todo_flags_finish */
3801 static bool
3802 gate_handle_split_before_sched2 (void)
3804 #ifdef INSN_SCHEDULING
3805 return optimize > 0 && flag_schedule_insns_after_reload;
3806 #else
3807 return 0;
3808 #endif
3811 static unsigned int
3812 rest_of_handle_split_before_sched2 (void)
3814 #ifdef INSN_SCHEDULING
3815 split_all_insns ();
3816 #endif
3817 return 0;
3820 struct rtl_opt_pass pass_split_before_sched2 =
3823 RTL_PASS,
3824 "split4", /* name */
3825 gate_handle_split_before_sched2, /* gate */
3826 rest_of_handle_split_before_sched2, /* execute */
3827 NULL, /* sub */
3828 NULL, /* next */
3829 0, /* static_pass_number */
3830 TV_NONE, /* tv_id */
3831 0, /* properties_required */
3832 0, /* properties_provided */
3833 0, /* properties_destroyed */
3834 0, /* todo_flags_start */
3835 TODO_verify_flow |
3836 TODO_dump_func /* todo_flags_finish */
3840 /* The placement of the splitting that we do for shorten_branches
3841 depends on whether regstack is used by the target or not. */
3842 static bool
3843 gate_do_final_split (void)
3845 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3846 return 1;
3847 #else
3848 return 0;
3849 #endif
3852 struct rtl_opt_pass pass_split_for_shorten_branches =
3855 RTL_PASS,
3856 "split5", /* name */
3857 gate_do_final_split, /* gate */
3858 split_all_insns_noflow, /* execute */
3859 NULL, /* sub */
3860 NULL, /* next */
3861 0, /* static_pass_number */
3862 TV_NONE, /* tv_id */
3863 0, /* properties_required */
3864 0, /* properties_provided */
3865 0, /* properties_destroyed */
3866 0, /* todo_flags_start */
3867 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */