/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 that defaults the UNSHARE argument
   to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 that defaults the UNSHARE argument
   to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
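
/* A minimal usage sketch (an editorial illustration, not part of the
   original file): queue two related edits to INSN as one group, then
   validate them together.  NEW_SRC and NEW_DEST are hypothetical
   replacement rtxes supplied by the caller:

     rtx set = single_set (insn);
     validate_change (insn, &SET_SRC (set), new_src, 1);
     validate_change (insn, &SET_DEST (set), new_dest, 1);
     if (!apply_change_group ())
       give_up ();   (a hypothetical bail-out; INSN is unchanged here)

   Because IN_GROUP is nonzero, nothing is checked until
   apply_change_group re-recognizes INSN with both edits in place; on
   failure every queued change is rolled back.  */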
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
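
/* Illustrative sketch (an editorial addition): the lower-level entry
   points above compose as follows.  A pass that only wants to retract
   its own tail of a larger group can verify and commit or cancel
   explicitly instead of calling apply_change_group:

     int base = num_validated_changes ();
     (queue more changes with validate_change (..., 1) here)
     if (verify_changes (base))
       confirm_change_group ();
     else
       cancel_changes (base);

   verify_changes (BASE) tests only the changes numbered BASE and up,
   and cancel_changes (BASE) restores the rtl in reverse order.  */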
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_INT_P (XEXP (x, 1))
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
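
/* Usage sketch (an editorial addition, with hypothetical pseudo
   numbers): replace every use of pseudo 100 by pseudo 101 in INSN,
   keeping INSN recognizable:

     rtx old_reg = gen_rtx_REG (SImode, 100);
     rtx new_reg = gen_rtx_REG (SImode, 101);
     if (validate_replace_rtx (old_reg, new_reg, insn))
       (INSN now uses pseudo 101 and still matches its pattern)

   Note that validate_replace_rtx_1 matches REGs by register number and
   mode, so OLD_REG need not be pointer-identical to the uses in INSN.  */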
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */

int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
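
/* Worked examples (an editorial addition) of what the predicate above
   accepts for MODE == SImode, following the checks in order:

     (reg:SI 60)                  - yes, a register of matching mode
     (mem:SI (reg:SI 61))         - yes, provided the address is valid
     (const_int 42)               - yes, VOIDmode constants fit any
                                    integer mode
     (subreg:SI (mem:DI ...) 4)   - rejected before reload (nonzero
                                    SUBREG_BYTE of a MEM)
     (mem:SI (...)) volatile      - rejected whenever volatile_ok is 0  */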
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
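
/* Example (an editorial addition): on a STACK_GROWS_DOWNWARD target,
   STACK_PUSH_CODE is PRE_DEC, so a word push looks like

     (mem:SI (pre_dec:SI (reg:SI sp)))

   and push_operand accepts it.  When PUSH_ROUNDING pads the slot, the
   address must instead be a PRE_MODIFY that adds minus the rounded
   size to the stack pointer, as checked above.  */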
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
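
/* Counting example (an editorial addition): for a two-output asm body
   of the form

     (parallel [(set (reg:SI 60) (asm_operands ...))
                (set (reg:SI 61) (asm_operands ...))
                (clobber (reg:CC flags))])

   the backwards scan above finds N_SETS == 2, and the result is 2 plus
   the ASM_OPERANDS input and label counts.  */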
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;	/* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
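
/* Example (an editorial addition): for *P == (plus (reg:SI 60)
   (const_int 4)), the recursion bottoms out in the second summand and
   the function returns &XEXP (*p, 1), the location of the
   (const_int 4); for a sum with no constant summand it returns a null
   pointer.  */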
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
             : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
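
/* Example (an editorial addition): checking whether
   (plus (reg:SI 60) (const_int 100)) is offsettable for SImode
   temporarily rewrites the constant term to (const_int 103) (adding
   MODE_SZ - 1 == 3) and asks whether the result is still a valid
   QImode address, restoring the original rtl before returning.  */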
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr);
}
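
/* Example (an editorial addition): (post_inc:SI (reg:SI 6)) is always
   mode-dependent, because the increment it performs equals the size of
   the mode of the enclosing MEM; a plain (plus (reg) (const_int 4)) is
   left for the target hook to judge.  */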
/* Like extract_insn, but save the insn extracted and don't extract again,
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
2043 /* Analyze INSN and fill in recog_data. */
2045 void
2046 extract_insn (rtx insn)
2048 int i;
2049 int icode;
2050 int noperands;
2051 rtx body = PATTERN (insn);
2053 recog_data.n_operands = 0;
2054 recog_data.n_alternatives = 0;
2055 recog_data.n_dups = 0;
2056 recog_data.is_asm = false;
2058 switch (GET_CODE (body))
2060 case USE:
2061 case CLOBBER:
2062 case ASM_INPUT:
2063 case ADDR_VEC:
2064 case ADDR_DIFF_VEC:
2065 case VAR_LOCATION:
2066 return;
2068 case SET:
2069 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2070 goto asm_insn;
2071 else
2072 goto normal_insn;
2073 case PARALLEL:
2074 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2075 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2076 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2077 goto asm_insn;
2078 else
2079 goto normal_insn;
2080 case ASM_OPERANDS:
2081 asm_insn:
2082 recog_data.n_operands = noperands = asm_noperands (body);
2083 if (noperands >= 0)
2085 /* This insn is an `asm' with operands. */
2087 /* expand_asm_operands makes sure there aren't too many operands. */
2088 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2090 /* Now get the operand values and constraints out of the insn. */
2091 decode_asm_operands (body, recog_data.operand,
2092 recog_data.operand_loc,
2093 recog_data.constraints,
2094 recog_data.operand_mode, NULL);
2095 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2096 if (noperands > 0)
2098 const char *p = recog_data.constraints[0];
2099 recog_data.n_alternatives = 1;
2100 while (*p)
2101 recog_data.n_alternatives += (*p++ == ',');
2103 recog_data.is_asm = true;
2104 break;
2106 fatal_insn_not_found (insn);
2108 default:
2109 normal_insn:
2110 /* Ordinary insn: recognize it, get the operands via insn_extract
2111 and get the constraints. */
2113 icode = recog_memoized (insn);
2114 if (icode < 0)
2115 fatal_insn_not_found (insn);
2117 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2118 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2119 recog_data.n_dups = insn_data[icode].n_dups;
2121 insn_extract (insn);
2123 for (i = 0; i < noperands; i++)
2125 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2126 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2127 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2128 /* VOIDmode match_operands get their mode from the real operand. */
2129 if (recog_data.operand_mode[i] == VOIDmode)
2130 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2133 for (i = 0; i < noperands; i++)
2134 recog_data.operand_type[i]
2135 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2136 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2137 : OP_IN);
2139 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2141 if (INSN_CODE (insn) < 0)
2142 for (i = 0; i < recog_data.n_alternatives; i++)
2143 recog_data.alternative_enabled_p[i] = true;
2144 else
2146 recog_data.insn = insn;
2147 for (i = 0; i < recog_data.n_alternatives; i++)
2149 which_alternative = i;
2150 recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
2154 recog_data.insn = NULL;
2155 which_alternative = -1;
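/* Illustration: for an asm statement such as

     asm ("foo %0,%1" : "=r,m" (x) : "ri,m" (y));

   the asm_insn path above records n_operands == 2 and, by counting the
   comma in operand 0's constraint string "=r,m", n_alternatives == 2.  */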
2158 /* After calling extract_insn, you can use this function to extract some
2159 information from the constraint strings into a more usable form.
2160 The collected data is stored in recog_op_alt. */
2161 void
2162 preprocess_constraints (void)
2164 int i;
2166 for (i = 0; i < recog_data.n_operands; i++)
2167 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2168 * sizeof (struct operand_alternative)));
2170 for (i = 0; i < recog_data.n_operands; i++)
2172 int j;
2173 struct operand_alternative *op_alt;
2174 const char *p = recog_data.constraints[i];
2176 op_alt = recog_op_alt[i];
2178 for (j = 0; j < recog_data.n_alternatives; j++)
2180 op_alt[j].cl = NO_REGS;
2181 op_alt[j].constraint = p;
2182 op_alt[j].matches = -1;
2183 op_alt[j].matched = -1;
2185 if (!recog_data.alternative_enabled_p[j])
2187 p = skip_alternative (p);
2188 continue;
2191 if (*p == '\0' || *p == ',')
2193 op_alt[j].anything_ok = 1;
2194 continue;
2197 for (;;)
2199 char c = *p;
2200 if (c == '#')
2201 do
2202 c = *++p;
2203 while (c != ',' && c != '\0');
2204 if (c == ',' || c == '\0')
2206 p++;
2207 break;
2210 switch (c)
2212 case '=': case '+': case '*': case '%':
2213 case 'E': case 'F': case 'G': case 'H':
2214 case 's': case 'i': case 'n':
2215 case 'I': case 'J': case 'K': case 'L':
2216 case 'M': case 'N': case 'O': case 'P':
2217 /* These don't say anything we care about. */
2218 break;
2220 case '?':
2221 op_alt[j].reject += 6;
2222 break;
2223 case '!':
2224 op_alt[j].reject += 600;
2225 break;
2226 case '&':
2227 op_alt[j].earlyclobber = 1;
2228 break;
2230 case '0': case '1': case '2': case '3': case '4':
2231 case '5': case '6': case '7': case '8': case '9':
2233 char *end;
2234 op_alt[j].matches = strtoul (p, &end, 10);
2235 recog_op_alt[op_alt[j].matches][j].matched = i;
2236 p = end;
2238 continue;
2240 case TARGET_MEM_CONSTRAINT:
2241 op_alt[j].memory_ok = 1;
2242 break;
2243 case '<':
2244 op_alt[j].decmem_ok = 1;
2245 break;
2246 case '>':
2247 op_alt[j].incmem_ok = 1;
2248 break;
2249 case 'V':
2250 op_alt[j].nonoffmem_ok = 1;
2251 break;
2252 case 'o':
2253 op_alt[j].offmem_ok = 1;
2254 break;
2255 case 'X':
2256 op_alt[j].anything_ok = 1;
2257 break;
2259 case 'p':
2260 op_alt[j].is_address = 1;
2261 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2262 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2263 break;
2265 case 'g':
2266 case 'r':
2267 op_alt[j].cl =
2268 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2269 break;
2271 default:
2272 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2274 op_alt[j].memory_ok = 1;
2275 break;
2277 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2279 op_alt[j].is_address = 1;
2280 op_alt[j].cl
2281 = (reg_class_subunion
2282 [(int) op_alt[j].cl]
2283 [(int) base_reg_class (VOIDmode, ADDRESS,
2284 SCRATCH)]);
2285 break;
2288 op_alt[j].cl
2289 = (reg_class_subunion
2290 [(int) op_alt[j].cl]
2291 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2292 break;
2294 p += CONSTRAINT_LEN (c, p);
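/* Illustration: with operand 0 constrained by "=r,m" and operand 1 by
   "0,0", the loop above leaves recog_op_alt[0][0].cl == GENERAL_REGS,
   sets recog_op_alt[0][1].memory_ok, records matches == 0 in both
   alternatives of operand 1, and marks matched == 1 in both alternatives
   of operand 0.  */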
2300 /* Check the operands of an insn against the insn's operand constraints
2301 and return 1 if they are valid.
2302 The information about the insn's operands, constraints, operand modes
2303 etc. is obtained from the global variables set up by extract_insn.
2305 WHICH_ALTERNATIVE is set to a number which indicates which
2306 alternative of constraints was matched: 0 for the first alternative,
2307 1 for the next, etc.
2309 In addition, when two operands are required to match
2310 and it happens that the output operand is (reg) while the
2311 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2312 make the output operand look like the input.
2313 This is because the output operand is the one the template will print.
2315 This is used in final, just before printing the assembler code and by
2316 the routines that determine an insn's attribute.
2318 If STRICT is positive, it means that we have been
2319 called after reload has been completed. In that case, we must
2320 do all checks strictly. If it is zero, it means that we have been called
2321 before reload has completed. In that case, we first try to see if we can
2322 find an alternative that matches strictly. If not, we try again, this
2323 time assuming that reload will fix up the insn. This provides a "best
2324 guess" for the alternative and is used to compute attributes of insns prior
2325 to reload. A negative value of STRICT is used for this internal call. */
2327 struct funny_match
2329 int this_op, other;
2332 int
2333 constrain_operands (int strict)
2335 const char *constraints[MAX_RECOG_OPERANDS];
2336 int matching_operands[MAX_RECOG_OPERANDS];
2337 int earlyclobber[MAX_RECOG_OPERANDS];
2338 int c;
2340 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2341 int funny_match_index;
2343 which_alternative = 0;
2344 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2345 return 1;
2347 for (c = 0; c < recog_data.n_operands; c++)
2349 constraints[c] = recog_data.constraints[c];
2350 matching_operands[c] = -1;
2353 do
2355 int seen_earlyclobber_at = -1;
2356 int opno;
2357 int lose = 0;
2358 funny_match_index = 0;
2360 if (!recog_data.alternative_enabled_p[which_alternative])
2362 int i;
2364 for (i = 0; i < recog_data.n_operands; i++)
2365 constraints[i] = skip_alternative (constraints[i]);
2367 which_alternative++;
2368 continue;
2371 for (opno = 0; opno < recog_data.n_operands; opno++)
2373 rtx op = recog_data.operand[opno];
2374 enum machine_mode mode = GET_MODE (op);
2375 const char *p = constraints[opno];
2376 int offset = 0;
2377 int win = 0;
2378 int val;
2379 int len;
2381 earlyclobber[opno] = 0;
2383 /* A unary operator may be accepted by the predicate, but it
2384 is irrelevant for matching constraints. */
2385 if (UNARY_P (op))
2386 op = XEXP (op, 0);
2388 if (GET_CODE (op) == SUBREG)
2390 if (REG_P (SUBREG_REG (op))
2391 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2392 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2393 GET_MODE (SUBREG_REG (op)),
2394 SUBREG_BYTE (op),
2395 GET_MODE (op));
2396 op = SUBREG_REG (op);
2399 /* An empty constraint or empty alternative
2400 allows anything which matched the pattern. */
2401 if (*p == 0 || *p == ',')
2402 win = 1;
2404 do
2405 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2407 case '\0':
2408 len = 0;
2409 break;
2410 case ',':
2411 c = '\0';
2412 break;
2414 case '?': case '!': case '*': case '%':
2415 case '=': case '+':
2416 break;
2418 case '#':
2419 /* Ignore rest of this alternative as far as
2420 constraint checking is concerned. */
2421 do
2422 p++;
2423 while (*p && *p != ',');
2424 len = 0;
2425 break;
2427 case '&':
2428 earlyclobber[opno] = 1;
2429 if (seen_earlyclobber_at < 0)
2430 seen_earlyclobber_at = opno;
2431 break;
2433 case '0': case '1': case '2': case '3': case '4':
2434 case '5': case '6': case '7': case '8': case '9':
2436 /* This operand must be the same as a previous one.
2437 This kind of constraint is used for instructions such
2438 as add when they take only two operands.
2440 Note that the lower-numbered operand is passed first.
2442 If we are not testing strictly, assume that this
2443 constraint will be satisfied. */
2445 char *end;
2446 int match;
2448 match = strtoul (p, &end, 10);
2449 p = end;
2451 if (strict < 0)
2452 val = 1;
2453 else
2455 rtx op1 = recog_data.operand[match];
2456 rtx op2 = recog_data.operand[opno];
2458 /* A unary operator may be accepted by the predicate,
2459 but it is irrelevant for matching constraints. */
2460 if (UNARY_P (op1))
2461 op1 = XEXP (op1, 0);
2462 if (UNARY_P (op2))
2463 op2 = XEXP (op2, 0);
2465 val = operands_match_p (op1, op2);
2468 matching_operands[opno] = match;
2469 matching_operands[match] = opno;
2471 if (val != 0)
2472 win = 1;
2474 /* If output is *x and input is *--x, arrange later
2475 to change the output to *--x as well, since the
2476 output op is the one that will be printed. */
2477 if (val == 2 && strict > 0)
2479 funny_match[funny_match_index].this_op = opno;
2480 funny_match[funny_match_index++].other = match;
2483 len = 0;
2484 break;
2486 case 'p':
2487 /* p is used for address_operands. When we are called by
2488 gen_reload, no one will have checked that the address is
2489 strictly valid, i.e., that all pseudos requiring hard regs
2490 have gotten them. */
2491 if (strict <= 0
2492 || (strict_memory_address_p (recog_data.operand_mode[opno],
2493 op)))
2494 win = 1;
2495 break;
2497 /* No need to check general_operand again;
2498 it was done in insn-recog.c. Well, except that reload
2499 doesn't check the validity of its replacements, but
2500 that should only matter when there's a bug. */
2501 case 'g':
2502 /* Anything goes unless it is a REG and really has a hard reg
2503 but the hard reg is not in the class GENERAL_REGS. */
2504 if (REG_P (op))
2506 if (strict < 0
2507 || GENERAL_REGS == ALL_REGS
2508 || (reload_in_progress
2509 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2510 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2511 win = 1;
2513 else if (strict < 0 || general_operand (op, mode))
2514 win = 1;
2515 break;
2517 case 'X':
2518 /* This is used for a MATCH_SCRATCH in the cases when
2519 we don't actually need anything. So anything goes
2520 any time. */
2521 win = 1;
2522 break;
2524 case TARGET_MEM_CONSTRAINT:
2525 /* Memory operands must be valid, to the extent
2526 required by STRICT. */
2527 if (MEM_P (op))
2529 if (strict > 0
2530 && !strict_memory_address_addr_space_p
2531 (GET_MODE (op), XEXP (op, 0),
2532 MEM_ADDR_SPACE (op)))
2533 break;
2534 if (strict == 0
2535 && !memory_address_addr_space_p
2536 (GET_MODE (op), XEXP (op, 0),
2537 MEM_ADDR_SPACE (op)))
2538 break;
2539 win = 1;
2541 /* Before reload, accept what reload can turn into mem. */
2542 else if (strict < 0 && CONSTANT_P (op))
2543 win = 1;
2544 /* During reload, accept a pseudo. */
2545 else if (reload_in_progress && REG_P (op)
2546 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2547 win = 1;
2548 break;
2550 case '<':
2551 if (MEM_P (op)
2552 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2553 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2554 win = 1;
2555 break;
2557 case '>':
2558 if (MEM_P (op)
2559 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2560 || GET_CODE (XEXP (op, 0)) == POST_INC))
2561 win = 1;
2562 break;
2564 case 'E':
2565 case 'F':
2566 if (GET_CODE (op) == CONST_DOUBLE
2567 || (GET_CODE (op) == CONST_VECTOR
2568 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2569 win = 1;
2570 break;
2572 case 'G':
2573 case 'H':
2574 if (GET_CODE (op) == CONST_DOUBLE
2575 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2576 win = 1;
2577 break;
2579 case 's':
2580 if (CONST_INT_P (op)
2581 || (GET_CODE (op) == CONST_DOUBLE
2582 && GET_MODE (op) == VOIDmode))
2583 break;
2584 case 'i':
2585 if (CONSTANT_P (op))
2586 win = 1;
2587 break;
2589 case 'n':
2590 if (CONST_INT_P (op)
2591 || (GET_CODE (op) == CONST_DOUBLE
2592 && GET_MODE (op) == VOIDmode))
2593 win = 1;
2594 break;
2596 case 'I':
2597 case 'J':
2598 case 'K':
2599 case 'L':
2600 case 'M':
2601 case 'N':
2602 case 'O':
2603 case 'P':
2604 if (CONST_INT_P (op)
2605 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2606 win = 1;
2607 break;
2609 case 'V':
2610 if (MEM_P (op)
2611 && ((strict > 0 && ! offsettable_memref_p (op))
2612 || (strict < 0
2613 && !(CONSTANT_P (op) || MEM_P (op)))
2614 || (reload_in_progress
2615 && !(REG_P (op)
2616 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2617 win = 1;
2618 break;
2620 case 'o':
2621 if ((strict > 0 && offsettable_memref_p (op))
2622 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2623 /* Before reload, accept what reload can handle. */
2624 || (strict < 0
2625 && (CONSTANT_P (op) || MEM_P (op)))
2626 /* During reload, accept a pseudo. */
2627 || (reload_in_progress && REG_P (op)
2628 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2629 win = 1;
2630 break;
2632 default:
2634 enum reg_class cl;
2636 cl = (c == 'r'
2637 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2638 if (cl != NO_REGS)
2640 if (strict < 0
2641 || (strict == 0
2642 && REG_P (op)
2643 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2644 || (strict == 0 && GET_CODE (op) == SCRATCH)
2645 || (REG_P (op)
2646 && reg_fits_class_p (op, cl, offset, mode)))
2647 win = 1;
2649 #ifdef EXTRA_CONSTRAINT_STR
2650 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2651 win = 1;
2653 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2654 /* Every memory operand can be reloaded to fit. */
2655 && ((strict < 0 && MEM_P (op))
2656 /* Before reload, accept what reload can turn
2657 into mem. */
2658 || (strict < 0 && CONSTANT_P (op))
2659 /* During reload, accept a pseudo. */
2660 || (reload_in_progress && REG_P (op)
2661 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2662 win = 1;
2663 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2664 /* Every address operand can be reloaded to fit. */
2665 && strict < 0)
2666 win = 1;
2667 #endif
2668 break;
2671 while (p += len, c);
2673 constraints[opno] = p;
2674 /* If this operand did not win somehow,
2675 this alternative loses. */
2676 if (! win)
2677 lose = 1;
2679 /* This alternative won; the operands are ok.
2680 Change whichever operands this alternative says to change. */
2681 if (! lose)
2683 int opno, eopno;
2685 /* See if any earlyclobber operand conflicts with some other
2686 operand. */
2688 if (strict > 0 && seen_earlyclobber_at >= 0)
2689 for (eopno = seen_earlyclobber_at;
2690 eopno < recog_data.n_operands;
2691 eopno++)
2692 /* Ignore earlyclobber operands now in memory,
2693 because we would often report failure when we have
2694 two memory operands, one of which was formerly a REG. */
2695 if (earlyclobber[eopno]
2696 && REG_P (recog_data.operand[eopno]))
2697 for (opno = 0; opno < recog_data.n_operands; opno++)
2698 if ((MEM_P (recog_data.operand[opno])
2699 || recog_data.operand_type[opno] != OP_OUT)
2700 && opno != eopno
2701 /* Ignore things like match_operator operands. */
2702 && *recog_data.constraints[opno] != 0
2703 && ! (matching_operands[opno] == eopno
2704 && operands_match_p (recog_data.operand[opno],
2705 recog_data.operand[eopno]))
2706 && ! safe_from_earlyclobber (recog_data.operand[opno],
2707 recog_data.operand[eopno]))
2708 lose = 1;
2710 if (! lose)
2712 while (--funny_match_index >= 0)
2714 recog_data.operand[funny_match[funny_match_index].other]
2715 = recog_data.operand[funny_match[funny_match_index].this_op];
2718 #ifdef AUTO_INC_DEC
2719 /* For operands without < or > constraints reject side-effects. */
2720 if (recog_data.is_asm)
2722 for (opno = 0; opno < recog_data.n_operands; opno++)
2723 if (MEM_P (recog_data.operand[opno]))
2724 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2726 case PRE_INC:
2727 case POST_INC:
2728 case PRE_DEC:
2729 case POST_DEC:
2730 case PRE_MODIFY:
2731 case POST_MODIFY:
2732 if (strchr (recog_data.constraints[opno], '<') == NULL
2733 && strchr (recog_data.constraints[opno], '>')
2734 == NULL)
2735 return 0;
2736 break;
2737 default:
2738 break;
2741 #endif
2742 return 1;
2746 which_alternative++;
2748 while (which_alternative < recog_data.n_alternatives);
2750 which_alternative = -1;
2751 /* If we are about to reject this, but we are not to test strictly,
2752 try a very loose test. Only return failure if it fails also. */
2753 if (strict == 0)
2754 return constrain_operands (-1);
2755 else
2756 return 0;
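/* A minimal sketch of the usual calling pattern (the helper below is
   hypothetical; it mirrors extract_constrain_insn_cached above): pass a
   positive STRICT once reload_completed is nonzero, zero before that.  */
#if 0
static bool
example_constraints_ok (rtx insn)
{
  extract_insn (insn);
  return constrain_operands (reload_completed) != 0;
}
#endif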
2759 /* Return true iff OPERAND (assumed to be a REG rtx)
2760 is a hard reg in class CLASS when its regno is offset by OFFSET
2761 and changed to mode MODE.
2762 If REG occupies multiple hard regs, all of them must be in CLASS. */
2764 bool
2765 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2766 enum machine_mode mode)
2768 int regno = REGNO (operand);
2770 if (cl == NO_REGS)
2771 return false;
2773 return (HARD_REGISTER_NUM_P (regno)
2774 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2775 mode, regno + offset));
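/* Illustration: on a target where a DImode value occupies two hard
   registers, a query for (reg:DI 4) with OFFSET 0 succeeds only if both
   hard regs 4 and 5 are members of CL; in_hard_reg_set_p checks every
   register the value would occupy.  */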
2778 /* Split a single instruction. Helper function for split_all_insns and
2779 split_all_insns_noflow. Return the last insn in the sequence if successful,
2780 or NULL if unsuccessful. */
2782 static rtx
2783 split_insn (rtx insn)
2785 /* Split insns here to get max fine-grain parallelism. */
2786 rtx first = PREV_INSN (insn);
2787 rtx last = try_split (PATTERN (insn), insn, 1);
2788 rtx insn_set, last_set, note;
2790 if (last == insn)
2791 return NULL_RTX;
2793 /* If the original instruction was a single set that was known to be
2794 equivalent to a constant, see if we can say the same about the last
2795 instruction in the split sequence. The two instructions must set
2796 the same destination. */
2797 insn_set = single_set (insn);
2798 if (insn_set)
2800 last_set = single_set (last);
2801 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2803 note = find_reg_equal_equiv_note (insn);
2804 if (note && CONSTANT_P (XEXP (note, 0)))
2805 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2806 else if (CONSTANT_P (SET_SRC (insn_set)))
2807 set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
2811 /* try_split returns the NOTE that INSN became. */
2812 SET_INSN_DELETED (insn);
2814 /* ??? Coddle to md files that generate subregs in post-reload
2815 splitters instead of computing the proper hard register. */
2816 if (reload_completed && first != last)
2818 first = NEXT_INSN (first);
2819 for (;;)
2821 if (INSN_P (first))
2822 cleanup_subreg_operands (first);
2823 if (first == last)
2824 break;
2825 first = NEXT_INSN (first);
2829 return last;
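/* Illustration: if INSN was (set (reg:SI 60) (const_int 0x12345678)) and
   the target splits it into a HIGH/LO_SUM pair, the code above attaches a
   REG_EQUAL note for (const_int 0x12345678) to the last insn of the
   sequence, provided that insn still sets (reg:SI 60).  */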
2832 /* Split all insns in the function. */
2834 void
2835 split_all_insns (void)
2837 sbitmap blocks;
2838 bool changed;
2839 basic_block bb;
2841 blocks = sbitmap_alloc (last_basic_block);
2842 sbitmap_zero (blocks);
2843 changed = false;
2845 FOR_EACH_BB_REVERSE (bb)
2847 rtx insn, next;
2848 bool finish = false;
2850 rtl_profile_for_bb (bb);
2851 for (insn = BB_HEAD (bb); !finish ; insn = next)
2853 /* Can't use `next_real_insn' because that might go across
2854 CODE_LABELS and short-out basic blocks. */
2855 next = NEXT_INSN (insn);
2856 finish = (insn == BB_END (bb));
2857 if (INSN_P (insn))
2859 rtx set = single_set (insn);
2861 /* Don't split no-op move insns. These should silently
2862 disappear later in final. Splitting such insns would
2863 break the code that handles LIBCALL blocks. */
2864 if (set && set_noop_p (set))
2866 /* Nops get in the way while scheduling, so delete them
2867 now if register allocation has already been done. It
2868 is too risky to try to do this before register
2869 allocation, and there are unlikely to be very many
2870 nops then anyway. */
2871 if (reload_completed)
2872 delete_insn_and_edges (insn);
2874 else
2876 if (split_insn (insn))
2878 SET_BIT (blocks, bb->index);
2879 changed = true;
2886 default_rtl_profile ();
2887 if (changed)
2888 find_many_sub_basic_blocks (blocks);
2890 #ifdef ENABLE_CHECKING
2891 verify_flow_info ();
2892 #endif
2894 sbitmap_free (blocks);
2897 /* Same as split_all_insns, but do not expect CFG to be available.
2898 Used by machine dependent reorg passes. */
2900 unsigned int
2901 split_all_insns_noflow (void)
2903 rtx next, insn;
2905 for (insn = get_insns (); insn; insn = next)
2907 next = NEXT_INSN (insn);
2908 if (INSN_P (insn))
2910 /* Don't split no-op move insns. These should silently
2911 disappear later in final. Splitting such insns would
2912 break the code that handles LIBCALL blocks. */
2913 rtx set = single_set (insn);
2914 if (set && set_noop_p (set))
2916 /* Nops get in the way while scheduling, so delete them
2917 now if register allocation has already been done. It
2918 is too risky to try to do this before register
2919 allocation, and there are unlikely to be very many
2920 nops then anyway.
2922 ??? Should we use delete_insn when the CFG isn't valid? */
2923 if (reload_completed)
2924 delete_insn_and_edges (insn);
2926 else
2927 split_insn (insn);
2930 return 0;
2933 #ifdef HAVE_peephole2
2934 struct peep2_insn_data
2936 rtx insn;
2937 regset live_before;
2940 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2941 static int peep2_current;
2943 static bool peep2_do_rebuild_jump_labels;
2944 static bool peep2_do_cleanup_cfg;
2946 /* The number of instructions available to match a peep2. */
2947 int peep2_current_count;
2949 /* A non-insn marker indicating the last insn of the block.
2950 The live_before regset for this element is correct, indicating
2951 DF_LIVE_OUT for the block. */
2952 #define PEEP2_EOB pc_rtx
2954 /* Wrap N to fit into the peep2_insn_data buffer. */
2956 static int
2957 peep2_buf_position (int n)
2959 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2960 n -= MAX_INSNS_PER_PEEP2 + 1;
2961 return n;
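/* Illustration: the buffer has MAX_INSNS_PER_PEEP2 + 1 slots, so with
   MAX_INSNS_PER_PEEP2 == 5, peep2_buf_position (5 + 3) wraps 8 around to
   2.  A single subtraction suffices because callers never pass an index
   as large as twice the buffer size.  */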
2964 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2965 does not exist. Used by the recognizer to find the next insn to match
2966 in a multi-insn pattern. */
2968 rtx
2969 peep2_next_insn (int n)
2971 gcc_assert (n <= peep2_current_count);
2973 n = peep2_buf_position (peep2_current + n);
2975 return peep2_insn_data[n].insn;
2978 /* Return true if REGNO is dead before the Nth non-note insn
2979 after `current'. */
2981 int
2982 peep2_regno_dead_p (int ofs, int regno)
2984 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2986 ofs = peep2_buf_position (peep2_current + ofs);
2988 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2990 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2993 /* Similarly for a REG. */
2995 int
2996 peep2_reg_dead_p (int ofs, rtx reg)
2998 int regno, n;
3000 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3002 ofs = peep2_buf_position (peep2_current + ofs);
3004 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3006 regno = REGNO (reg);
3007 n = hard_regno_nregs[regno][GET_MODE (reg)];
3008 while (--n >= 0)
3009 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3010 return 0;
3011 return 1;
3014 /* Try to find a hard register of mode MODE, matching the register class in
3015 CLASS_STR, which is available at the beginning of the insn at buffer
3016 position FROM and remains available until the end of the insn at buffer
3017 position TO. FROM and TO are offsets from the current position, as
3018 accepted by peep2_next_insn.
3019 Registers that already have bits set in REG_SET will not be considered.
3021 If an appropriate register is available, it will be returned and the
3022 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3023 returned. */
3025 rtx
3026 peep2_find_free_register (int from, int to, const char *class_str,
3027 enum machine_mode mode, HARD_REG_SET *reg_set)
3029 static int search_ofs;
3030 enum reg_class cl;
3031 HARD_REG_SET live;
3032 int i;
3034 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3035 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3037 from = peep2_buf_position (peep2_current + from);
3038 to = peep2_buf_position (peep2_current + to);
3040 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3041 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3043 while (from != to)
3045 HARD_REG_SET this_live;
3047 from = peep2_buf_position (from + 1);
3048 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3049 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3050 IOR_HARD_REG_SET (live, this_live);
3053 cl = (class_str[0] == 'r' ? GENERAL_REGS
3054 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3056 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3058 int raw_regno, regno, success, j;
3060 /* Distribute the free registers as much as possible. */
3061 raw_regno = search_ofs + i;
3062 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3063 raw_regno -= FIRST_PSEUDO_REGISTER;
3064 #ifdef REG_ALLOC_ORDER
3065 regno = reg_alloc_order[raw_regno];
3066 #else
3067 regno = raw_regno;
3068 #endif
3070 /* Don't allocate fixed registers. */
3071 if (fixed_regs[regno])
3072 continue;
3073 /* Don't allocate global registers. */
3074 if (global_regs[regno])
3075 continue;
3076 /* Make sure the register is of the right class. */
3077 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3078 continue;
3079 /* And can support the mode we need. */
3080 if (! HARD_REGNO_MODE_OK (regno, mode))
3081 continue;
3082 /* And that we don't create an extra save/restore. */
3083 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
3084 continue;
3085 if (! targetm.hard_regno_scratch_ok (regno))
3086 continue;
3088 /* And we don't clobber traceback for noreturn functions. */
3089 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3090 && (! reload_completed || frame_pointer_needed))
3091 continue;
3093 success = 1;
3094 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3096 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3097 || TEST_HARD_REG_BIT (live, regno + j))
3099 success = 0;
3100 break;
3103 if (success)
3105 add_to_hard_reg_set (reg_set, mode, regno);
3107 /* Start the next search with the next register. */
3108 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3109 raw_regno = 0;
3110 search_ofs = raw_regno;
3112 return gen_rtx_REG (mode, regno);
3116 search_ofs = 0;
3117 return NULL_RTX;
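/* A hedged usage sketch (the machine-description fragment below is
   hypothetical): a define_peephole2 that uses match_scratch, e.g.

     (define_peephole2
       [(match_scratch:SI 2 "r")
        (set (match_operand:SI 0 "register_operand" "")
             (match_operand:SI 1 "const_int_operand" ""))]
       "..."
       [...])

   is serviced by generated code that calls peep2_find_free_register to
   materialize operand 2, passing the buffer positions of the matched
   insns and the constraint string "r".  */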
3120 /* Forget all currently tracked instructions, only remember current
3121 LIVE regset. */
3123 static void
3124 peep2_reinit_state (regset live)
3126 int i;
3128 /* Indicate that all slots except the last hold invalid data. */
3129 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3130 peep2_insn_data[i].insn = NULL_RTX;
3131 peep2_current_count = 0;
3133 /* Indicate that the last slot contains live_after data. */
3134 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3135 peep2_current = MAX_INSNS_PER_PEEP2;
3137 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3140 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3141 starting at INSN. Perform the replacement, removing the old insns and
3142 replacing them with ATTEMPT. Return the last insn emitted, or NULL
3143 if the replacement is rejected. */
3145 static rtx
3146 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3148 int i;
3149 rtx last, eh_note, as_note, before_try, x;
3150 rtx old_insn, new_insn;
3151 bool was_call = false;
3153 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3154 match more than one insn, or to be split into more than one insn. */
3155 old_insn = peep2_insn_data[peep2_current].insn;
3156 if (RTX_FRAME_RELATED_P (old_insn))
3158 bool any_note = false;
3159 rtx note;
3161 if (match_len != 0)
3162 return NULL;
3164 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3165 may be in the stream for the purpose of register allocation. */
3166 if (active_insn_p (attempt))
3167 new_insn = attempt;
3168 else
3169 new_insn = next_active_insn (attempt);
3170 if (next_active_insn (new_insn))
3171 return NULL;
3173 /* We have a 1-1 replacement. Copy over any frame-related info. */
3174 RTX_FRAME_RELATED_P (new_insn) = 1;
3176 /* Allow the backend to fill in a note during the split. */
3177 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3178 switch (REG_NOTE_KIND (note))
3180 case REG_FRAME_RELATED_EXPR:
3181 case REG_CFA_DEF_CFA:
3182 case REG_CFA_ADJUST_CFA:
3183 case REG_CFA_OFFSET:
3184 case REG_CFA_REGISTER:
3185 case REG_CFA_EXPRESSION:
3186 case REG_CFA_RESTORE:
3187 case REG_CFA_SET_VDRAP:
3188 any_note = true;
3189 break;
3190 default:
3191 break;
3194 /* If the backend didn't supply a note, copy one over. */
3195 if (!any_note)
3196 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3197 switch (REG_NOTE_KIND (note))
3199 case REG_FRAME_RELATED_EXPR:
3200 case REG_CFA_DEF_CFA:
3201 case REG_CFA_ADJUST_CFA:
3202 case REG_CFA_OFFSET:
3203 case REG_CFA_REGISTER:
3204 case REG_CFA_EXPRESSION:
3205 case REG_CFA_RESTORE:
3206 case REG_CFA_SET_VDRAP:
3207 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3208 any_note = true;
3209 break;
3210 default:
3211 break;
3214 /* If there still isn't a note, make sure the unwind info sees the
3215 same expression as before the split. */
3216 if (!any_note)
3218 rtx old_set, new_set;
3220 /* The old insn had better have been simple, or annotated. */
3221 old_set = single_set (old_insn);
3222 gcc_assert (old_set != NULL);
3224 new_set = single_set (new_insn);
3225 if (!new_set || !rtx_equal_p (new_set, old_set))
3226 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3229 /* Copy prologue/epilogue status. This is required in order to keep
3230 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3231 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3234 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3235 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3236 cfg-related call notes. */
3237 for (i = 0; i <= match_len; ++i)
3239 int j;
3240 rtx note;
3242 j = peep2_buf_position (peep2_current + i);
3243 old_insn = peep2_insn_data[j].insn;
3244 if (!CALL_P (old_insn))
3245 continue;
3246 was_call = true;
3248 new_insn = attempt;
3249 while (new_insn != NULL_RTX)
3251 if (CALL_P (new_insn))
3252 break;
3253 new_insn = NEXT_INSN (new_insn);
3256 gcc_assert (new_insn != NULL_RTX);
3258 CALL_INSN_FUNCTION_USAGE (new_insn)
3259 = CALL_INSN_FUNCTION_USAGE (old_insn);
3261 for (note = REG_NOTES (old_insn);
3262 note;
3263 note = XEXP (note, 1))
3264 switch (REG_NOTE_KIND (note))
3266 case REG_NORETURN:
3267 case REG_SETJMP:
3268 add_reg_note (new_insn, REG_NOTE_KIND (note),
3269 XEXP (note, 0));
3270 break;
3271 default:
3272 /* Discard all other reg notes. */
3273 break;
3276 /* Croak if there is another call in the sequence. */
3277 while (++i <= match_len)
3279 j = peep2_buf_position (peep2_current + i);
3280 old_insn = peep2_insn_data[j].insn;
3281 gcc_assert (!CALL_P (old_insn));
3283 break;
3286 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3287 move those notes over to the new sequence. */
3288 as_note = NULL;
3289 for (i = match_len; i >= 0; --i)
3291 int j = peep2_buf_position (peep2_current + i);
3292 old_insn = peep2_insn_data[j].insn;
3294 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3295 if (as_note)
3296 break;
3299 i = peep2_buf_position (peep2_current + match_len);
3300 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3302 /* Replace the old sequence with the new. */
3303 last = emit_insn_after_setloc (attempt,
3304 peep2_insn_data[i].insn,
3305 INSN_LOCATOR (peep2_insn_data[i].insn));
3306 before_try = PREV_INSN (insn);
3307 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3309 /* Re-insert the EH_REGION notes. */
3310 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3312 edge eh_edge;
3313 edge_iterator ei;
3315 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3316 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3317 break;
3319 if (eh_note)
3320 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3322 if (eh_edge)
3323 for (x = last; x != before_try; x = PREV_INSN (x))
3324 if (x != BB_END (bb)
3325 && (can_throw_internal (x)
3326 || can_nonlocal_goto (x)))
3328 edge nfte, nehe;
3329 int flags;
3331 nfte = split_block (bb, x);
3332 flags = (eh_edge->flags
3333 & (EDGE_EH | EDGE_ABNORMAL));
3334 if (CALL_P (x))
3335 flags |= EDGE_ABNORMAL_CALL;
3336 nehe = make_edge (nfte->src, eh_edge->dest,
3337 flags);
3339 nehe->probability = eh_edge->probability;
3340 nfte->probability
3341 = REG_BR_PROB_BASE - nehe->probability;
3343 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3344 bb = nfte->src;
3345 eh_edge = nehe;
3348 /* A possibly trapping insn may have been turned into a
3349 non-trapping one; zap any dummy outgoing edges. */
3350 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3353 /* Re-insert the ARGS_SIZE notes. */
3354 if (as_note)
3355 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3357 /* If we generated a jump instruction, it won't have
3358 JUMP_LABEL set. Recompute after we're done. */
3359 for (x = last; x != before_try; x = PREV_INSN (x))
3360 if (JUMP_P (x))
3362 peep2_do_rebuild_jump_labels = true;
3363 break;
3366 return last;
3369 /* After performing a replacement in basic block BB, fix up the life
3370 information in our buffer. LAST is the last of the insns that we
3371 emitted as a replacement. PREV is the insn before the start of
3372 the replacement. MATCH_LEN is the number of instructions that were
3373 matched, and which now need to be replaced in the buffer. */
3375 static void
3376 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3378 int i = peep2_buf_position (peep2_current + match_len + 1);
3379 rtx x;
3380 regset_head live;
3382 INIT_REG_SET (&live);
3383 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3385 gcc_assert (peep2_current_count >= match_len + 1);
3386 peep2_current_count -= match_len + 1;
3388 x = last;
3391 if (INSN_P (x))
3393 df_insn_rescan (x);
3394 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3396 peep2_current_count++;
3397 if (--i < 0)
3398 i = MAX_INSNS_PER_PEEP2;
3399 peep2_insn_data[i].insn = x;
3400 df_simulate_one_insn_backwards (bb, x, &live);
3401 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3404 x = PREV_INSN (x);
3406 while (x != prev);
3407 CLEAR_REG_SET (&live);
3409 peep2_current = i;
3412 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3413 Return true if we added it, false otherwise. The caller will try to match
3414 peepholes against the buffer if we return false; otherwise it will try to
3415 add more instructions to the buffer. */
3417 static bool
3418 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3420 int pos;
3422 /* Once we have filled the maximum number of insns the buffer can hold,
3423 allow the caller to match the insns against peepholes. We wait until
3424 the buffer is full in case the target has similar peepholes of different
3425 length; we always want to match the longest if possible. */
3426 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3427 return false;
3429 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3430 any other pattern, lest it change the semantics of the frame info. */
3431 if (RTX_FRAME_RELATED_P (insn))
3433 /* Let the buffer drain first. */
3434 if (peep2_current_count > 0)
3435 return false;
3436 /* Now the insn will be the only thing in the buffer. */
3439 pos = peep2_buf_position (peep2_current + peep2_current_count);
3440 peep2_insn_data[pos].insn = insn;
3441 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3442 peep2_current_count++;
3444 df_simulate_one_insn_forwards (bb, insn, live);
3445 return true;
3448 /* Perform the peephole2 optimization pass. */
3450 static void
3451 peephole2_optimize (void)
3453 rtx insn;
3454 bitmap live;
3455 int i;
3456 basic_block bb;
3458 peep2_do_cleanup_cfg = false;
3459 peep2_do_rebuild_jump_labels = false;
3461 df_set_flags (DF_LR_RUN_DCE);
3462 df_note_add_problem ();
3463 df_analyze ();
3465 /* Initialize the regsets we're going to use. */
3466 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3467 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3468 live = BITMAP_ALLOC (&reg_obstack);
3470 FOR_EACH_BB_REVERSE (bb)
3472 bool past_end = false;
3473 int pos;
3475 rtl_profile_for_bb (bb);
3477 /* Start up propagation. */
3478 bitmap_copy (live, DF_LR_IN (bb));
3479 df_simulate_initialize_forwards (bb, live);
3480 peep2_reinit_state (live);
3482 insn = BB_HEAD (bb);
3483 for (;;)
3485 rtx attempt, head;
3486 int match_len;
3488 if (!past_end && !NONDEBUG_INSN_P (insn))
3490 next_insn:
3491 insn = NEXT_INSN (insn);
3492 if (insn == NEXT_INSN (BB_END (bb)))
3493 past_end = true;
3494 continue;
3496 if (!past_end && peep2_fill_buffer (bb, insn, live))
3497 goto next_insn;
3499 /* If we did not fill an empty buffer, it signals the end of the
3500 block. */
3501 if (peep2_current_count == 0)
3502 break;
3504 /* The buffer filled to the current maximum, so try to match. */
3506 pos = peep2_buf_position (peep2_current + peep2_current_count);
3507 peep2_insn_data[pos].insn = PEEP2_EOB;
3508 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3510 /* Match the peephole. */
3511 head = peep2_insn_data[peep2_current].insn;
3512 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3513 if (attempt != NULL)
3515 rtx last = peep2_attempt (bb, head, match_len, attempt);
3516 if (last)
3518 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3519 continue;
3523 /* No match: advance the buffer by one insn. */
3524 peep2_current = peep2_buf_position (peep2_current + 1);
3525 peep2_current_count--;
3529 default_rtl_profile ();
3530 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3531 BITMAP_FREE (peep2_insn_data[i].live_before);
3532 BITMAP_FREE (live);
3533 if (peep2_do_rebuild_jump_labels)
3534 rebuild_jump_labels (get_insns ());
3536 #endif /* HAVE_peephole2 */
3538 /* Common predicates for use with define_bypass. */
3540 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3541 data, not the address operand(s) of the store. IN_INSN and OUT_INSN
3542 must be either a single_set or a PARALLEL with SETs inside. */
3544 int
3545 store_data_bypass_p (rtx out_insn, rtx in_insn)
3547 rtx out_set, in_set;
3548 rtx out_pat, in_pat;
3549 rtx out_exp, in_exp;
3550 int i, j;
3552 in_set = single_set (in_insn);
3553 if (in_set)
3555 if (!MEM_P (SET_DEST (in_set)))
3556 return false;
3558 out_set = single_set (out_insn);
3559 if (out_set)
3561 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3562 return false;
3564 else
3566 out_pat = PATTERN (out_insn);
3568 if (GET_CODE (out_pat) != PARALLEL)
3569 return false;
3571 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3573 out_exp = XVECEXP (out_pat, 0, i);
3575 if (GET_CODE (out_exp) == CLOBBER)
3576 continue;
3578 gcc_assert (GET_CODE (out_exp) == SET);
3580 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3581 return false;
3585 else
3587 in_pat = PATTERN (in_insn);
3588 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3590 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3592 in_exp = XVECEXP (in_pat, 0, i);
3594 if (GET_CODE (in_exp) == CLOBBER)
3595 continue;
3597 gcc_assert (GET_CODE (in_exp) == SET);
3599 if (!MEM_P (SET_DEST (in_exp)))
3600 return false;
3602 out_set = single_set (out_insn);
3603 if (out_set)
3605 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3606 return false;
3608 else
3610 out_pat = PATTERN (out_insn);
3611 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3613 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3615 out_exp = XVECEXP (out_pat, 0, j);
3617 if (GET_CODE (out_exp) == CLOBBER)
3618 continue;
3620 gcc_assert (GET_CODE (out_exp) == SET);
3622 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3623 return false;
3629 return true;
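/* Illustrative define_bypass use (a hypothetical machine-description
   fragment):

     (define_bypass 1 "alu" "store" "store_data_bypass_p")

   grants the shorter latency only when the "alu" result feeds the data
   being stored; a dependency through the store's address operand keeps
   the default latency.  */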
3632 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3633 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3634 or a multiple set; IN_INSN should be a single_set for correctness, but for
3635 convenience of insn categorization it may be any JUMP or CALL insn. */
3637 int
3638 if_test_bypass_p (rtx out_insn, rtx in_insn)
3640 rtx out_set, in_set;
3642 in_set = single_set (in_insn);
3643 if (! in_set)
3645 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3646 return false;
3649 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3650 return false;
3651 in_set = SET_SRC (in_set);
3653 out_set = single_set (out_insn);
3654 if (out_set)
3656 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3657 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3658 return false;
3660 else
3662 rtx out_pat;
3663 int i;
3665 out_pat = PATTERN (out_insn);
3666 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3668 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3670 rtx exp = XVECEXP (out_pat, 0, i);
3672 if (GET_CODE (exp) == CLOBBER)
3673 continue;
3675 gcc_assert (GET_CODE (exp) == SET);
3677 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3678 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3679 return false;
3683 return true;
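/* Illustration: if OUT_INSN sets (reg 100) and IN_INSN is
   (set (pc) (if_then_else (ne (reg 100) (const_int 0)) ...)), the
   dependency is only in the condition, so the function returns true.
   Had (reg 100) also appeared in the THEN or ELSE arm, the result would
   be false.  */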
3686 static bool
3687 gate_handle_peephole2 (void)
3689 return (optimize > 0 && flag_peephole2);
3692 static unsigned int
3693 rest_of_handle_peephole2 (void)
3695 #ifdef HAVE_peephole2
3696 peephole2_optimize ();
3697 #endif
3698 return 0;
3701 struct rtl_opt_pass pass_peephole2 =
3704 RTL_PASS,
3705 "peephole2", /* name */
3706 gate_handle_peephole2, /* gate */
3707 rest_of_handle_peephole2, /* execute */
3708 NULL, /* sub */
3709 NULL, /* next */
3710 0, /* static_pass_number */
3711 TV_PEEPHOLE2, /* tv_id */
3712 0, /* properties_required */
3713 0, /* properties_provided */
3714 0, /* properties_destroyed */
3715 0, /* todo_flags_start */
3716 TODO_df_finish | TODO_verify_rtl_sharing |
3717 0 /* todo_flags_finish */
3721 static unsigned int
3722 rest_of_handle_split_all_insns (void)
3724 split_all_insns ();
3725 return 0;
3728 struct rtl_opt_pass pass_split_all_insns =
3731 RTL_PASS,
3732 "split1", /* name */
3733 NULL, /* gate */
3734 rest_of_handle_split_all_insns, /* execute */
3735 NULL, /* sub */
3736 NULL, /* next */
3737 0, /* static_pass_number */
3738 TV_NONE, /* tv_id */
3739 0, /* properties_required */
3740 0, /* properties_provided */
3741 0, /* properties_destroyed */
3742 0, /* todo_flags_start */
3743 0 /* todo_flags_finish */
3747 static unsigned int
3748 rest_of_handle_split_after_reload (void)
3750 /* If optimizing, then go ahead and split insns now. */
3751 #ifndef STACK_REGS
3752 if (optimize > 0)
3753 #endif
3754 split_all_insns ();
3755 return 0;
3758 struct rtl_opt_pass pass_split_after_reload =
3761 RTL_PASS,
3762 "split2", /* name */
3763 NULL, /* gate */
3764 rest_of_handle_split_after_reload, /* execute */
3765 NULL, /* sub */
3766 NULL, /* next */
3767 0, /* static_pass_number */
3768 TV_NONE, /* tv_id */
3769 0, /* properties_required */
3770 0, /* properties_provided */
3771 0, /* properties_destroyed */
3772 0, /* todo_flags_start */
3773 0 /* todo_flags_finish */
3777 static bool
3778 gate_handle_split_before_regstack (void)
3780 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3781 /* If flow2 creates new instructions which need splitting
3782 and scheduling after reload is not done, they might not be
3783 split until final, which doesn't allow splitting
3784 if HAVE_ATTR_length is defined. */
3785 # ifdef INSN_SCHEDULING
3786 return (optimize && !flag_schedule_insns_after_reload);
3787 # else
3788 return (optimize);
3789 # endif
3790 #else
3791 return 0;
3792 #endif
3795 static unsigned int
3796 rest_of_handle_split_before_regstack (void)
3798 split_all_insns ();
3799 return 0;
3802 struct rtl_opt_pass pass_split_before_regstack =
3805 RTL_PASS,
3806 "split3", /* name */
3807 gate_handle_split_before_regstack, /* gate */
3808 rest_of_handle_split_before_regstack, /* execute */
3809 NULL, /* sub */
3810 NULL, /* next */
3811 0, /* static_pass_number */
3812 TV_NONE, /* tv_id */
3813 0, /* properties_required */
3814 0, /* properties_provided */
3815 0, /* properties_destroyed */
3816 0, /* todo_flags_start */
3817 0 /* todo_flags_finish */
3821 static bool
3822 gate_handle_split_before_sched2 (void)
3824 #ifdef INSN_SCHEDULING
3825 return optimize > 0 && flag_schedule_insns_after_reload;
3826 #else
3827 return 0;
3828 #endif
3831 static unsigned int
3832 rest_of_handle_split_before_sched2 (void)
3834 #ifdef INSN_SCHEDULING
3835 split_all_insns ();
3836 #endif
3837 return 0;
3840 struct rtl_opt_pass pass_split_before_sched2 =
3843 RTL_PASS,
3844 "split4", /* name */
3845 gate_handle_split_before_sched2, /* gate */
3846 rest_of_handle_split_before_sched2, /* execute */
3847 NULL, /* sub */
3848 NULL, /* next */
3849 0, /* static_pass_number */
3850 TV_NONE, /* tv_id */
3851 0, /* properties_required */
3852 0, /* properties_provided */
3853 0, /* properties_destroyed */
3854 0, /* todo_flags_start */
3855 TODO_verify_flow /* todo_flags_finish */
3859 /* The placement of the splitting that we do for shorten_branches
3860 depends on whether regstack is used by the target or not. */
3861 static bool
3862 gate_do_final_split (void)
3864 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3865 return 1;
3866 #else
3867 return 0;
3868 #endif
3871 struct rtl_opt_pass pass_split_for_shorten_branches =
3874 RTL_PASS,
3875 "split5", /* name */
3876 gate_do_final_split, /* gate */
3877 split_all_insns_noflow, /* execute */
3878 NULL, /* sub */
3879 NULL, /* next */
3880 0, /* static_pass_number */
3881 TV_NONE, /* tv_id */
3882 0, /* properties_required */
3883 0, /* properties_provided */
3884 0, /* properties_destroyed */
3885 0, /* todo_flags_start */
3886 TODO_verify_rtl_sharing /* todo_flags_finish */