/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

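/* Illustrative sketch (an editorial addition, not part of recog.c): a
   typical grouped use of the validation machinery above.  INSN and X are
   hypothetical; the shape mirrors canonicalize_change_group.  */
#if 0
{
  rtx tem = XEXP (x, 0);

  /* Queue both edits as one group; nothing is committed yet.  */
  validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
  validate_change (insn, &XEXP (x, 1), tem, 1);

  /* Either both edits stick and INSN is re-recognized, or both are
     backed out via cancel_changes inside apply_change_group.  */
  if (!apply_change_group ())
    /* INSN is unchanged here.  */;
}
#endif
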
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_INT_P (XEXP (x, 1))
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode.  This is
     used by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */

int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

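/* Illustrative sketch (editorial addition): using the wrappers above to
   replace one register by another throughout a hypothetical INSN,
   keeping the rewrite only if the result still matches some pattern.  */
#if 0
  if (validate_replace_rtx (old_reg, new_reg, insn))
    /* INSN was rewritten and re-recognized successfully.  */;
  else
    /* All substitutions were backed out; INSN is intact.  */;
#endif
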
/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}

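/* For reference (editorial addition): in a machine description,
   general_operand typically appears as the predicate of a match_operand,
   for example

     (match_operand:SI 1 "general_operand" "g")

   which accepts an SImode register, a valid memory reference, or a
   legitimate constant.  */
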
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

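/* For reference (editorial addition): given the STACK_PUSH_CODE and
   STACK_POP_CODE defaults near the top of this file, on a
   STACK_GROWS_DOWNWARD target a SImode push operand looks like

     (mem:SI (pre_dec:SI (reg:SI sp)))

   and the corresponding pop operand like

     (mem:SI (post_inc:SI (reg:SI sp)))

   with the inner register being the stack pointer.  */
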
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original
                 insns then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

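/* For reference (editorial addition): the asm body shapes recognized by
   extract_asm_operands and asm_noperands above are

     (asm_operands ...)                         no outputs
     (set OUTPUT (asm_operands ...))            one output
     (parallel [(set OUT (asm_operands ...))
                ... (clobber (reg ...)) ...])   several outputs
                                                and/or clobbers.  */
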
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;		/* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}

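/* Illustrative sketch (editorial addition): pulling the operands of an
   asm BODY into local arrays, in the same way check_asm_operands does
   above.  BODY is a hypothetical asm pattern with a non-negative
   asm_noperands.  */
#if 0
  int n = asm_noperands (body);
  rtx *ops = XALLOCAVEC (rtx, n);
  const char **cons = XALLOCAVEC (const char *, n);
  const char *templ
    = decode_asm_operands (body, ops, NULL, cons, NULL, NULL);
#endif
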
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
             : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr);
}

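/* For reference (editorial addition): (post_inc:SI (reg:SI r)) is mode
   dependent because the increment it applies is GET_MODE_SIZE of the
   enclosing MEM's mode; the same address rtx would step by a different
   amount inside, say, a QImode reference.  */
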
/* Like extract_insn, but save the insn extracted and don't extract it again
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}


/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode, NULL);
          memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          recog_data.is_asm = true;
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* VOIDmode match_operands get their mode from the real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  if (INSN_CODE (insn) < 0)
    for (i = 0; i < recog_data.n_alternatives; i++)
      recog_data.alternative_enabled_p[i] = true;
  else
    {
      recog_data.insn = insn;
      for (i = 0; i < recog_data.n_alternatives; i++)
        {
          which_alternative = i;
          recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
        }
    }

  recog_data.insn = NULL;
  which_alternative = -1;
}
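
/* To make the bookkeeping above concrete: on a hypothetical target whose
   addsi3 pattern is

     (define_insn "addsi3"
       [(set (match_operand:SI 0 "register_operand" "=r")
             (plus:SI (match_operand:SI 1 "register_operand" "%0")
                      (match_operand:SI 2 "nonmemory_operand" "rI")))]
       ...)

   extracting a recognized add leaves n_operands == 3, n_alternatives == 1,
   operand_type[0] == OP_OUT and operand_type[1], operand_type[2] == OP_IN,
   with constraints[] pointing at "=r", "%0" and "rI".  */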

/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */
void
preprocess_constraints (void)
{
  int i;

  for (i = 0; i < recog_data.n_operands; i++)
    memset (recog_op_alt[i], 0, (recog_data.n_alternatives
                                 * sizeof (struct operand_alternative)));

  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].cl = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (!recog_data.alternative_enabled_p[j])
            {
              p = skip_alternative (p);
              continue;
            }

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[j].matches = strtoul (p, &end, 10);
                    recog_op_alt[op_alt[j].matches][j].matched = i;
                    p = end;
                  }
                  continue;

                case TARGET_MEM_CONSTRAINT:
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].is_address = 1;
                  op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
                      [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
                  break;

                case 'g':
                case 'r':
                  op_alt[j].cl =
                   reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
                  break;

                default:
                  if (EXTRA_MEMORY_CONSTRAINT (c, p))
                    {
                      op_alt[j].memory_ok = 1;
                      break;
                    }
                  if (EXTRA_ADDRESS_CONSTRAINT (c, p))
                    {
                      op_alt[j].is_address = 1;
                      op_alt[j].cl
                        = (reg_class_subunion
                           [(int) op_alt[j].cl]
                           [(int) base_reg_class (VOIDmode, ADDRESS,
                                                  SCRATCH)]);
                      break;
                    }

                  op_alt[j].cl
                    = (reg_class_subunion
                       [(int) op_alt[j].cl]
                       [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
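
/* For example, given an operand whose constraint string is "=r,m", the
   loop above records two alternatives: alternative 0 gets cl ==
   GENERAL_REGS (from 'r'; '=' carries no class information), and
   alternative 1 gets memory_ok set (from 'm', the usual spelling of
   TARGET_MEM_CONSTRAINT).  */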

/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this_op, other;
};

int
constrain_operands (int strict)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      if (!recog_data.alternative_enabled_p[which_alternative])
        {
          int i;

          for (i = 0; i < recog_data.n_operands; i++)
            constraints[i] = skip_alternative (constraints[i]);

          which_alternative++;
          continue;
        }

      for (opno = 0; opno < recog_data.n_operands; opno++)
        {
          rtx op = recog_data.operand[opno];
          enum machine_mode mode = GET_MODE (op);
          const char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;
          int len;

          earlyclobber[opno] = 0;

          /* A unary operator may be accepted by the predicate, but it
             is irrelevant for matching constraints.  */
          if (UNARY_P (op))
            op = XEXP (op, 0);

          if (GET_CODE (op) == SUBREG)
            {
              if (REG_P (SUBREG_REG (op))
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
                                              GET_MODE (SUBREG_REG (op)),
                                              SUBREG_BYTE (op),
                                              GET_MODE (op));
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;

          do
            switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
              {
              case '\0':
                len = 0;
                break;
              case ',':
                c = '\0';
                break;

              case '?': case '!': case '*': case '%':
              case '=': case '+':
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                do
                  p++;
                while (*p && *p != ',');
                len = 0;
                break;

              case '&':
                earlyclobber[opno] = 1;
                if (seen_earlyclobber_at < 0)
                  seen_earlyclobber_at = opno;
                break;

              case '0': case '1': case '2': case '3': case '4':
              case '5': case '6': case '7': case '8': case '9':
                {
                  /* This operand must be the same as a previous one.
                     This kind of constraint is used for instructions such
                     as add when they take only two operands.

                     Note that the lower-numbered operand is passed first.

                     If we are not testing strictly, assume that this
                     constraint will be satisfied.  */

                  char *end;
                  int match;

                  match = strtoul (p, &end, 10);
                  p = end;

                  if (strict < 0)
                    val = 1;
                  else
                    {
                      rtx op1 = recog_data.operand[match];
                      rtx op2 = recog_data.operand[opno];

                      /* A unary operator may be accepted by the predicate,
                         but it is irrelevant for matching constraints.  */
                      if (UNARY_P (op1))
                        op1 = XEXP (op1, 0);
                      if (UNARY_P (op2))
                        op2 = XEXP (op2, 0);

                      val = operands_match_p (op1, op2);
                    }

                  matching_operands[opno] = match;
                  matching_operands[match] = opno;

                  if (val != 0)
                    win = 1;

                  /* If output is *x and input is *--x, arrange later
                     to change the output to *--x as well, since the
                     output op is the one that will be printed.  */
                  if (val == 2 && strict > 0)
                    {
                      funny_match[funny_match_index].this_op = opno;
                      funny_match[funny_match_index++].other = match;
                    }
                }
                len = 0;
                break;

              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  */
                if (strict <= 0
                    || (strict_memory_address_p (recog_data.operand_mode[opno],
                                                 op)))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  Well, except that reload
                   doesn't check the validity of its replacements, but
                   that should only matter when there's a bug.  */
              case 'g':
                /* Anything goes unless it is a REG and really has a hard reg
                   but the hard reg is not in the class GENERAL_REGS.  */
                if (REG_P (op))
                  {
                    if (strict < 0
                        || GENERAL_REGS == ALL_REGS
                        || (reload_in_progress
                            && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                        || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                      win = 1;
                  }
                else if (strict < 0 || general_operand (op, mode))
                  win = 1;
                break;

              case 'X':
                /* This is used for a MATCH_SCRATCH in the cases when
                   we don't actually need anything.  So anything goes
                   any time.  */
                win = 1;
                break;

              case TARGET_MEM_CONSTRAINT:
                /* Memory operands must be valid, to the extent
                   required by STRICT.  */
                if (MEM_P (op))
                  {
                    if (strict > 0
                        && !strict_memory_address_addr_space_p
                             (GET_MODE (op), XEXP (op, 0),
                              MEM_ADDR_SPACE (op)))
                      break;
                    if (strict == 0
                        && !memory_address_addr_space_p
                             (GET_MODE (op), XEXP (op, 0),
                              MEM_ADDR_SPACE (op)))
                      break;
                    win = 1;
                  }
                /* Before reload, accept what reload can turn into mem.  */
                else if (strict < 0 && CONSTANT_P (op))
                  win = 1;
                /* During reload, accept a pseudo.  */
                else if (reload_in_progress && REG_P (op)
                         && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                  win = 1;
                break;

              case '<':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                        || GET_CODE (XEXP (op, 0)) == POST_DEC))
                  win = 1;
                break;

              case '>':
                if (MEM_P (op)
                    && (GET_CODE (XEXP (op, 0)) == PRE_INC
                        || GET_CODE (XEXP (op, 0)) == POST_INC))
                  win = 1;
                break;

              case 'E':
              case 'F':
                if (GET_CODE (op) == CONST_DOUBLE
                    || (GET_CODE (op) == CONST_VECTOR
                        && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
                  win = 1;
                break;

              case 'G':
              case 'H':
                if (GET_CODE (op) == CONST_DOUBLE
                    && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
                  win = 1;
                break;

              case 's':
                if (CONST_INT_P (op)
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  break;
              case 'i':
                if (CONSTANT_P (op))
                  win = 1;
                break;

              case 'n':
                if (CONST_INT_P (op)
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  win = 1;
                break;

              case 'I':
              case 'J':
              case 'K':
              case 'L':
              case 'M':
              case 'N':
              case 'O':
              case 'P':
                if (CONST_INT_P (op)
                    && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
                  win = 1;
                break;

              case 'V':
                if (MEM_P (op)
                    && ((strict > 0 && ! offsettable_memref_p (op))
                        || (strict < 0
                            && !(CONSTANT_P (op) || MEM_P (op)))
                        || (reload_in_progress
                            && !(REG_P (op)
                                 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
                  win = 1;
                break;

              case 'o':
                if ((strict > 0 && offsettable_memref_p (op))
                    || (strict == 0 && offsettable_nonstrict_memref_p (op))
                    /* Before reload, accept what reload can handle.  */
                    || (strict < 0
                        && (CONSTANT_P (op) || MEM_P (op)))
                    /* During reload, accept a pseudo.  */
                    || (reload_in_progress && REG_P (op)
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              default:
                {
                  enum reg_class cl;

                  cl = (c == 'r'
                        ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
                  if (cl != NO_REGS)
                    {
                      if (strict < 0
                          || (strict == 0
                              && REG_P (op)
                              && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                          || (strict == 0 && GET_CODE (op) == SCRATCH)
                          || (REG_P (op)
                              && reg_fits_class_p (op, cl, offset, mode)))
                        win = 1;
                    }
#ifdef EXTRA_CONSTRAINT_STR
                  else if (EXTRA_CONSTRAINT_STR (op, c, p))
                    win = 1;

                  else if (EXTRA_MEMORY_CONSTRAINT (c, p)
                           /* Every memory operand can be reloaded to fit.  */
                           && ((strict < 0 && MEM_P (op))
                               /* Before reload, accept what reload can turn
                                  into mem.  */
                               || (strict < 0 && CONSTANT_P (op))
                               /* During reload, accept a pseudo.  */
                               || (reload_in_progress && REG_P (op)
                                   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
                    win = 1;
                  else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
                           /* Every address operand can be reloaded to fit.  */
                           && strict < 0)
                    win = 1;
#endif
                  break;
                }
              }
          while (p += len, c);

          constraints[opno] = p;
          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;

          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0 && seen_earlyclobber_at >= 0)
            for (eopno = seen_earlyclobber_at;
                 eopno < recog_data.n_operands;
                 eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && REG_P (recog_data.operand[eopno]))
                for (opno = 0; opno < recog_data.n_operands; opno++)
                  if ((MEM_P (recog_data.operand[opno])
                       || recog_data.operand_type[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *recog_data.constraints[opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && operands_match_p (recog_data.operand[opno],
                                                 recog_data.operand[eopno]))
                      && ! safe_from_earlyclobber (recog_data.operand[opno],
                                                   recog_data.operand[eopno]))
                    lose = 1;

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_data.operand[funny_match[funny_match_index].other]
                    = recog_data.operand[funny_match[funny_match_index].this_op];
                }

#ifdef AUTO_INC_DEC
              /* For operands without < or > constraints reject side-effects.  */
              if (recog_data.is_asm)
                {
                  for (opno = 0; opno < recog_data.n_operands; opno++)
                    if (MEM_P (recog_data.operand[opno]))
                      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
                        {
                        case PRE_INC:
                        case POST_INC:
                        case PRE_DEC:
                        case POST_DEC:
                        case PRE_MODIFY:
                        case POST_MODIFY:
                          if (strchr (recog_data.constraints[opno], '<') == NULL
                              && strchr (recog_data.constraints[opno], '>')
                                 == NULL)
                            return 0;
                          break;
                        default:
                          break;
                        }
                }
#endif
              return 1;
            }
        }

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
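
/* A worked example of the matching-constraint handling above: for a
   two-address insn whose constraints are { "=r", "0", "r" }, operand 1's
   "0" requires operands_match_p (operands[0], operands[1]) when STRICT is
   not negative; with STRICT < 0 the match is simply assumed, on the theory
   that reload will later copy operand 1 into operand 0 to make it true.  */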

/* Return true iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
                  enum machine_mode mode)
{
  int regno = REGNO (operand);

  if (cl == NO_REGS)
    return false;

  return (HARD_REGISTER_NUM_P (regno)
          && in_hard_reg_set_p (reg_class_contents[(int) cl],
                                mode, regno + offset));
}
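
/* Note that in_hard_reg_set_p checks every hard register the value would
   occupy: e.g. for a DImode value on a 32-bit target it requires both
   regno + offset and regno + offset + 1 to be in CL.  */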

/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx
split_insn (rtx insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx first = PREV_INSN (insn);
  rtx last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL_RTX;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
        {
          note = find_reg_equal_equiv_note (insn);
          if (note && CONSTANT_P (XEXP (note, 0)))
            set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
          else if (CONSTANT_P (SET_SRC (insn_set)))
            set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
        }
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
        {
          if (INSN_P (first))
            cleanup_subreg_operands (first);
          if (first == last)
            break;
          first = NEXT_INSN (first);
        }
    }

  return last;
}

/* Split all insns in the function.  */

void
split_all_insns (void)
{
  sbitmap blocks;
  bool changed;
  basic_block bb;

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx insn, next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
        {
          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          finish = (insn == BB_END (bb));
          if (INSN_P (insn))
            {
              rtx set = single_set (insn);

              /* Don't split no-op move insns.  These should silently
                 disappear later in final.  Splitting such insns would
                 break the code that handles LIBCALL blocks.  */
              if (set && set_noop_p (set))
                {
                  /* Nops get in the way while scheduling, so delete them
                     now if register allocation has already been done.  It
                     is too risky to try to do this before register
                     allocation, and there are unlikely to be very many
                     nops then anyways.  */
                  if (reload_completed)
                    delete_insn_and_edges (insn);
                }
              else
                {
                  if (split_insn (insn))
                    {
                      SET_BIT (blocks, bb->index);
                      changed = true;
                    }
                }
            }
        }
    }

  default_rtl_profile ();
  if (changed)
    find_many_sub_basic_blocks (blocks);

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  sbitmap_free (blocks);
}

/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx next, insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
        {
          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles LIBCALL blocks.  */
          rtx set = single_set (insn);
          if (set && set_noop_p (set))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyways.

                 ??? Should we use delete_insn when the CFG isn't valid?  */
              if (reload_completed)
                delete_insn_and_edges (insn);
            }
          else
            split_insn (insn);
        }
    }
  return 0;
}

#ifdef HAVE_peephole2
struct peep2_insn_data
{
  rtx insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A non-insn marker indicating the last insn of the block.
   The live_before regset for this element is correct, indicating
   DF_LIVE_OUT for the block.  */
#define PEEP2_EOB pc_rtx

/* Wrap N to fit into the peep2_insn_data buffer.  */

static int
peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}
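
/* peep2_insn_data is used as a circular buffer of MAX_INSNS_PER_PEEP2 + 1
   slots, so a single subtraction suffices for the wrap above: if
   MAX_INSNS_PER_PEEP2 were 5, peep2_buf_position (4 + 3) would yield 1.
   Callers never pass an index more than one full trip around the ring.  */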

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n = peep2_buf_position (peep2_current + n);

  return peep2_insn_data[n].insn;
}

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  int regno, n;

  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  regno = REGNO (reg);
  n = hard_regno_nregs[regno][GET_MODE (reg)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
      return 0;
  return 1;
}

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
   remains available until the end of LAST_INSN.  LAST_INSN may be NULL_RTX,
   in which case the only condition is that the register must be available
   before CURRENT_INSN.
   Registers that already have bits set in REG_SET will not be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
                          enum machine_mode mode, HARD_REG_SET *reg_set)
{
  static int search_ofs;
  enum reg_class cl;
  HARD_REG_SET live;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      HARD_REG_SET this_live;

      from = peep2_buf_position (from + 1);
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
      REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
      IOR_HARD_REG_SET (live, this_live);
    }

  cl = (class_str[0] == 'r' ? GENERAL_REGS
        : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
        raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Don't allocate fixed registers.  */
      if (fixed_regs[regno])
        continue;
      /* Don't allocate global registers.  */
      if (global_regs[regno])
        continue;
      /* Make sure the register is of the right class.  */
      if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
        continue;
      /* And can support the mode we need.  */
      if (! HARD_REGNO_MODE_OK (regno, mode))
        continue;
      /* And that we don't create an extra save/restore.  */
      if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
        continue;
      if (! targetm.hard_regno_scratch_ok (regno))
        continue;

      /* And we don't clobber traceback for noreturn functions.  */
      if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
          && (! reload_completed || frame_pointer_needed))
        continue;

      success = 1;
      for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
        {
          if (TEST_HARD_REG_BIT (*reg_set, regno + j)
              || TEST_HARD_REG_BIT (live, regno + j))
            {
              success = 0;
              break;
            }
        }
      if (success)
        {
          add_to_hard_reg_set (reg_set, mode, regno);

          /* Start the next search with the next register.  */
          if (++raw_regno >= FIRST_PSEUDO_REGISTER)
            raw_regno = 0;
          search_ofs = raw_regno;

          return gen_rtx_REG (mode, regno);
        }
    }

  search_ofs = 0;
  return NULL_RTX;
}
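
/* This is the routine behind (match_scratch ...) in define_peephole2
   patterns; the generated recognizer asks for a scratch with something
   like peep2_find_free_register (0, 2, "r", SImode, &regs_allocated),
   i.e. a general register free from the first through the third insn of
   the candidate sequence.  (The argument values here are illustrative.)  */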

/* Forget all currently tracked instructions, only remember current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last hold invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL_RTX;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}

/* While scanning basic block BB, we found a match of length MATCH_LEN,
   starting at INSN.  Perform the replacement, removing the old insns and
   replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
   if the replacement is rejected.  */

static rtx
peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
{
  int i;
  rtx last, eh_note, as_note, before_try, x;
  rtx old_insn, new_insn;
  bool was_call = false;

  /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
     match more than one insn, or to be split into more than one insn.  */
  old_insn = peep2_insn_data[peep2_current].insn;
  if (RTX_FRAME_RELATED_P (old_insn))
    {
      bool any_note = false;
      rtx note;

      if (match_len != 0)
        return NULL;

      /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
         may be in the stream for the purpose of register allocation.  */
      if (active_insn_p (attempt))
        new_insn = attempt;
      else
        new_insn = next_active_insn (attempt);
      if (next_active_insn (new_insn))
        return NULL;

      /* We have a 1-1 replacement.  Copy over any frame-related info.  */
      RTX_FRAME_RELATED_P (new_insn) = 1;

      /* Allow the backend to fill in a note during the split.  */
      for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
        switch (REG_NOTE_KIND (note))
          {
          case REG_FRAME_RELATED_EXPR:
          case REG_CFA_DEF_CFA:
          case REG_CFA_ADJUST_CFA:
          case REG_CFA_OFFSET:
          case REG_CFA_REGISTER:
          case REG_CFA_EXPRESSION:
          case REG_CFA_RESTORE:
          case REG_CFA_SET_VDRAP:
            any_note = true;
            break;
          default:
            break;
          }

      /* If the backend didn't supply a note, copy one over.  */
      if (!any_note)
        for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
          switch (REG_NOTE_KIND (note))
            {
            case REG_FRAME_RELATED_EXPR:
            case REG_CFA_DEF_CFA:
            case REG_CFA_ADJUST_CFA:
            case REG_CFA_OFFSET:
            case REG_CFA_REGISTER:
            case REG_CFA_EXPRESSION:
            case REG_CFA_RESTORE:
            case REG_CFA_SET_VDRAP:
              add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
              any_note = true;
              break;
            default:
              break;
            }

      /* If there still isn't a note, make sure the unwind info sees the
         same expression as before the split.  */
      if (!any_note)
        {
          rtx old_set, new_set;

          /* The old insn had better have been simple, or annotated.  */
          old_set = single_set (old_insn);
          gcc_assert (old_set != NULL);

          new_set = single_set (new_insn);
          if (!new_set || !rtx_equal_p (new_set, old_set))
            add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
        }

      /* Copy prologue/epilogue status.  This is required in order to keep
         proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
      maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
     cfg-related call notes.  */
  for (i = 0; i <= match_len; ++i)
    {
      int j;
      rtx note;

      j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;
      if (!CALL_P (old_insn))
        continue;
      was_call = true;

      new_insn = attempt;
      while (new_insn != NULL_RTX)
        {
          if (CALL_P (new_insn))
            break;
          new_insn = NEXT_INSN (new_insn);
        }

      gcc_assert (new_insn != NULL_RTX);

      CALL_INSN_FUNCTION_USAGE (new_insn)
        = CALL_INSN_FUNCTION_USAGE (old_insn);

      for (note = REG_NOTES (old_insn);
           note;
           note = XEXP (note, 1))
        switch (REG_NOTE_KIND (note))
          {
          case REG_NORETURN:
          case REG_SETJMP:
            add_reg_note (new_insn, REG_NOTE_KIND (note),
                          XEXP (note, 0));
            break;
          default:
            /* Discard all other reg notes.  */
            break;
          }

      /* Croak if there is another call in the sequence.  */
      while (++i <= match_len)
        {
          j = peep2_buf_position (peep2_current + i);
          old_insn = peep2_insn_data[j].insn;
          gcc_assert (!CALL_P (old_insn));
        }
      break;
    }

  /* If we matched any instruction that had a REG_ARGS_SIZE, then
     move those notes over to the new sequence.  */
  as_note = NULL;
  for (i = match_len; i >= 0; --i)
    {
      int j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;

      as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
      if (as_note)
        break;
    }

  i = peep2_buf_position (peep2_current + match_len);
  eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);

  /* Replace the old sequence with the new.  */
  last = emit_insn_after_setloc (attempt,
                                 peep2_insn_data[i].insn,
                                 INSN_LOCATOR (peep2_insn_data[i].insn));
  before_try = PREV_INSN (insn);
  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

  /* Re-insert the EH_REGION notes.  */
  if (eh_note || (was_call && nonlocal_goto_handler_labels))
    {
      edge eh_edge;
      edge_iterator ei;

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
        if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
          break;

      if (eh_note)
        copy_reg_eh_region_note_backward (eh_note, last, before_try);

      if (eh_edge)
        for (x = last; x != before_try; x = PREV_INSN (x))
          if (x != BB_END (bb)
              && (can_throw_internal (x)
                  || can_nonlocal_goto (x)))
            {
              edge nfte, nehe;
              int flags;

              nfte = split_block (bb, x);
              flags = (eh_edge->flags
                       & (EDGE_EH | EDGE_ABNORMAL));
              if (CALL_P (x))
                flags |= EDGE_ABNORMAL_CALL;
              nehe = make_edge (nfte->src, eh_edge->dest,
                                flags);

              nehe->probability = eh_edge->probability;
              nfte->probability
                = REG_BR_PROB_BASE - nehe->probability;

              peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
              bb = nfte->src;
              eh_edge = nehe;
            }

      /* Converting possibly trapping insn to non-trapping is
         possible.  Zap dummy outgoing edges.  */
      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
    }

  /* Re-insert the ARGS_SIZE notes.  */
  if (as_note)
    fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));

  /* If we generated a jump instruction, it won't have
     JUMP_LABEL set.  Recompute after we're done.  */
  for (x = last; x != before_try; x = PREV_INSN (x))
    if (JUMP_P (x))
      {
        peep2_do_rebuild_jump_labels = true;
        break;
      }

  return last;
}

/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
{
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  x = last;
  do
    {
      if (INSN_P (x))
        {
          df_insn_rescan (x);
          if (peep2_current_count < MAX_INSNS_PER_PEEP2)
            {
              peep2_current_count++;
              if (--i < 0)
                i = MAX_INSNS_PER_PEEP2;
              peep2_insn_data[i].insn = x;
              df_simulate_one_insn_backwards (bb, x, &live);
              COPY_REG_SET (peep2_insn_data[i].live_before, &live);
            }
        }
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}

/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
   Return true if we added it, false otherwise.  The caller will try to match
   peepholes against the buffer if we return false; otherwise it will try to
   add more instructions to the buffer.  */

static bool
peep2_fill_buffer (basic_block bb, rtx insn, regset live)
{
  int pos;

  /* Once we have filled the maximum number of insns the buffer can hold,
     allow the caller to match the insns against peepholes.  We wait until
     the buffer is full in case the target has similar peepholes of different
     length; we always want to match the longest if possible.  */
  if (peep2_current_count == MAX_INSNS_PER_PEEP2)
    return false;

  /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
     any other pattern, lest it change the semantics of the frame info.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* Let the buffer drain first.  */
      if (peep2_current_count > 0)
        return false;
      /* Now the insn will be the only thing in the buffer.  */
    }

  pos = peep2_buf_position (peep2_current + peep2_current_count);
  peep2_insn_data[pos].insn = insn;
  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
  peep2_current_count++;

  df_simulate_one_insn_forwards (bb, insn, live);
  return true;
}

/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE (bb)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
        {
          rtx attempt, head;
          int match_len;

          if (!past_end && !NONDEBUG_INSN_P (insn))
            {
            next_insn:
              insn = NEXT_INSN (insn);
              if (insn == NEXT_INSN (BB_END (bb)))
                past_end = true;
              continue;
            }
          if (!past_end && peep2_fill_buffer (bb, insn, live))
            goto next_insn;

          /* If we did not fill an empty buffer, it signals the end of the
             block.  */
          if (peep2_current_count == 0)
            break;

          /* The buffer filled to the current maximum, so try to match.  */

          pos = peep2_buf_position (peep2_current + peep2_current_count);
          peep2_insn_data[pos].insn = PEEP2_EOB;
          COPY_REG_SET (peep2_insn_data[pos].live_before, live);

          /* Match the peephole.  */
          head = peep2_insn_data[peep2_current].insn;
          attempt = peephole2_insns (PATTERN (head), head, &match_len);
          if (attempt != NULL)
            {
              rtx last = peep2_attempt (bb, head, match_len, attempt);
              if (last)
                {
                  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
                  continue;
                }
            }

          /* No match: advance the buffer by one insn.  */
          peep2_current = peep2_buf_position (peep2_current + 1);
          peep2_current_count--;
        }
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
}
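
/* peephole2_insns above is generated from the target's define_peephole2
   patterns.  A schematic (made-up) example of such a pattern:

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
             (match_operand:SI 1 "register_operand" ""))
        (set (match_dup 0)
             (plus:SI (match_dup 0) (match_dup 1)))]
       "peep2_reg_dead_p (2, operands[1])"
       [(set (match_dup 0) (ashift:SI (match_dup 1) (const_int 1)))])

   Its C condition can use the peep2_* helpers defined in this file.  */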
#endif /* HAVE_peephole2 */

/* Common predicates for use with define_bypass.  */

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */

int
store_data_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;
  rtx out_pat, in_pat;
  rtx out_exp, in_exp;
  int i, j;

  in_set = single_set (in_insn);
  if (in_set)
    {
      if (!MEM_P (SET_DEST (in_set)))
        return false;

      out_set = single_set (out_insn);
      if (out_set)
        {
          if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
            return false;
        }
      else
        {
          out_pat = PATTERN (out_insn);

          if (GET_CODE (out_pat) != PARALLEL)
            return false;

          for (i = 0; i < XVECLEN (out_pat, 0); i++)
            {
              out_exp = XVECEXP (out_pat, 0, i);

              if (GET_CODE (out_exp) == CLOBBER)
                continue;

              gcc_assert (GET_CODE (out_exp) == SET);

              if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
                return false;
            }
        }
    }
  else
    {
      in_pat = PATTERN (in_insn);
      gcc_assert (GET_CODE (in_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (in_pat, 0); i++)
        {
          in_exp = XVECEXP (in_pat, 0, i);

          if (GET_CODE (in_exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (in_exp) == SET);

          if (!MEM_P (SET_DEST (in_exp)))
            return false;

          out_set = single_set (out_insn);
          if (out_set)
            {
              if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
                return false;
            }
          else
            {
              out_pat = PATTERN (out_insn);
              gcc_assert (GET_CODE (out_pat) == PARALLEL);

              for (j = 0; j < XVECLEN (out_pat, 0); j++)
                {
                  out_exp = XVECEXP (out_pat, 0, j);

                  if (GET_CODE (out_exp) == CLOBBER)
                    continue;

                  gcc_assert (GET_CODE (out_exp) == SET);

                  if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
                    return false;
                }
            }
        }
    }

  return true;
}
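
/* In a machine description these predicates appear as the optional guard
   of a define_bypass.  A sketch, with made-up reservation names:

     (define_bypass 1 "my_alu_insn" "my_store_insn" "store_data_bypass_p")

   meaning the ALU result can be forwarded to the store with latency 1,
   but only when the dependency is on the stored data, not the address.  */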

/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
   or multiple set; IN_INSN should be single_set for truth, but for convenience
   of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx out_insn, rtx in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
          || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
        return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
        {
          rtx exp = XVECEXP (out_pat, 0, i);

          if (GET_CODE (exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (exp) == SET);

          /* Check the destination of each SET in the PARALLEL; OUT_SET
             is null in this branch, so it must not be used here.  */
          if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
              || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
            return false;
        }
    }

  return true;
}

static bool
gate_handle_peephole2 (void)
{
  return (optimize > 0 && flag_peephole2);
}

static unsigned int
rest_of_handle_peephole2 (void)
{
#ifdef HAVE_peephole2
  peephole2_optimize ();
#endif
  return 0;
}

struct rtl_opt_pass pass_peephole2 =
{
 {
  RTL_PASS,
  "peephole2",                          /* name */
  gate_handle_peephole2,                /* gate */
  rest_of_handle_peephole2,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_PEEPHOLE2,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  0                                     /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_split_all_insns (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_all_insns =
{
 {
  RTL_PASS,
  "split1",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_all_insns,       /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

static unsigned int
rest_of_handle_split_after_reload (void)
{
  /* If optimizing, then go ahead and split insns now.  */
#ifndef STACK_REGS
  if (optimize > 0)
#endif
    split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_after_reload =
{
 {
  RTL_PASS,
  "split2",                             /* name */
  NULL,                                 /* gate */
  rest_of_handle_split_after_reload,    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

static bool
gate_handle_split_before_regstack (void)
{
#if defined (HAVE_ATTR_length) && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final, which does not allow splitting
     if HAVE_ATTR_length is defined.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_regstack (void)
{
  split_all_insns ();
  return 0;
}

struct rtl_opt_pass pass_split_before_regstack =
{
 {
  RTL_PASS,
  "split3",                             /* name */
  gate_handle_split_before_regstack,    /* gate */
  rest_of_handle_split_before_regstack, /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};

static bool
gate_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  return optimize > 0 && flag_schedule_insns_after_reload;
#else
  return 0;
#endif
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

struct rtl_opt_pass pass_split_before_sched2 =
{
 {
  RTL_PASS,
  "split4",                             /* name */
  gate_handle_split_before_sched2,      /* gate */
  rest_of_handle_split_before_sched2,   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_flow                      /* todo_flags_finish */
 }
};

/* The placement of the splitting that we do for shorten_branches
   depends on whether regstack is used by the target or not.  */
static bool
gate_do_final_split (void)
{
#if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
  return 1;
#else
  return 0;
#endif
}

struct rtl_opt_pass pass_split_for_shorten_branches =
{
 {
  RTL_PASS,
  "split5",                             /* name */
  gate_do_final_split,                  /* gate */
  split_all_insns_noflow,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_rtl_sharing               /* todo_flags_finish */
 }
};