Simplify convert_modes, ignoring invalid old modes for CONST_INTs.
[official-gcc.git] / gcc / recog.c
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1, with the UNSHARE argument defaulting
   to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1, with the UNSHARE argument defaulting
   to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
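
/* Example (illustrative, on a hypothetical target): if the add pattern
   carries a condition-code clobber, recognizing a bare
   (set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62))) before reload can
   report num_clobbers == 1, in which case the pattern is rewrapped above
   as

       (parallel [(set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))
                  (clobber (reg:CC 17))])

   before being accepted.  */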
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object, true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
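
/* Usage sketch (illustrative, names hypothetical): a caller stages
   related edits with a nonzero IN_GROUP argument and then commits or
   abandons them all at once:

       validate_change (insn, &SET_SRC (set), new_src, 1);
       validate_change (insn, &SET_DEST (set), new_dest, 1);
       if (!apply_change_group ())
         ... both edits were rolled back and INSN is unchanged ...

   Nothing is re-recognized until apply_change_group runs, so the edits
   are accepted or rejected as a unit.  */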
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
            {
              wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
            {
              wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
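
/* Example (illustrative, names hypothetical): to test whether INSN still
   recognizes with pseudo 100 replaced by the constant 4 everywhere in its
   pattern:

       if (validate_replace_rtx (regno_reg_rtx[100], GEN_INT (4), insn))
         ... INSN has been rewritten and re-recognized ...

   On failure every occurrence is restored before 0 is returned.  */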
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */

int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
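
/* Example (illustrative): this predicate is normally referenced from a
   machine description, e.g.

       (define_insn "..."
         [(set (match_operand:SI 0 "register_operand" "=r")
               (match_operand:SI 1 "general_operand" "g"))]
         ...)

   allowing operand 1 to be a REG, a valid MEM, or a legitimate SImode
   constant.  */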
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}
/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */

int
const_scalar_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
        return 0;

      if (prec == bitsize)
        return 1;
      else
        {
          /* Multiword partial int.  */
          HOST_WIDE_INT x
            = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
          return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
        }
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a CONST_WIDE_INT of mode
   MODE.  This most likely is not as useful as
   const_scalar_int_operand since it does not accept CONST_INTs, but
   is here for consistency.  */

int
const_wide_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_WIDE_INT_P (op))
    return 0;

  return const_scalar_int_operand (op, mode);
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
          && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode
   MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
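
/* Example (illustrative): on a STACK_GROWS_DOWNWARD target without push
   rounding, a valid SImode push operand has the shape

       (mem:SI (pre_dec:P (reg:P sp)))

   with P the target's Pmode; when PUSH_ROUNDING widens the slot, the
   PRE_MODIFY form checked above is required instead.  */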
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
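
/* Example (illustrative): for

       asm ("..." : "=r" (a), "=r" (b) : "r" (c));

   the body is a PARALLEL of two SETs of ASM_OPERANDS (plus any clobbers),
   so asm_noperands returns 3: two outputs plus one input.  */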
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;	/* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
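
/* Usage sketch (illustrative): callers size the output arrays with
   asm_noperands first, as check_asm_operands does above:

       int n = asm_noperands (body);
       rtx *ops = XALLOCAVEC (rtx, n);
       const char **cons = XALLOCAVEC (const char *, n);
       decode_asm_operands (body, ops, NULL, cons, NULL, NULL);  */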
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_SCALAR_INT_P (op))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_SCALAR_INT_P (op))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
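
/* Example (illustrative): for *P == (plus (reg) (const_int 4)), the
   first summand yields no constant term, the recursion then reaches the
   CONST_INT, and the returned location is &XEXP (*p, 1).  */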
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}
1995 /* Return 1 if Y is a memory address which contains no side effects
1996 and would remain valid for address space AS after the addition of
1997 a positive integer less than the size of that mode.
1999 We assume that the original address is valid and do not check it.
2000 We do check that it is valid for narrower modes.
2002 If STRICTP is nonzero, we require a strictly valid address,
2003 for the sake of use in reload.c. */
2006 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
2007 addr_space_t as)
2009 enum rtx_code ycode = GET_CODE (y);
2010 rtx z;
2011 rtx y1 = y;
2012 rtx *y2;
2013 int (*addressp) (enum machine_mode, rtx, addr_space_t) =
2014 (strictp ? strict_memory_address_addr_space_p
2015 : memory_address_addr_space_p);
2016 unsigned int mode_sz = GET_MODE_SIZE (mode);
2018 if (CONSTANT_ADDRESS_P (y))
2019 return 1;
2021 /* Adjusting an offsettable address involves changing to a narrower mode.
2022 Make sure that's OK. */
2024 if (mode_dependent_address_p (y, as))
2025 return 0;
2027 enum machine_mode address_mode = GET_MODE (y);
2028 if (address_mode == VOIDmode)
2029 address_mode = targetm.addr_space.address_mode (as);
2030 #ifdef POINTERS_EXTEND_UNSIGNED
2031 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2032 #endif
2034 /* ??? How much offset does an offsettable BLKmode reference need?
2035 Clearly that depends on the situation in which it's being used.
2036 However, the current situation in which we test 0xffffffff is
2037 less than ideal. Caveat user. */
2038 if (mode_sz == 0)
2039 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2041 /* If the expression contains a constant term,
2042 see if it remains valid when max possible offset is added. */
2044 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2046 int good;
2048 y1 = *y2;
2049 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2050 /* Use QImode because an odd displacement may be automatically invalid
2051 for any wider mode. But it should be valid for a single byte. */
2052 good = (*addressp) (QImode, y, as);
2054 /* In any case, restore old contents of memory. */
2055 *y2 = y1;
2056 return good;
2059 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2060 return 0;
2062 /* The offset added here is chosen as the maximum offset that
2063 any instruction could need to add when operating on something
2064 of the specified mode. We assume that if Y and Y+c are
2065 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2066 go inside a LO_SUM here, so we do so as well. */
2067 if (GET_CODE (y) == LO_SUM
2068 && mode != BLKmode
2069 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2070 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2071 plus_constant (address_mode, XEXP (y, 1),
2072 mode_sz - 1));
2073 #ifdef POINTERS_EXTEND_UNSIGNED
2074 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2075 else if (POINTERS_EXTEND_UNSIGNED > 0
2076 && GET_CODE (y) == ZERO_EXTEND
2077 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2078 z = gen_rtx_ZERO_EXTEND (address_mode,
2079 plus_constant (pointer_mode, XEXP (y, 0),
2080 mode_sz - 1));
2081 #endif
2082 else
2083 z = plus_constant (address_mode, y, mode_sz - 1);
2085 /* Use QImode because an odd displacement may be automatically invalid
2086 for any wider mode. But it should be valid for a single byte. */
2087 return (*addressp) (QImode, z, as);
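/* An illustrative aside (not part of recog.c): the test above hinges
   on a single probe.  If address A is valid and A + (size - 1) is
   valid, every intermediate A + d is assumed valid too, so probing
   the maximum displacement in QImode suffices.  A self-contained
   sketch against an invented base+displacement machine whose
   displacements must fit in 8 signed bits:  */

#include <stdbool.h>

struct toy_addr { int base_reg; int disp; };

static bool
toy_valid_address_p (struct toy_addr a)
{
  return a.base_reg >= 0 && a.disp >= -128 && a.disp <= 127;
}

static bool
toy_offsettable_p (struct toy_addr a, int access_size)
{
  struct toy_addr probe = a;
  probe.disp += access_size - 1;	/* Largest offset any piece needs.  */
  return toy_valid_address_p (a) && toy_valid_address_p (probe);
}

/* For example, { r1, 124 } is a valid byte address but is not
   offsettable for an 8-byte access, since displacement 131 no longer
   fits the toy machine's range.  */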
2090 /* Return 1 if ADDR is an address-expression whose effect depends
2091 on the mode of the memory reference it is used in.
2093 ADDRSPACE is the address space associated with the address.
2095 Autoincrement addressing is a typical example of mode-dependence
2096 because the amount of the increment depends on the mode. */
2098 bool
2099 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2101 /* Auto-increment addressing with anything other than post_modify
2102 or pre_modify always introduces a mode dependency. Catch such
2103 cases now instead of deferring to the target. */
2104 if (GET_CODE (addr) == PRE_INC
2105 || GET_CODE (addr) == POST_INC
2106 || GET_CODE (addr) == PRE_DEC
2107 || GET_CODE (addr) == POST_DEC)
2108 return true;
2110 return targetm.mode_dependent_address_p (addr, addrspace);
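/* An illustrative aside (not part of recog.c): bare auto-increment is
   mode-dependent because its implicit step is the access size itself.
   A standalone sketch with invented codes; the real target hook gets
   the final say over everything this catch-all does not reject:  */

#include <stdbool.h>

enum toy_addr_code { TOY_REG_ADDR, TOY_PRE_INC, TOY_POST_INC,
		     TOY_PRE_DEC, TOY_POST_DEC, TOY_POST_MODIFY };

static bool
toy_mode_dependent_address_p (enum toy_addr_code code)
{
  switch (code)
    {
    case TOY_PRE_INC: case TOY_POST_INC:
    case TOY_PRE_DEC: case TOY_POST_DEC:
      /* The register adjustment equals the size of the access, so the
	 same address expression behaves differently in QImode and
	 SImode.  */
      return true;
    default:
      /* TOY_POST_MODIFY spells out its adjustment explicitly, and a
	 plain register address has no side effect at all.  */
      return false;
    }
}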
2113 /* Like extract_insn, but save the extracted insn and don't extract it again
2114 when called again for the same insn, expecting that recog_data still
2115 contains valid information. This is used primarily by the gen_attr
2116 infrastructure, which often extracts the same insn again and again. */
2117 void
2118 extract_insn_cached (rtx insn)
2120 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2121 return;
2122 extract_insn (insn);
2123 recog_data.insn = insn;
2126 /* Do cached extract_insn, constrain_operands and complain about failures.
2127 Used by insn_attrtab. */
2128 void
2129 extract_constrain_insn_cached (rtx insn)
2131 extract_insn_cached (insn);
2132 if (which_alternative == -1
2133 && !constrain_operands (reload_completed))
2134 fatal_insn_not_found (insn);
2137 /* Do cached constrain_operands and complain about failures. */
2138 int
2139 constrain_operands_cached (int strict)
2141 if (which_alternative == -1)
2142 return constrain_operands (strict);
2143 else
2144 return 1;
2147 /* Analyze INSN and fill in recog_data. */
2149 void
2150 extract_insn (rtx insn)
2152 int i;
2153 int icode;
2154 int noperands;
2155 rtx body = PATTERN (insn);
2157 recog_data.n_operands = 0;
2158 recog_data.n_alternatives = 0;
2159 recog_data.n_dups = 0;
2160 recog_data.is_asm = false;
2162 switch (GET_CODE (body))
2164 case USE:
2165 case CLOBBER:
2166 case ASM_INPUT:
2167 case ADDR_VEC:
2168 case ADDR_DIFF_VEC:
2169 case VAR_LOCATION:
2170 return;
2172 case SET:
2173 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2174 goto asm_insn;
2175 else
2176 goto normal_insn;
2177 case PARALLEL:
2178 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2179 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2180 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2181 goto asm_insn;
2182 else
2183 goto normal_insn;
2184 case ASM_OPERANDS:
2185 asm_insn:
2186 recog_data.n_operands = noperands = asm_noperands (body);
2187 if (noperands >= 0)
2189 /* This insn is an `asm' with operands. */
2191 /* expand_asm_operands makes sure there aren't too many operands. */
2192 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2194 /* Now get the operand values and constraints out of the insn. */
2195 decode_asm_operands (body, recog_data.operand,
2196 recog_data.operand_loc,
2197 recog_data.constraints,
2198 recog_data.operand_mode, NULL);
2199 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2200 if (noperands > 0)
2202 const char *p = recog_data.constraints[0];
2203 recog_data.n_alternatives = 1;
2204 while (*p)
2205 recog_data.n_alternatives += (*p++ == ',');
2207 recog_data.is_asm = true;
2208 break;
2210 fatal_insn_not_found (insn);
2212 default:
2213 normal_insn:
2214 /* Ordinary insn: recognize it, get the operands via insn_extract
2215 and get the constraints. */
2217 icode = recog_memoized (insn);
2218 if (icode < 0)
2219 fatal_insn_not_found (insn);
2221 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2222 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2223 recog_data.n_dups = insn_data[icode].n_dups;
2225 insn_extract (insn);
2227 for (i = 0; i < noperands; i++)
2229 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2230 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2231 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2232 /* A VOIDmode match_operand gets its mode from its real operand. */
2233 if (recog_data.operand_mode[i] == VOIDmode)
2234 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2237 for (i = 0; i < noperands; i++)
2238 recog_data.operand_type[i]
2239 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2240 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2241 : OP_IN);
2243 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2245 if (INSN_CODE (insn) < 0)
2246 for (i = 0; i < recog_data.n_alternatives; i++)
2247 recog_data.alternative_enabled_p[i] = true;
2248 else
2250 recog_data.insn = insn;
2251 for (i = 0; i < recog_data.n_alternatives; i++)
2253 which_alternative = i;
2254 recog_data.alternative_enabled_p[i]
2255 = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2259 recog_data.insn = NULL;
2260 which_alternative = -1;
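/* An illustrative aside (not part of recog.c): for an asm, the number
   of constraint alternatives is not tabulated anywhere, so the loop
   above derives it from the first operand's constraint string, in
   which alternatives are comma-separated.  The same count as a
   standalone helper:  */

static int
toy_count_alternatives (const char *constraint)
{
  int n = 1;
  while (*constraint)
    n += (*constraint++ == ',');	/* "r,m,i" yields 3.  */
  return n;
}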
2263 /* After calling extract_insn, you can use this function to extract some
2264 information from the constraint strings into a more usable form.
2265 The collected data is stored in recog_op_alt. */
2266 void
2267 preprocess_constraints (void)
2269 int i;
2271 for (i = 0; i < recog_data.n_operands; i++)
2272 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2273 * sizeof (struct operand_alternative)));
2275 for (i = 0; i < recog_data.n_operands; i++)
2277 int j;
2278 struct operand_alternative *op_alt;
2279 const char *p = recog_data.constraints[i];
2281 op_alt = recog_op_alt[i];
2283 for (j = 0; j < recog_data.n_alternatives; j++)
2285 op_alt[j].cl = NO_REGS;
2286 op_alt[j].constraint = p;
2287 op_alt[j].matches = -1;
2288 op_alt[j].matched = -1;
2290 if (!recog_data.alternative_enabled_p[j])
2292 p = skip_alternative (p);
2293 continue;
2296 if (*p == '\0' || *p == ',')
2298 op_alt[j].anything_ok = 1;
2299 continue;
2302 for (;;)
2304 char c = *p;
2305 if (c == '#')
2306 do
2307 c = *++p;
2308 while (c != ',' && c != '\0');
2309 if (c == ',' || c == '\0')
2311 p++;
2312 break;
2315 switch (c)
2317 case '=': case '+': case '*': case '%':
2318 case 'E': case 'F': case 'G': case 'H':
2319 case 's': case 'i': case 'n':
2320 case 'I': case 'J': case 'K': case 'L':
2321 case 'M': case 'N': case 'O': case 'P':
2322 /* These don't say anything we care about. */
2323 break;
2325 case '?':
2326 op_alt[j].reject += 6;
2327 break;
2328 case '!':
2329 op_alt[j].reject += 600;
2330 break;
2331 case '&':
2332 op_alt[j].earlyclobber = 1;
2333 break;
2335 case '0': case '1': case '2': case '3': case '4':
2336 case '5': case '6': case '7': case '8': case '9':
2338 char *end;
2339 op_alt[j].matches = strtoul (p, &end, 10);
2340 recog_op_alt[op_alt[j].matches][j].matched = i;
2341 p = end;
2343 continue;
2345 case TARGET_MEM_CONSTRAINT:
2346 op_alt[j].memory_ok = 1;
2347 break;
2348 case '<':
2349 op_alt[j].decmem_ok = 1;
2350 break;
2351 case '>':
2352 op_alt[j].incmem_ok = 1;
2353 break;
2354 case 'V':
2355 op_alt[j].nonoffmem_ok = 1;
2356 break;
2357 case 'o':
2358 op_alt[j].offmem_ok = 1;
2359 break;
2360 case 'X':
2361 op_alt[j].anything_ok = 1;
2362 break;
2364 case 'p':
2365 op_alt[j].is_address = 1;
2366 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2367 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2368 ADDRESS, SCRATCH)];
2369 break;
2371 case 'g':
2372 case 'r':
2373 op_alt[j].cl =
2374 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2375 break;
2377 default:
2378 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2380 op_alt[j].memory_ok = 1;
2381 break;
2383 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2385 op_alt[j].is_address = 1;
2386 op_alt[j].cl
2387 = (reg_class_subunion
2388 [(int) op_alt[j].cl]
2389 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2390 ADDRESS, SCRATCH)]);
2391 break;
2394 op_alt[j].cl
2395 = (reg_class_subunion
2396 [(int) op_alt[j].cl]
2397 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2398 break;
2400 p += CONSTRAINT_LEN (c, p);
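/* An illustrative aside (not part of recog.c): a reduced standalone
   model of the scan above.  It walks one operand's constraint string
   and records, per alternative, a few of the properties the real pass
   collects; only a handful of constraint letters are modelled and the
   struct is invented.  */

#include <stdbool.h>
#include <string.h>

struct toy_op_alt
{
  bool memory_ok, anything_ok, earlyclobber;
  int reject;
};

static const char *
toy_parse_one_alternative (const char *p, struct toy_op_alt *alt)
{
  memset (alt, 0, sizeof *alt);
  for (; *p && *p != ','; p++)
    switch (*p)
      {
      case '#':			/* Rest of alternative is ignored.  */
	while (p[1] && p[1] != ',')
	  p++;
	break;
      case '?': alt->reject += 6;   break;	/* Mildly disparaged.  */
      case '!': alt->reject += 600; break;	/* Severely disparaged.  */
      case '&': alt->earlyclobber = true; break;
      case 'm': alt->memory_ok = true;    break;
      case 'X': alt->anything_ok = true;  break;
      default:  break;		/* Letters this model ignores.  */
      }
  return *p == ',' ? p + 1 : p;	/* Start of the next alternative.  */
}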
2406 /* Check the operands of an insn against the insn's operand constraints
2407 and return 1 if they are valid.
2408 The information about the insn's operands, constraints, operand modes
2409 etc. is obtained from the global variables set up by extract_insn.
2411 WHICH_ALTERNATIVE is set to a number which indicates which
2412 alternative of constraints was matched: 0 for the first alternative,
2413 1 for the next, etc.
2415 In addition, when two operands are required to match
2416 and it happens that the output operand is (reg) while the
2417 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2418 make the output operand look like the input.
2419 This is because the output operand is the one the template will print.
2421 This is used in final, just before printing the assembler code and by
2422 the routines that determine an insn's attribute.
2424 If STRICT is a positive nonzero value, it means that we have been
2425 called after reload has been completed. In that case, we must
2426 do all checks strictly. If it is zero, it means that we have been called
2427 before reload has completed. In that case, we first try to see if we can
2428 find an alternative that matches strictly. If not, we try again, this
2429 time assuming that reload will fix up the insn. This provides a "best
2430 guess" for the alternative and is used to compute attributes of insns prior
2431 to reload. A negative value of STRICT is used for this internal call. */
2433 struct funny_match
2435 int this_op, other;
2438 int
2439 constrain_operands (int strict)
2441 const char *constraints[MAX_RECOG_OPERANDS];
2442 int matching_operands[MAX_RECOG_OPERANDS];
2443 int earlyclobber[MAX_RECOG_OPERANDS];
2444 int c;
2446 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2447 int funny_match_index;
2449 which_alternative = 0;
2450 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2451 return 1;
2453 for (c = 0; c < recog_data.n_operands; c++)
2455 constraints[c] = recog_data.constraints[c];
2456 matching_operands[c] = -1;
2459 do
2461 int seen_earlyclobber_at = -1;
2462 int opno;
2463 int lose = 0;
2464 funny_match_index = 0;
2466 if (!recog_data.alternative_enabled_p[which_alternative])
2468 int i;
2470 for (i = 0; i < recog_data.n_operands; i++)
2471 constraints[i] = skip_alternative (constraints[i]);
2473 which_alternative++;
2474 continue;
2477 for (opno = 0; opno < recog_data.n_operands; opno++)
2479 rtx op = recog_data.operand[opno];
2480 enum machine_mode mode = GET_MODE (op);
2481 const char *p = constraints[opno];
2482 int offset = 0;
2483 int win = 0;
2484 int val;
2485 int len;
2487 earlyclobber[opno] = 0;
2489 /* A unary operator may be accepted by the predicate, but it
2490 is irrelevant for matching constraints. */
2491 if (UNARY_P (op))
2492 op = XEXP (op, 0);
2494 if (GET_CODE (op) == SUBREG)
2496 if (REG_P (SUBREG_REG (op))
2497 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2498 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2499 GET_MODE (SUBREG_REG (op)),
2500 SUBREG_BYTE (op),
2501 GET_MODE (op));
2502 op = SUBREG_REG (op);
2505 /* An empty constraint or empty alternative
2506 allows anything which matched the pattern. */
2507 if (*p == 0 || *p == ',')
2508 win = 1;
2509 else
2510 do
2511 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2513 case '\0':
2514 len = 0;
2515 break;
2516 case ',':
2517 c = '\0';
2518 break;
2520 case '?': case '!': case '*': case '%':
2521 case '=': case '+':
2522 break;
2524 case '#':
2525 /* Ignore rest of this alternative as far as
2526 constraint checking is concerned. */
2527 do
2528 p++;
2529 while (*p && *p != ',');
2530 len = 0;
2531 break;
2533 case '&':
2534 earlyclobber[opno] = 1;
2535 if (seen_earlyclobber_at < 0)
2536 seen_earlyclobber_at = opno;
2537 break;
2539 case '0': case '1': case '2': case '3': case '4':
2540 case '5': case '6': case '7': case '8': case '9':
2542 /* This operand must be the same as a previous one.
2543 This kind of constraint is used for instructions such
2544 as add when they take only two operands.
2546 Note that the lower-numbered operand is passed first.
2548 If we are not testing strictly, assume that this
2549 constraint will be satisfied. */
2551 char *end;
2552 int match;
2554 match = strtoul (p, &end, 10);
2555 p = end;
2557 if (strict < 0)
2558 val = 1;
2559 else
2561 rtx op1 = recog_data.operand[match];
2562 rtx op2 = recog_data.operand[opno];
2564 /* A unary operator may be accepted by the predicate,
2565 but it is irrelevant for matching constraints. */
2566 if (UNARY_P (op1))
2567 op1 = XEXP (op1, 0);
2568 if (UNARY_P (op2))
2569 op2 = XEXP (op2, 0);
2571 val = operands_match_p (op1, op2);
2574 matching_operands[opno] = match;
2575 matching_operands[match] = opno;
2577 if (val != 0)
2578 win = 1;
2580 /* If output is *x and input is *--x, arrange later
2581 to change the output to *--x as well, since the
2582 output op is the one that will be printed. */
2583 if (val == 2 && strict > 0)
2585 funny_match[funny_match_index].this_op = opno;
2586 funny_match[funny_match_index++].other = match;
2589 len = 0;
2590 break;
2592 case 'p':
2593 /* p is used for address_operands. When we are called by
2594 gen_reload, no one will have checked that the address is
2595 strictly valid, i.e., that all pseudos requiring hard regs
2596 have gotten them. */
2597 if (strict <= 0
2598 || (strict_memory_address_p (recog_data.operand_mode[opno],
2599 op)))
2600 win = 1;
2601 break;
2603 /* No need to check general_operand again;
2604 it was done in insn-recog.c. Well, except that reload
2605 doesn't check the validity of its replacements, but
2606 that should only matter when there's a bug. */
2607 case 'g':
2608 /* Anything goes unless it is a REG and really has a hard reg
2609 but the hard reg is not in the class GENERAL_REGS. */
2610 if (REG_P (op))
2612 if (strict < 0
2613 || GENERAL_REGS == ALL_REGS
2614 || (reload_in_progress
2615 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2616 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2617 win = 1;
2619 else if (strict < 0 || general_operand (op, mode))
2620 win = 1;
2621 break;
2623 case 'X':
2624 /* This is used for a MATCH_SCRATCH in the cases when
2625 we don't actually need anything. So anything goes
2626 any time. */
2627 win = 1;
2628 break;
2630 case TARGET_MEM_CONSTRAINT:
2631 /* Memory operands must be valid, to the extent
2632 required by STRICT. */
2633 if (MEM_P (op))
2635 if (strict > 0
2636 && !strict_memory_address_addr_space_p
2637 (GET_MODE (op), XEXP (op, 0),
2638 MEM_ADDR_SPACE (op)))
2639 break;
2640 if (strict == 0
2641 && !memory_address_addr_space_p
2642 (GET_MODE (op), XEXP (op, 0),
2643 MEM_ADDR_SPACE (op)))
2644 break;
2645 win = 1;
2647 /* Before reload, accept what reload can turn into mem. */
2648 else if (strict < 0 && CONSTANT_P (op))
2649 win = 1;
2650 /* During reload, accept a pseudo */
2651 else if (reload_in_progress && REG_P (op)
2652 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2653 win = 1;
2654 break;
2656 case '<':
2657 if (MEM_P (op)
2658 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2659 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2660 win = 1;
2661 break;
2663 case '>':
2664 if (MEM_P (op)
2665 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2666 || GET_CODE (XEXP (op, 0)) == POST_INC))
2667 win = 1;
2668 break;
2670 case 'E':
2671 case 'F':
2672 if (CONST_DOUBLE_AS_FLOAT_P (op)
2673 || (GET_CODE (op) == CONST_VECTOR
2674 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2675 win = 1;
2676 break;
2678 case 'G':
2679 case 'H':
2680 if (CONST_DOUBLE_AS_FLOAT_P (op)
2681 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2682 win = 1;
2683 break;
2685 case 's':
2686 if (CONST_SCALAR_INT_P (op))
2687 break;
2688 case 'i':
2689 if (CONSTANT_P (op))
2690 win = 1;
2691 break;
2693 case 'n':
2694 if (CONST_SCALAR_INT_P (op))
2695 win = 1;
2696 break;
2698 case 'I':
2699 case 'J':
2700 case 'K':
2701 case 'L':
2702 case 'M':
2703 case 'N':
2704 case 'O':
2705 case 'P':
2706 if (CONST_INT_P (op)
2707 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2708 win = 1;
2709 break;
2711 case 'V':
2712 if (MEM_P (op)
2713 && ((strict > 0 && ! offsettable_memref_p (op))
2714 || (strict < 0
2715 && !(CONSTANT_P (op) || MEM_P (op)))
2716 || (reload_in_progress
2717 && !(REG_P (op)
2718 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2719 win = 1;
2720 break;
2722 case 'o':
2723 if ((strict > 0 && offsettable_memref_p (op))
2724 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2725 /* Before reload, accept what reload can handle. */
2726 || (strict < 0
2727 && (CONSTANT_P (op) || MEM_P (op)))
2728 /* During reload, accept a pseudo */
2729 || (reload_in_progress && REG_P (op)
2730 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2731 win = 1;
2732 break;
2734 default:
2736 enum reg_class cl;
2738 cl = (c == 'r'
2739 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2740 if (cl != NO_REGS)
2742 if (strict < 0
2743 || (strict == 0
2744 && REG_P (op)
2745 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2746 || (strict == 0 && GET_CODE (op) == SCRATCH)
2747 || (REG_P (op)
2748 && reg_fits_class_p (op, cl, offset, mode)))
2749 win = 1;
2751 #ifdef EXTRA_CONSTRAINT_STR
2752 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2753 win = 1;
2755 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2756 /* Every memory operand can be reloaded to fit. */
2757 && ((strict < 0 && MEM_P (op))
2758 /* Before reload, accept what reload can turn
2759 into mem. */
2760 || (strict < 0 && CONSTANT_P (op))
2761 /* During reload, accept a pseudo */
2762 || (reload_in_progress && REG_P (op)
2763 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2764 win = 1;
2765 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2766 /* Every address operand can be reloaded to fit. */
2767 && strict < 0)
2768 win = 1;
2769 /* Cater to architectures like IA-64 that define extra memory
2770 constraints without using define_memory_constraint. */
2771 else if (reload_in_progress
2772 && REG_P (op)
2773 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2774 && reg_renumber[REGNO (op)] < 0
2775 && reg_equiv_mem (REGNO (op)) != 0
2776 && EXTRA_CONSTRAINT_STR
2777 (reg_equiv_mem (REGNO (op)), c, p))
2778 win = 1;
2779 #endif
2780 break;
2783 while (p += len, c);
2785 constraints[opno] = p;
2786 /* If this operand did not win somehow,
2787 this alternative loses. */
2788 if (! win)
2789 lose = 1;
2791 /* This alternative won; the operands are ok.
2792 Change whichever operands this alternative says to change. */
2793 if (! lose)
2795 int opno, eopno;
2797 /* See if any earlyclobber operand conflicts with some other
2798 operand. */
2800 if (strict > 0 && seen_earlyclobber_at >= 0)
2801 for (eopno = seen_earlyclobber_at;
2802 eopno < recog_data.n_operands;
2803 eopno++)
2804 /* Ignore earlyclobber operands now in memory,
2805 because we would often report failure when we have
2806 two memory operands, one of which was formerly a REG. */
2807 if (earlyclobber[eopno]
2808 && REG_P (recog_data.operand[eopno]))
2809 for (opno = 0; opno < recog_data.n_operands; opno++)
2810 if ((MEM_P (recog_data.operand[opno])
2811 || recog_data.operand_type[opno] != OP_OUT)
2812 && opno != eopno
2813 /* Ignore things like match_operator operands. */
2814 && *recog_data.constraints[opno] != 0
2815 && ! (matching_operands[opno] == eopno
2816 && operands_match_p (recog_data.operand[opno],
2817 recog_data.operand[eopno]))
2818 && ! safe_from_earlyclobber (recog_data.operand[opno],
2819 recog_data.operand[eopno]))
2820 lose = 1;
2822 if (! lose)
2824 while (--funny_match_index >= 0)
2826 recog_data.operand[funny_match[funny_match_index].other]
2827 = recog_data.operand[funny_match[funny_match_index].this_op];
2830 #ifdef AUTO_INC_DEC
2831 /* For operands without < or > constraints reject side-effects. */
2832 if (recog_data.is_asm)
2834 for (opno = 0; opno < recog_data.n_operands; opno++)
2835 if (MEM_P (recog_data.operand[opno]))
2836 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2838 case PRE_INC:
2839 case POST_INC:
2840 case PRE_DEC:
2841 case POST_DEC:
2842 case PRE_MODIFY:
2843 case POST_MODIFY:
2844 if (strchr (recog_data.constraints[opno], '<') == NULL
2845 && strchr (recog_data.constraints[opno], '>')
2846 == NULL)
2847 return 0;
2848 break;
2849 default:
2850 break;
2853 #endif
2854 return 1;
2858 which_alternative++;
2860 while (which_alternative < recog_data.n_alternatives);
2862 which_alternative = -1;
2863 /* If we are about to reject this, but we are not to test strictly,
2864 try a very loose test. Only return failure if it fails also. */
2865 if (strict == 0)
2866 return constrain_operands (-1);
2867 else
2868 return 0;
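/* An illustrative aside (not part of recog.c): the STRICT protocol
   documented before constrain_operands, reduced to its skeleton.
   toy_match_alternatives is an invented stand-in for the whole
   alternative-matching loop above.  */

#include <stdbool.h>

static bool
toy_match_alternatives (int strict)
{
  return strict < 0;		/* Pretend only the loose test passes.  */
}

static bool
toy_constrain_operands (int strict)
{
  if (toy_match_alternatives (strict))
    return true;
  /* About to fail: when not testing strictly, retry very loosely
     (assuming reload will fix the insn up) and fail only if that
     fails too, mirroring the tail of the real function.  */
  if (strict == 0)
    return toy_constrain_operands (-1);
  return false;
}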
2871 /* Return true iff OPERAND (assumed to be a REG rtx)
2872 is a hard reg in class CLASS when its regno is offset by OFFSET
2873 and changed to mode MODE.
2874 If REG occupies multiple hard regs, all of them must be in CLASS. */
2876 bool
2877 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2878 enum machine_mode mode)
2880 unsigned int regno = REGNO (operand);
2882 if (cl == NO_REGS)
2883 return false;
2885 /* Regno must not be a pseudo register. Offset may be negative. */
2886 return (HARD_REGISTER_NUM_P (regno)
2887 && HARD_REGISTER_NUM_P (regno + offset)
2888 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2889 regno + offset));
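/* An illustrative aside (not part of recog.c): the "all of them" rule
   above as a standalone check.  The register class is modelled as a
   bitmask over 32 invented hard registers.  */

#include <stdbool.h>
#include <stdint.h>

static bool
toy_reg_fits_class_p (uint32_t class_mask, int regno, int nregs)
{
  int i;
  for (i = 0; i < nregs; i++)
    if (regno + i >= 32
	|| !(class_mask & ((uint32_t) 1 << (regno + i))))
      return false;		/* Some covered register is outside.  */
  return true;
}

/* E.g. a two-register value starting in r7 fits only if both r7 and
   r8 are members of the class.  */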
2892 /* Split a single instruction. Helper function for split_all_insns and
2893 split_all_insns_noflow. Return last insn in the sequence if successful,
2894 or NULL if unsuccessful. */
2896 static rtx
2897 split_insn (rtx insn)
2899 /* Split insns here to get max fine-grain parallelism. */
2900 rtx first = PREV_INSN (insn);
2901 rtx last = try_split (PATTERN (insn), insn, 1);
2902 rtx insn_set, last_set, note;
2904 if (last == insn)
2905 return NULL_RTX;
2907 /* If the original instruction was a single set that was known to be
2908 equivalent to a constant, see if we can say the same about the last
2909 instruction in the split sequence. The two instructions must set
2910 the same destination. */
2911 insn_set = single_set (insn);
2912 if (insn_set)
2914 last_set = single_set (last);
2915 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2917 note = find_reg_equal_equiv_note (insn);
2918 if (note && CONSTANT_P (XEXP (note, 0)))
2919 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2920 else if (CONSTANT_P (SET_SRC (insn_set)))
2921 set_unique_reg_note (last, REG_EQUAL,
2922 copy_rtx (SET_SRC (insn_set)));
2926 /* try_split returns the NOTE that INSN became. */
2927 SET_INSN_DELETED (insn);
2929 /* ??? Coddle to md files that generate subregs in post-reload
2930 splitters instead of computing the proper hard register. */
2931 if (reload_completed && first != last)
2933 first = NEXT_INSN (first);
2934 for (;;)
2936 if (INSN_P (first))
2937 cleanup_subreg_operands (first);
2938 if (first == last)
2939 break;
2940 first = NEXT_INSN (first);
2944 return last;
2947 /* Split all insns in the function. */
2949 void
2950 split_all_insns (void)
2952 sbitmap blocks;
2953 bool changed;
2954 basic_block bb;
2956 blocks = sbitmap_alloc (last_basic_block);
2957 bitmap_clear (blocks);
2958 changed = false;
2960 FOR_EACH_BB_REVERSE (bb)
2962 rtx insn, next;
2963 bool finish = false;
2965 rtl_profile_for_bb (bb);
2966 for (insn = BB_HEAD (bb); !finish ; insn = next)
2968 /* Can't use `next_real_insn' because that might go across
2969 CODE_LABELS and short-out basic blocks. */
2970 next = NEXT_INSN (insn);
2971 finish = (insn == BB_END (bb));
2972 if (INSN_P (insn))
2974 rtx set = single_set (insn);
2976 /* Don't split no-op move insns. These should silently
2977 disappear later in final. Splitting such insns would
2978 break the code that handles LIBCALL blocks. */
2979 if (set && set_noop_p (set))
2981 /* Nops get in the way while scheduling, so delete them
2982 now if register allocation has already been done. It
2983 is too risky to try to do this before register
2984 allocation, and there are unlikely to be very many
2985 nops then anyway. */
2986 if (reload_completed)
2987 delete_insn_and_edges (insn);
2989 else
2991 if (split_insn (insn))
2993 bitmap_set_bit (blocks, bb->index);
2994 changed = true;
3001 default_rtl_profile ();
3002 if (changed)
3003 find_many_sub_basic_blocks (blocks);
3005 #ifdef ENABLE_CHECKING
3006 verify_flow_info ();
3007 #endif
3009 sbitmap_free (blocks);
3012 /* Same as split_all_insns, but do not expect CFG to be available.
3013 Used by machine dependent reorg passes. */
3015 unsigned int
3016 split_all_insns_noflow (void)
3018 rtx next, insn;
3020 for (insn = get_insns (); insn; insn = next)
3022 next = NEXT_INSN (insn);
3023 if (INSN_P (insn))
3025 /* Don't split no-op move insns. These should silently
3026 disappear later in final. Splitting such insns would
3027 break the code that handles LIBCALL blocks. */
3028 rtx set = single_set (insn);
3029 if (set && set_noop_p (set))
3031 /* Nops get in the way while scheduling, so delete them
3032 now if register allocation has already been done. It
3033 is too risky to try to do this before register
3034 allocation, and there are unlikely to be very many
3035 nops then anyway.
3037 ??? Should we use delete_insn when the CFG isn't valid? */
3038 if (reload_completed)
3039 delete_insn_and_edges (insn);
3041 else
3042 split_insn (insn);
3045 return 0;
3048 #ifdef HAVE_peephole2
3049 struct peep2_insn_data
3051 rtx insn;
3052 regset live_before;
3055 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3056 static int peep2_current;
3058 static bool peep2_do_rebuild_jump_labels;
3059 static bool peep2_do_cleanup_cfg;
3061 /* The number of instructions available to match a peep2. */
3062 int peep2_current_count;
3064 /* A non-insn marker indicating the last insn of the block.
3065 The live_before regset for this element is correct, indicating
3066 DF_LIVE_OUT for the block. */
3067 #define PEEP2_EOB pc_rtx
3069 /* Wrap N to fit into the peep2_insn_data buffer. */
3071 static int
3072 peep2_buf_position (int n)
3074 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3075 n -= MAX_INSNS_PER_PEEP2 + 1;
3076 return n;
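/* An illustrative aside (not part of recog.c): peep2_insn_data is a
   ring of MAX_INSNS_PER_PEEP2 + 1 slots, and the helper above wraps
   an index into it.  The same wrap with an invented size; since
   callers never overshoot by more than one full lap, a single
   subtraction replaces a general modulo:  */

#define TOY_BUF_SIZE 4		/* Stands in for MAX_INSNS_PER_PEEP2 + 1.  */

static int
toy_buf_position (int n)
{
  if (n >= TOY_BUF_SIZE)
    n -= TOY_BUF_SIZE;
  return n;
}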
3079 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3080 does not exist. Used by the recognizer to find the next insn to match
3081 in a multi-insn pattern. */
3083 rtx
3084 peep2_next_insn (int n)
3086 gcc_assert (n <= peep2_current_count);
3088 n = peep2_buf_position (peep2_current + n);
3090 return peep2_insn_data[n].insn;
3093 /* Return true if REGNO is dead before the Nth non-note insn
3094 after `current'. */
3096 int
3097 peep2_regno_dead_p (int ofs, int regno)
3099 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3101 ofs = peep2_buf_position (peep2_current + ofs);
3103 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3105 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3108 /* Similarly for a REG. */
3110 int
3111 peep2_reg_dead_p (int ofs, rtx reg)
3113 int regno, n;
3115 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3117 ofs = peep2_buf_position (peep2_current + ofs);
3119 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3121 regno = REGNO (reg);
3122 n = hard_regno_nregs[regno][GET_MODE (reg)];
3123 while (--n >= 0)
3124 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3125 return 0;
3126 return 1;
3129 /* Try to find a hard register of mode MODE, matching the register class in
3130 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3131 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3132 in which case the only condition is that the register must be available
3133 before CURRENT_INSN.
3134 Registers that already have bits set in REG_SET will not be considered.
3136 If an appropriate register is available, it will be returned and the
3137 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3138 returned. */
3140 rtx
3141 peep2_find_free_register (int from, int to, const char *class_str,
3142 enum machine_mode mode, HARD_REG_SET *reg_set)
3144 static int search_ofs;
3145 enum reg_class cl;
3146 HARD_REG_SET live;
3147 df_ref *def_rec;
3148 int i;
3150 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3151 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3153 from = peep2_buf_position (peep2_current + from);
3154 to = peep2_buf_position (peep2_current + to);
3156 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3157 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3159 while (from != to)
3161 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3163 /* Don't use registers set or clobbered by the insn. */
3164 for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3165 *def_rec; def_rec++)
3166 SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3168 from = peep2_buf_position (from + 1);
3171 cl = (class_str[0] == 'r' ? GENERAL_REGS
3172 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3174 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3176 int raw_regno, regno, success, j;
3178 /* Distribute the free registers as much as possible. */
3179 raw_regno = search_ofs + i;
3180 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3181 raw_regno -= FIRST_PSEUDO_REGISTER;
3182 #ifdef REG_ALLOC_ORDER
3183 regno = reg_alloc_order[raw_regno];
3184 #else
3185 regno = raw_regno;
3186 #endif
3188 /* Can it support the mode we need? */
3189 if (! HARD_REGNO_MODE_OK (regno, mode))
3190 continue;
3192 success = 1;
3193 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3195 /* Don't allocate fixed registers. */
3196 if (fixed_regs[regno + j])
3198 success = 0;
3199 break;
3201 /* Don't allocate global registers. */
3202 if (global_regs[regno + j])
3204 success = 0;
3205 break;
3207 /* Make sure the register is of the right class. */
3208 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3210 success = 0;
3211 break;
3213 /* And that we don't create an extra save/restore. */
3214 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3216 success = 0;
3217 break;
3220 if (! targetm.hard_regno_scratch_ok (regno + j))
3222 success = 0;
3223 break;
3226 /* And we don't clobber traceback for noreturn functions. */
3227 if ((regno + j == FRAME_POINTER_REGNUM
3228 || regno + j == HARD_FRAME_POINTER_REGNUM)
3229 && (! reload_completed || frame_pointer_needed))
3231 success = 0;
3232 break;
3235 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3236 || TEST_HARD_REG_BIT (live, regno + j))
3238 success = 0;
3239 break;
3243 if (success)
3245 add_to_hard_reg_set (reg_set, mode, regno);
3247 /* Start the next search with the next register. */
3248 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3249 raw_regno = 0;
3250 search_ofs = raw_regno;
3252 return gen_rtx_REG (mode, regno);
3256 search_ofs = 0;
3257 return NULL_RTX;
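/* An illustrative aside (not part of recog.c): the static search_ofs
   rotation above spreads scratch registers around instead of always
   handing out the lowest-numbered free one.  A self-contained sketch
   over 16 invented registers tracked in a bitmask:  */

#include <stdint.h>

#define TOY_NREGS 16

static int toy_search_ofs;	/* Persists across calls, like search_ofs.  */

static int
toy_find_free_register (uint16_t *busy)
{
  int i;
  for (i = 0; i < TOY_NREGS; i++)
    {
      int regno = (toy_search_ofs + i) % TOY_NREGS;
      if (!(*busy & (1u << regno)))
	{
	  *busy |= (uint16_t) (1u << regno);	/* Claim it.  */
	  toy_search_ofs = (regno + 1) % TOY_NREGS;	/* Rotate.  */
	  return regno;
	}
    }
  return -1;			/* Nothing free.  */
}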
3260 /* Forget all currently tracked instructions, only remember current
3261 LIVE regset. */
3263 static void
3264 peep2_reinit_state (regset live)
3266 int i;
3268 /* Indicate that all slots except the last hold invalid data. */
3269 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3270 peep2_insn_data[i].insn = NULL_RTX;
3271 peep2_current_count = 0;
3273 /* Indicate that the last slot contains live_after data. */
3274 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3275 peep2_current = MAX_INSNS_PER_PEEP2;
3277 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3280 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3281 starting at INSN. Perform the replacement, removing the old insns and
3282 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3283 if the replacement is rejected. */
3285 static rtx
3286 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3288 int i;
3289 rtx last, eh_note, as_note, before_try, x;
3290 rtx old_insn, new_insn;
3291 bool was_call = false;
3293 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3294 match more than one insn, or to be split into more than one insn. */
3295 old_insn = peep2_insn_data[peep2_current].insn;
3296 if (RTX_FRAME_RELATED_P (old_insn))
3298 bool any_note = false;
3299 rtx note;
3301 if (match_len != 0)
3302 return NULL;
3304 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3305 may be in the stream for the purpose of register allocation. */
3306 if (active_insn_p (attempt))
3307 new_insn = attempt;
3308 else
3309 new_insn = next_active_insn (attempt);
3310 if (next_active_insn (new_insn))
3311 return NULL;
3313 /* We have a 1-1 replacement. Copy over any frame-related info. */
3314 RTX_FRAME_RELATED_P (new_insn) = 1;
3316 /* Allow the backend to fill in a note during the split. */
3317 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3318 switch (REG_NOTE_KIND (note))
3320 case REG_FRAME_RELATED_EXPR:
3321 case REG_CFA_DEF_CFA:
3322 case REG_CFA_ADJUST_CFA:
3323 case REG_CFA_OFFSET:
3324 case REG_CFA_REGISTER:
3325 case REG_CFA_EXPRESSION:
3326 case REG_CFA_RESTORE:
3327 case REG_CFA_SET_VDRAP:
3328 any_note = true;
3329 break;
3330 default:
3331 break;
3334 /* If the backend didn't supply a note, copy one over. */
3335 if (!any_note)
3336 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3337 switch (REG_NOTE_KIND (note))
3339 case REG_FRAME_RELATED_EXPR:
3340 case REG_CFA_DEF_CFA:
3341 case REG_CFA_ADJUST_CFA:
3342 case REG_CFA_OFFSET:
3343 case REG_CFA_REGISTER:
3344 case REG_CFA_EXPRESSION:
3345 case REG_CFA_RESTORE:
3346 case REG_CFA_SET_VDRAP:
3347 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3348 any_note = true;
3349 break;
3350 default:
3351 break;
3354 /* If there still isn't a note, make sure the unwind info sees the
3355 same expression as before the split. */
3356 if (!any_note)
3358 rtx old_set, new_set;
3360 /* The old insn had better have been simple, or annotated. */
3361 old_set = single_set (old_insn);
3362 gcc_assert (old_set != NULL);
3364 new_set = single_set (new_insn);
3365 if (!new_set || !rtx_equal_p (new_set, old_set))
3366 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3369 /* Copy prologue/epilogue status. This is required in order to keep
3370 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3371 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3374 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3375 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3376 cfg-related call notes. */
3377 for (i = 0; i <= match_len; ++i)
3379 int j;
3380 rtx note;
3382 j = peep2_buf_position (peep2_current + i);
3383 old_insn = peep2_insn_data[j].insn;
3384 if (!CALL_P (old_insn))
3385 continue;
3386 was_call = true;
3388 new_insn = attempt;
3389 while (new_insn != NULL_RTX)
3391 if (CALL_P (new_insn))
3392 break;
3393 new_insn = NEXT_INSN (new_insn);
3396 gcc_assert (new_insn != NULL_RTX);
3398 CALL_INSN_FUNCTION_USAGE (new_insn)
3399 = CALL_INSN_FUNCTION_USAGE (old_insn);
3401 for (note = REG_NOTES (old_insn);
3402 note;
3403 note = XEXP (note, 1))
3404 switch (REG_NOTE_KIND (note))
3406 case REG_NORETURN:
3407 case REG_SETJMP:
3408 case REG_TM:
3409 add_reg_note (new_insn, REG_NOTE_KIND (note),
3410 XEXP (note, 0));
3411 break;
3412 default:
3413 /* Discard all other reg notes. */
3414 break;
3417 /* Croak if there is another call in the sequence. */
3418 while (++i <= match_len)
3420 j = peep2_buf_position (peep2_current + i);
3421 old_insn = peep2_insn_data[j].insn;
3422 gcc_assert (!CALL_P (old_insn));
3424 break;
3427 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3428 move those notes over to the new sequence. */
3429 as_note = NULL;
3430 for (i = match_len; i >= 0; --i)
3432 int j = peep2_buf_position (peep2_current + i);
3433 old_insn = peep2_insn_data[j].insn;
3435 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3436 if (as_note)
3437 break;
3440 i = peep2_buf_position (peep2_current + match_len);
3441 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3443 /* Replace the old sequence with the new. */
3444 last = emit_insn_after_setloc (attempt,
3445 peep2_insn_data[i].insn,
3446 INSN_LOCATION (peep2_insn_data[i].insn));
3447 before_try = PREV_INSN (insn);
3448 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3450 /* Re-insert the EH_REGION notes. */
3451 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3453 edge eh_edge;
3454 edge_iterator ei;
3456 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3457 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3458 break;
3460 if (eh_note)
3461 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3463 if (eh_edge)
3464 for (x = last; x != before_try; x = PREV_INSN (x))
3465 if (x != BB_END (bb)
3466 && (can_throw_internal (x)
3467 || can_nonlocal_goto (x)))
3469 edge nfte, nehe;
3470 int flags;
3472 nfte = split_block (bb, x);
3473 flags = (eh_edge->flags
3474 & (EDGE_EH | EDGE_ABNORMAL));
3475 if (CALL_P (x))
3476 flags |= EDGE_ABNORMAL_CALL;
3477 nehe = make_edge (nfte->src, eh_edge->dest,
3478 flags);
3480 nehe->probability = eh_edge->probability;
3481 nfte->probability
3482 = REG_BR_PROB_BASE - nehe->probability;
3484 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3485 bb = nfte->src;
3486 eh_edge = nehe;
3489 /* The replacement may have turned a possibly trapping insn into a
3490 non-trapping one. Zap the now-dummy outgoing edges. */
3491 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3494 /* Re-insert the ARGS_SIZE notes. */
3495 if (as_note)
3496 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3498 /* If we generated a jump instruction, it won't have
3499 JUMP_LABEL set. Recompute after we're done. */
3500 for (x = last; x != before_try; x = PREV_INSN (x))
3501 if (JUMP_P (x))
3503 peep2_do_rebuild_jump_labels = true;
3504 break;
3507 return last;
3510 /* After performing a replacement in basic block BB, fix up the life
3511 information in our buffer. LAST is the last of the insns that we
3512 emitted as a replacement. PREV is the insn before the start of
3513 the replacement. MATCH_LEN is the number of instructions that were
3514 matched, and which now need to be replaced in the buffer. */
3516 static void
3517 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3519 int i = peep2_buf_position (peep2_current + match_len + 1);
3520 rtx x;
3521 regset_head live;
3523 INIT_REG_SET (&live);
3524 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3526 gcc_assert (peep2_current_count >= match_len + 1);
3527 peep2_current_count -= match_len + 1;
3529 x = last;
3530 do
3532 if (INSN_P (x))
3534 df_insn_rescan (x);
3535 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3537 peep2_current_count++;
3538 if (--i < 0)
3539 i = MAX_INSNS_PER_PEEP2;
3540 peep2_insn_data[i].insn = x;
3541 df_simulate_one_insn_backwards (bb, x, &live);
3542 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3545 x = PREV_INSN (x);
3547 while (x != prev);
3548 CLEAR_REG_SET (&live);
3550 peep2_current = i;
3553 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3554 Return true if we added it, false otherwise. The caller will try to match
3555 peepholes against the buffer if we return false; otherwise it will try to
3556 add more instructions to the buffer. */
3558 static bool
3559 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3561 int pos;
3563 /* Once we have filled the maximum number of insns the buffer can hold,
3564 allow the caller to match the insns against peepholes. We wait until
3565 the buffer is full in case the target has similar peepholes of different
3566 length; we always want to match the longest if possible. */
3567 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3568 return false;
3570 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3571 any other pattern, lest it change the semantics of the frame info. */
3572 if (RTX_FRAME_RELATED_P (insn))
3574 /* Let the buffer drain first. */
3575 if (peep2_current_count > 0)
3576 return false;
3577 /* Now the insn will be the only thing in the buffer. */
3580 pos = peep2_buf_position (peep2_current + peep2_current_count);
3581 peep2_insn_data[pos].insn = insn;
3582 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3583 peep2_current_count++;
3585 df_simulate_one_insn_forwards (bb, insn, live);
3586 return true;
3589 /* Perform the peephole2 optimization pass. */
3591 static void
3592 peephole2_optimize (void)
3594 rtx insn;
3595 bitmap live;
3596 int i;
3597 basic_block bb;
3599 peep2_do_cleanup_cfg = false;
3600 peep2_do_rebuild_jump_labels = false;
3602 df_set_flags (DF_LR_RUN_DCE);
3603 df_note_add_problem ();
3604 df_analyze ();
3606 /* Initialize the regsets we're going to use. */
3607 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3608 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3609 live = BITMAP_ALLOC (&reg_obstack);
3611 FOR_EACH_BB_REVERSE (bb)
3613 bool past_end = false;
3614 int pos;
3616 rtl_profile_for_bb (bb);
3618 /* Start up propagation. */
3619 bitmap_copy (live, DF_LR_IN (bb));
3620 df_simulate_initialize_forwards (bb, live);
3621 peep2_reinit_state (live);
3623 insn = BB_HEAD (bb);
3624 for (;;)
3626 rtx attempt, head;
3627 int match_len;
3629 if (!past_end && !NONDEBUG_INSN_P (insn))
3631 next_insn:
3632 insn = NEXT_INSN (insn);
3633 if (insn == NEXT_INSN (BB_END (bb)))
3634 past_end = true;
3635 continue;
3637 if (!past_end && peep2_fill_buffer (bb, insn, live))
3638 goto next_insn;
3640 /* If we did not fill an empty buffer, it signals the end of the
3641 block. */
3642 if (peep2_current_count == 0)
3643 break;
3645 /* The buffer filled to the current maximum, so try to match. */
3647 pos = peep2_buf_position (peep2_current + peep2_current_count);
3648 peep2_insn_data[pos].insn = PEEP2_EOB;
3649 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3651 /* Match the peephole. */
3652 head = peep2_insn_data[peep2_current].insn;
3653 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3654 if (attempt != NULL)
3656 rtx last = peep2_attempt (bb, head, match_len, attempt);
3657 if (last)
3659 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3660 continue;
3664 /* No match: advance the buffer by one insn. */
3665 peep2_current = peep2_buf_position (peep2_current + 1);
3666 peep2_current_count--;
3670 default_rtl_profile ();
3671 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3672 BITMAP_FREE (peep2_insn_data[i].live_before);
3673 BITMAP_FREE (live);
3674 if (peep2_do_rebuild_jump_labels)
3675 rebuild_jump_labels (get_insns ());
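/* An illustrative aside (not part of recog.c): the scan strategy of
   the driver above, reduced to a standalone skeleton.  Keep a window
   of insns, only try to match once the window is full so the longest
   peephole gets first chance, and on failure slide forward by one.
   The "insn stream" and matcher below are invented stand-ins.  */

#define TOY_WINDOW 3

static int
toy_match (const int *window, int count)
{
  /* Pretend a peephole combines two equal adjacent insns; return how
     many insns the replacement consumed, or 0 for no match.  */
  return (count >= 2 && window[0] == window[1]) ? 2 : 0;
}

static int
toy_scan (const int *insns, int n)
{
  int matches = 0, pos = 0;
  while (pos < n)
    {
      int avail = n - pos > TOY_WINDOW ? TOY_WINDOW : n - pos;
      int used = toy_match (insns + pos, avail);
      if (used)
	{
	  matches++;
	  pos += used;		/* Skip what the replacement covered.  */
	}
      else
	pos++;			/* No match: slide the window by one.  */
    }
  return matches;
}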
3677 #endif /* HAVE_peephole2 */
3679 /* Common predicates for use with define_bypass. */
3681 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3682 data, not the address operand(s) of the store. IN_INSN and OUT_INSN
3683 must each be either a single_set or a PARALLEL with SETs inside. */
3685 int
3686 store_data_bypass_p (rtx out_insn, rtx in_insn)
3688 rtx out_set, in_set;
3689 rtx out_pat, in_pat;
3690 rtx out_exp, in_exp;
3691 int i, j;
3693 in_set = single_set (in_insn);
3694 if (in_set)
3696 if (!MEM_P (SET_DEST (in_set)))
3697 return false;
3699 out_set = single_set (out_insn);
3700 if (out_set)
3702 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3703 return false;
3705 else
3707 out_pat = PATTERN (out_insn);
3709 if (GET_CODE (out_pat) != PARALLEL)
3710 return false;
3712 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3714 out_exp = XVECEXP (out_pat, 0, i);
3716 if (GET_CODE (out_exp) == CLOBBER)
3717 continue;
3719 gcc_assert (GET_CODE (out_exp) == SET);
3721 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3722 return false;
3726 else
3728 in_pat = PATTERN (in_insn);
3729 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3731 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3733 in_exp = XVECEXP (in_pat, 0, i);
3735 if (GET_CODE (in_exp) == CLOBBER)
3736 continue;
3738 gcc_assert (GET_CODE (in_exp) == SET);
3740 if (!MEM_P (SET_DEST (in_exp)))
3741 return false;
3743 out_set = single_set (out_insn);
3744 if (out_set)
3746 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3747 return false;
3749 else
3751 out_pat = PATTERN (out_insn);
3752 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3754 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3756 out_exp = XVECEXP (out_pat, 0, j);
3758 if (GET_CODE (out_exp) == CLOBBER)
3759 continue;
3761 gcc_assert (GET_CODE (out_exp) == SET);
3763 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3764 return false;
3770 return true;
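/* An illustrative aside (not part of recog.c): the essence of the
   predicate above with invented toy structures.  A store can take its
   data late through a forwarding path, but its address is needed up
   front, so only an address-side dependency disqualifies the bypass.  */

#include <stdbool.h>

struct toy_store { int addr_reg; int data_reg; };

static bool
toy_store_data_bypass_p (int producer_dest_reg, struct toy_store st)
{
  /* True iff the producer's result feeds only the stored data, never
     the address computation of the destination.  */
  return st.addr_reg != producer_dest_reg;
}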
3773 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3774 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3775 or a multiple set; IN_INSN should be a single_set for the result to be
3776 meaningful, but for convenience of insn categorization it may be any JUMP or CALL insn. */
3778 int
3779 if_test_bypass_p (rtx out_insn, rtx in_insn)
3781 rtx out_set, in_set;
3783 in_set = single_set (in_insn);
3784 if (! in_set)
3786 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3787 return false;
3790 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3791 return false;
3792 in_set = SET_SRC (in_set);
3794 out_set = single_set (out_insn);
3795 if (out_set)
3797 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3798 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3799 return false;
3801 else
3803 rtx out_pat;
3804 int i;
3806 out_pat = PATTERN (out_insn);
3807 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3809 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3811 rtx exp = XVECEXP (out_pat, 0, i);
3813 if (GET_CODE (exp) == CLOBBER)
3814 continue;
3816 gcc_assert (GET_CODE (exp) == SET);
3818 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3819 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3820 return false;
3824 return true;
3827 static bool
3828 gate_handle_peephole2 (void)
3830 return (optimize > 0 && flag_peephole2);
3833 static unsigned int
3834 rest_of_handle_peephole2 (void)
3836 #ifdef HAVE_peephole2
3837 peephole2_optimize ();
3838 #endif
3839 return 0;
3842 namespace {
3844 const pass_data pass_data_peephole2 =
3846 RTL_PASS, /* type */
3847 "peephole2", /* name */
3848 OPTGROUP_NONE, /* optinfo_flags */
3849 true, /* has_gate */
3850 true, /* has_execute */
3851 TV_PEEPHOLE2, /* tv_id */
3852 0, /* properties_required */
3853 0, /* properties_provided */
3854 0, /* properties_destroyed */
3855 0, /* todo_flags_start */
3856 ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
3859 class pass_peephole2 : public rtl_opt_pass
3861 public:
3862 pass_peephole2 (gcc::context *ctxt)
3863 : rtl_opt_pass (pass_data_peephole2, ctxt)
3866 /* opt_pass methods: */
3867 /* The epiphany backend creates a second instance of this pass, so we need
3868 a clone method. */
3869 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3870 bool gate () { return gate_handle_peephole2 (); }
3871 unsigned int execute () { return rest_of_handle_peephole2 (); }
3873 }; // class pass_peephole2
3875 } // anon namespace
3877 rtl_opt_pass *
3878 make_pass_peephole2 (gcc::context *ctxt)
3880 return new pass_peephole2 (ctxt);
3883 static unsigned int
3884 rest_of_handle_split_all_insns (void)
3886 split_all_insns ();
3887 return 0;
3890 namespace {
3892 const pass_data pass_data_split_all_insns =
3894 RTL_PASS, /* type */
3895 "split1", /* name */
3896 OPTGROUP_NONE, /* optinfo_flags */
3897 false, /* has_gate */
3898 true, /* has_execute */
3899 TV_NONE, /* tv_id */
3900 0, /* properties_required */
3901 0, /* properties_provided */
3902 0, /* properties_destroyed */
3903 0, /* todo_flags_start */
3904 0, /* todo_flags_finish */
3907 class pass_split_all_insns : public rtl_opt_pass
3909 public:
3910 pass_split_all_insns (gcc::context *ctxt)
3911 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3914 /* opt_pass methods: */
3915 /* The epiphany backend creates a second instance of this pass, so
3916 we need a clone method. */
3917 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3918 unsigned int execute () { return rest_of_handle_split_all_insns (); }
3920 }; // class pass_split_all_insns
3922 } // anon namespace
3924 rtl_opt_pass *
3925 make_pass_split_all_insns (gcc::context *ctxt)
3927 return new pass_split_all_insns (ctxt);
3930 static unsigned int
3931 rest_of_handle_split_after_reload (void)
3933 /* If optimizing, then go ahead and split insns now. */
3934 #ifndef STACK_REGS
3935 if (optimize > 0)
3936 #endif
3937 split_all_insns ();
3938 return 0;
3941 namespace {
3943 const pass_data pass_data_split_after_reload =
3945 RTL_PASS, /* type */
3946 "split2", /* name */
3947 OPTGROUP_NONE, /* optinfo_flags */
3948 false, /* has_gate */
3949 true, /* has_execute */
3950 TV_NONE, /* tv_id */
3951 0, /* properties_required */
3952 0, /* properties_provided */
3953 0, /* properties_destroyed */
3954 0, /* todo_flags_start */
3955 0, /* todo_flags_finish */
3958 class pass_split_after_reload : public rtl_opt_pass
3960 public:
3961 pass_split_after_reload (gcc::context *ctxt)
3962 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3965 /* opt_pass methods: */
3966 unsigned int execute () { return rest_of_handle_split_after_reload (); }
3968 }; // class pass_split_after_reload
3970 } // anon namespace
3972 rtl_opt_pass *
3973 make_pass_split_after_reload (gcc::context *ctxt)
3975 return new pass_split_after_reload (ctxt);
3978 static bool
3979 gate_handle_split_before_regstack (void)
3981 #if HAVE_ATTR_length && defined (STACK_REGS)
3982 /* If flow2 creates new instructions which need splitting, and
3983 scheduling after reload is not done, they might not be split
3984 until final, which does not allow splitting when
3985 HAVE_ATTR_length is defined. */
3986 # ifdef INSN_SCHEDULING
3987 return (optimize && !flag_schedule_insns_after_reload);
3988 # else
3989 return (optimize);
3990 # endif
3991 #else
3992 return 0;
3993 #endif
3996 static unsigned int
3997 rest_of_handle_split_before_regstack (void)
3999 split_all_insns ();
4000 return 0;
4003 namespace {
4005 const pass_data pass_data_split_before_regstack =
4007 RTL_PASS, /* type */
4008 "split3", /* name */
4009 OPTGROUP_NONE, /* optinfo_flags */
4010 true, /* has_gate */
4011 true, /* has_execute */
4012 TV_NONE, /* tv_id */
4013 0, /* properties_required */
4014 0, /* properties_provided */
4015 0, /* properties_destroyed */
4016 0, /* todo_flags_start */
4017 0, /* todo_flags_finish */
4020 class pass_split_before_regstack : public rtl_opt_pass
4022 public:
4023 pass_split_before_regstack (gcc::context *ctxt)
4024 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4027 /* opt_pass methods: */
4028 bool gate () { return gate_handle_split_before_regstack (); }
4029 unsigned int execute () {
4030 return rest_of_handle_split_before_regstack ();
4033 }; // class pass_split_before_regstack
4035 } // anon namespace
4037 rtl_opt_pass *
4038 make_pass_split_before_regstack (gcc::context *ctxt)
4040 return new pass_split_before_regstack (ctxt);
4043 static bool
4044 gate_handle_split_before_sched2 (void)
4046 #ifdef INSN_SCHEDULING
4047 return optimize > 0 && flag_schedule_insns_after_reload;
4048 #else
4049 return 0;
4050 #endif
4053 static unsigned int
4054 rest_of_handle_split_before_sched2 (void)
4056 #ifdef INSN_SCHEDULING
4057 split_all_insns ();
4058 #endif
4059 return 0;
4062 namespace {
4064 const pass_data pass_data_split_before_sched2 =
4066 RTL_PASS, /* type */
4067 "split4", /* name */
4068 OPTGROUP_NONE, /* optinfo_flags */
4069 true, /* has_gate */
4070 true, /* has_execute */
4071 TV_NONE, /* tv_id */
4072 0, /* properties_required */
4073 0, /* properties_provided */
4074 0, /* properties_destroyed */
4075 0, /* todo_flags_start */
4076 TODO_verify_flow, /* todo_flags_finish */
4079 class pass_split_before_sched2 : public rtl_opt_pass
4081 public:
4082 pass_split_before_sched2 (gcc::context *ctxt)
4083 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4086 /* opt_pass methods: */
4087 bool gate () { return gate_handle_split_before_sched2 (); }
4088 unsigned int execute () { return rest_of_handle_split_before_sched2 (); }
4090 }; // class pass_split_before_sched2
4092 } // anon namespace
4094 rtl_opt_pass *
4095 make_pass_split_before_sched2 (gcc::context *ctxt)
4097 return new pass_split_before_sched2 (ctxt);
4100 /* The placement of the splitting that we do for shorten_branches
4101 depends on whether regstack is used by the target or not. */
4102 static bool
4103 gate_do_final_split (void)
4105 #if HAVE_ATTR_length && !defined (STACK_REGS)
4106 return 1;
4107 #else
4108 return 0;
4109 #endif
4112 namespace {
4114 const pass_data pass_data_split_for_shorten_branches =
4116 RTL_PASS, /* type */
4117 "split5", /* name */
4118 OPTGROUP_NONE, /* optinfo_flags */
4119 true, /* has_gate */
4120 true, /* has_execute */
4121 TV_NONE, /* tv_id */
4122 0, /* properties_required */
4123 0, /* properties_provided */
4124 0, /* properties_destroyed */
4125 0, /* todo_flags_start */
4126 TODO_verify_rtl_sharing, /* todo_flags_finish */
4129 class pass_split_for_shorten_branches : public rtl_opt_pass
4131 public:
4132 pass_split_for_shorten_branches (gcc::context *ctxt)
4133 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4136 /* opt_pass methods: */
4137 bool gate () { return gate_do_final_split (); }
4138 unsigned int execute () { return split_all_insns_noflow (); }
4140 }; // class pass_split_for_shorten_branches
4142 } // anon namespace
4144 rtl_opt_pass *
4145 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4147 return new pass_split_for_shorten_branches (ctxt);