/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
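
/* Editorial note (not in the original source): on a machine whose stack
   grows downward and that defines neither macro, the defaults above mean
   a push of an SImode value has the shape

       (set (mem:SI (pre_dec (reg sp))) ...)

   and the matching pop address uses (post_inc (reg sp)); push_operand
   and pop_operand below recognize exactly these STACK_PUSH_CODE and
   STACK_POP_CODE address forms.  */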
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument,
   defaulting UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument,
   defaulting UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
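
/* Editorial usage sketch (not part of the original file): a pass that
   wants to rewrite two locations of INSN atomically queues both changes
   with IN_GROUP = 1 and lets apply_change_group accept or undo the whole
   group, depending on whether INSN still matches its pattern.  INSN is
   assumed to be a single SET here; EXAMPLE_NEW_SRC and EXAMPLE_NEW_DEST
   are hypothetical caller-provided rtxes.  */
#if 0
static bool
example_rewrite_set (rtx insn, rtx example_new_src, rtx example_new_dest)
{
  rtx set = PATTERN (insn);

  /* Neither change is committed yet; INSN_CODE is merely reset.  */
  validate_change (insn, &SET_SRC (set), example_new_src, 1);
  validate_change (insn, &SET_DEST (set), example_new_dest, 1);

  /* Re-recognize INSN; on failure both locations are restored.  */
  return apply_change_group ();
}
#endif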

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, any clobbers that have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress
                      && ! lra_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object, true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
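
/* Editorial usage sketch (not part of the original file): combine-style
   callers snapshot the group with num_validated_changes, queue further
   tentative changes, and retract only the tail with cancel_changes when
   verification fails.  LOC and EXAMPLE_NEW_RTX are hypothetical.  */
#if 0
static bool
example_try_tail_change (rtx insn, rtx *loc, rtx example_new_rtx)
{
  int snapshot = num_validated_changes ();

  validate_change (insn, loc, example_new_rtx, 1);
  if (verify_changes (snapshot))
    return true;

  /* Retract only the changes queued after the snapshot; earlier,
     already-verified changes in the group are kept.  */
  cancel_changes (snapshot);
  return false;
}
#endif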

/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
            {
              wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
            {
              wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X)),
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
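
/* Editorial usage sketch (not part of the original file): replacing
   pseudo FROM by TO throughout INSN and its REG_EQUAL/REG_EQUIV notes,
   keeping the rewrite only if INSN still matches.  FROM and TO are
   hypothetical caller-provided rtxes.  */
#if 0
static bool
example_propagate (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_group (from, to, insn);
  return apply_change_group ();
}
#endif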

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
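
/* Editorial worked example (not in the original source): with
   STACK_PUSH_CODE == PRE_DEC and no PUSH_ROUNDING padding, push_operand
   accepts an address of the shape

       (mem:SI (pre_dec (reg sp)))

   whereas a target that pads pushes (PUSH_ROUNDING of 2 yielding 4, say,
   for an HImode push on a downward stack) must use the explicit form

       (mem:HI (pre_modify (reg sp) (plus (reg sp) (const_int -4))))

   which is exactly the PRE_MODIFY shape checked above.  */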

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
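
/* Editorial worked example (not in the original source): for an asm with
   two outputs, one input and one clobber, BODY has the shape

       (parallel [(set (reg A) (asm_operands ...))
                  (set (reg B) (asm_operands ...))
                  (clobber (reg CC))])

   The backwards scan above stops at the second SET, so n_sets = 2, and
   with one input and no labels asm_noperands returns 2 + 1 = 3.  */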

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;	/* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
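
/* Editorial usage sketch (not part of the original file): the usual
   calling sequence sizes the arrays with asm_noperands first, exactly as
   check_asm_operands does above.  Outputs come first in the vectors,
   then inputs, then any label operands.  */
#if 0
static void
example_decode (rtx body)
{
  int noperands = asm_noperands (body);
  if (noperands > 0)
    {
      rtx *operands = XALLOCAVEC (rtx, noperands);
      const char **constraints = XALLOCAVEC (const char *, noperands);

      decode_asm_operands (body, operands, NULL, constraints, NULL, NULL);
    }
}
#endif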

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_SCALAR_INT_P (op))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_SCALAR_INT_P (op))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
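
/* Editorial worked example (not in the original source): for an address
   such as

       (plus:SI (reg:SI 100) (const_int 4))

   find_constant_term_loc returns the location of the (const_int 4)
   subexpression, which offsettable_address_addr_space_p below uses to
   substitute the maximum in-object offset temporarily.  */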

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
     : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  enum machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
                        plus_constant (address_mode, XEXP (y, 1),
                                       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
           && GET_CODE (y) == ZERO_EXTEND
           && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
                             plus_constant (pointer_mode, XEXP (y, 0),
                                            mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
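
/* Editorial worked example (not in the original source): to decide
   whether (plus (reg) (const_int 4)) is offsettable for SImode
   (mode_sz == 4), the code above temporarily rewrites the constant term
   and asks the target whether

       (plus (reg) (const_int 7))

   is a valid QImode address, then restores the original address.  */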
2036 /* Return 1 if ADDR is an address-expression whose effect depends
2037 on the mode of the memory reference it is used in.
2039 ADDRSPACE is the address space associated with the address.
2041 Autoincrement addressing is a typical example of mode-dependence
2042 because the amount of the increment depends on the mode. */
2044 bool
2045 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2047 /* Auto-increment addressing with anything other than post_modify
2048 or pre_modify always introduces a mode dependency. Catch such
2049 cases now instead of deferring to the target. */
2050 if (GET_CODE (addr) == PRE_INC
2051 || GET_CODE (addr) == POST_INC
2052 || GET_CODE (addr) == PRE_DEC
2053 || GET_CODE (addr) == POST_DEC)
2054 return true;
2056 return targetm.mode_dependent_address_p (addr, addrspace);
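/* Illustrative sketch: a PRE_INC address is caught by the checks above
   before the target hook is even consulted, since the increment amount
   depends on the mode of the enclosing MEM.  The function below is
   hypothetical.  */
#if 0
static void
example_mode_dependent (void)
{
  rtx addr = gen_rtx_PRE_INC (Pmode, stack_pointer_rtx);
  gcc_checking_assert (mode_dependent_address_p (addr,
                                                 ADDR_SPACE_GENERIC));
}
#endif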
2059 /* Like extract_insn, but save the extracted insn and don't extract again
2060 when called again for the same insn, expecting that recog_data still
2061 contains the valid information. This is used primarily by the gen_attr
2062 infrastructure, which often extracts the same insn again and again. */
2063 void
2064 extract_insn_cached (rtx insn)
2066 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2067 return;
2068 extract_insn (insn);
2069 recog_data.insn = insn;
2072 /* Do cached extract_insn, constrain_operands and complain about failures.
2073 Used by insn_attrtab. */
2074 void
2075 extract_constrain_insn_cached (rtx insn)
2077 extract_insn_cached (insn);
2078 if (which_alternative == -1
2079 && !constrain_operands (reload_completed))
2080 fatal_insn_not_found (insn);
2083 /* Do cached constrain_operands and complain about failures. */
2084 int
2085 constrain_operands_cached (int strict)
2087 if (which_alternative == -1)
2088 return constrain_operands (strict);
2089 else
2090 return 1;
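/* Illustrative sketch: the point of the cached entry points above is
   that repeated queries against the same insn pay for one extraction
   only.  INSN is assumed to be a recognizable insn in the stream.  */
#if 0
extract_constrain_insn_cached (insn);  /* Extracts and constrains.  */
extract_constrain_insn_cached (insn);  /* Returns almost immediately:
                                          recog_data.insn still matches
                                          and which_alternative is set.  */
#endif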
2093 /* Analyze INSN and fill in recog_data. */
2095 void
2096 extract_insn (rtx insn)
2098 int i;
2099 int icode;
2100 int noperands;
2101 rtx body = PATTERN (insn);
2103 recog_data.n_operands = 0;
2104 recog_data.n_alternatives = 0;
2105 recog_data.n_dups = 0;
2106 recog_data.is_asm = false;
2108 switch (GET_CODE (body))
2110 case USE:
2111 case CLOBBER:
2112 case ASM_INPUT:
2113 case ADDR_VEC:
2114 case ADDR_DIFF_VEC:
2115 case VAR_LOCATION:
2116 return;
2118 case SET:
2119 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2120 goto asm_insn;
2121 else
2122 goto normal_insn;
2123 case PARALLEL:
2124 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2125 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2126 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2127 goto asm_insn;
2128 else
2129 goto normal_insn;
2130 case ASM_OPERANDS:
2131 asm_insn:
2132 recog_data.n_operands = noperands = asm_noperands (body);
2133 if (noperands >= 0)
2135 /* This insn is an `asm' with operands. */
2137 /* expand_asm_operands makes sure there aren't too many operands. */
2138 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2140 /* Now get the operand values and constraints out of the insn. */
2141 decode_asm_operands (body, recog_data.operand,
2142 recog_data.operand_loc,
2143 recog_data.constraints,
2144 recog_data.operand_mode, NULL);
2145 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2146 if (noperands > 0)
2148 const char *p = recog_data.constraints[0];
2149 recog_data.n_alternatives = 1;
2150 while (*p)
2151 recog_data.n_alternatives += (*p++ == ',');
2153 recog_data.is_asm = true;
2154 break;
2156 fatal_insn_not_found (insn);
2158 default:
2159 normal_insn:
2160 /* Ordinary insn: recognize it, get the operands via insn_extract
2161 and get the constraints. */
2163 icode = recog_memoized (insn);
2164 if (icode < 0)
2165 fatal_insn_not_found (insn);
2167 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2168 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2169 recog_data.n_dups = insn_data[icode].n_dups;
2171 insn_extract (insn);
2173 for (i = 0; i < noperands; i++)
2175 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2176 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2177 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2178 /* A VOIDmode match_operand gets its mode from its real operand. */
2179 if (recog_data.operand_mode[i] == VOIDmode)
2180 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2183 for (i = 0; i < noperands; i++)
2184 recog_data.operand_type[i]
2185 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2186 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2187 : OP_IN);
2189 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2191 if (INSN_CODE (insn) < 0)
2192 for (i = 0; i < recog_data.n_alternatives; i++)
2193 recog_data.alternative_enabled_p[i] = true;
2194 else
2196 recog_data.insn = insn;
2197 for (i = 0; i < recog_data.n_alternatives; i++)
2199 which_alternative = i;
2200 recog_data.alternative_enabled_p[i]
2201 = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2205 recog_data.insn = NULL;
2206 which_alternative = -1;
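/* Illustrative sketch: after extract_insn, operands and constraints can
   be read straight out of recog_data.  The dump helper below is
   hypothetical.  */
#if 0
static void
example_dump_operands (FILE *f, rtx insn)
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      fprintf (f, "operand %d, constraint \"%s\":\n",
               i, recog_data.constraints[i]);
      print_rtl_single (f, recog_data.operand[i]);
    }
}
#endif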
2209 /* After calling extract_insn, you can use this function to extract some
2210 information from the constraint strings into a more usable form.
2211 The collected data is stored in recog_op_alt. */
2212 void
2213 preprocess_constraints (void)
2215 int i;
2217 for (i = 0; i < recog_data.n_operands; i++)
2218 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2219 * sizeof (struct operand_alternative)));
2221 for (i = 0; i < recog_data.n_operands; i++)
2223 int j;
2224 struct operand_alternative *op_alt;
2225 const char *p = recog_data.constraints[i];
2227 op_alt = recog_op_alt[i];
2229 for (j = 0; j < recog_data.n_alternatives; j++)
2231 op_alt[j].cl = NO_REGS;
2232 op_alt[j].constraint = p;
2233 op_alt[j].matches = -1;
2234 op_alt[j].matched = -1;
2236 if (!recog_data.alternative_enabled_p[j])
2238 p = skip_alternative (p);
2239 continue;
2242 if (*p == '\0' || *p == ',')
2244 op_alt[j].anything_ok = 1;
2245 continue;
2248 for (;;)
2250 char c = *p;
2251 if (c == '#')
2253 c = *++p;
2254 while (c != ',' && c != '\0');
2255 if (c == ',' || c == '\0')
2257 p++;
2258 break;
2261 switch (c)
2263 case '=': case '+': case '*': case '%':
2264 case 'E': case 'F': case 'G': case 'H':
2265 case 's': case 'i': case 'n':
2266 case 'I': case 'J': case 'K': case 'L':
2267 case 'M': case 'N': case 'O': case 'P':
2268 /* These don't say anything we care about. */
2269 break;
2271 case '?':
2272 op_alt[j].reject += 6;
2273 break;
2274 case '!':
2275 op_alt[j].reject += 600;
2276 break;
2277 case '&':
2278 op_alt[j].earlyclobber = 1;
2279 break;
2281 case '0': case '1': case '2': case '3': case '4':
2282 case '5': case '6': case '7': case '8': case '9':
2284 char *end;
2285 op_alt[j].matches = strtoul (p, &end, 10);
2286 recog_op_alt[op_alt[j].matches][j].matched = i;
2287 p = end;
2289 continue;
2291 case TARGET_MEM_CONSTRAINT:
2292 op_alt[j].memory_ok = 1;
2293 break;
2294 case '<':
2295 op_alt[j].decmem_ok = 1;
2296 break;
2297 case '>':
2298 op_alt[j].incmem_ok = 1;
2299 break;
2300 case 'V':
2301 op_alt[j].nonoffmem_ok = 1;
2302 break;
2303 case 'o':
2304 op_alt[j].offmem_ok = 1;
2305 break;
2306 case 'X':
2307 op_alt[j].anything_ok = 1;
2308 break;
2310 case 'p':
2311 op_alt[j].is_address = 1;
2312 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2313 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2314 ADDRESS, SCRATCH)];
2315 break;
2317 case 'g':
2318 case 'r':
2319 op_alt[j].cl =
2320 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2321 break;
2323 default:
2324 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2326 op_alt[j].memory_ok = 1;
2327 break;
2329 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2331 op_alt[j].is_address = 1;
2332 op_alt[j].cl
2333 = (reg_class_subunion
2334 [(int) op_alt[j].cl]
2335 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2336 ADDRESS, SCRATCH)]);
2337 break;
2340 op_alt[j].cl
2341 = (reg_class_subunion
2342 [(int) op_alt[j].cl]
2343 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2344 break;
2346 p += CONSTRAINT_LEN (c, p);
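/* Illustrative sketch: once preprocess_constraints has run, the
   summarized data can be queried without reparsing the constraint
   strings.  The helper below is hypothetical.  */
#if 0
static bool
example_op_allows_reg_p (int op, int alt)
{
  /* True if alternative ALT lets operand OP live in some register
     class, or places no restriction on it at all.  */
  return (recog_op_alt[op][alt].cl != NO_REGS
          || recog_op_alt[op][alt].anything_ok);
}
#endif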
2352 /* Check the operands of an insn against the insn's operand constraints
2353 and return 1 if they are valid.
2354 The information about the insn's operands, constraints, operand modes
2355 etc. is obtained from the global variables set up by extract_insn.
2357 WHICH_ALTERNATIVE is set to a number which indicates which
2358 alternative of constraints was matched: 0 for the first alternative,
2359 1 for the next, etc.
2361 In addition, when two operands are required to match
2362 and it happens that the output operand is (reg) while the
2363 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2364 make the output operand look like the input.
2365 This is because the output operand is the one the template will print.
2367 This is used in final, just before printing the assembler code and by
2368 the routines that determine an insn's attribute.
2370 If STRICT is positive, it means that we have been
2371 called after reload has been completed. In that case, we must
2372 do all checks strictly. If it is zero, it means that we have been called
2373 before reload has completed. In that case, we first try to see if we can
2374 find an alternative that matches strictly. If not, we try again, this
2375 time assuming that reload will fix up the insn. This provides a "best
2376 guess" for the alternative and is used to compute attributes of insns prior
2377 to reload. A negative value of STRICT is used for this internal call. */
2379 struct funny_match
2381 int this_op, other;
2384 int
2385 constrain_operands (int strict)
2387 const char *constraints[MAX_RECOG_OPERANDS];
2388 int matching_operands[MAX_RECOG_OPERANDS];
2389 int earlyclobber[MAX_RECOG_OPERANDS];
2390 int c;
2392 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2393 int funny_match_index;
2395 which_alternative = 0;
2396 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2397 return 1;
2399 for (c = 0; c < recog_data.n_operands; c++)
2401 constraints[c] = recog_data.constraints[c];
2402 matching_operands[c] = -1;
2405 do
2407 int seen_earlyclobber_at = -1;
2408 int opno;
2409 int lose = 0;
2410 funny_match_index = 0;
2412 if (!recog_data.alternative_enabled_p[which_alternative])
2414 int i;
2416 for (i = 0; i < recog_data.n_operands; i++)
2417 constraints[i] = skip_alternative (constraints[i]);
2419 which_alternative++;
2420 continue;
2423 for (opno = 0; opno < recog_data.n_operands; opno++)
2425 rtx op = recog_data.operand[opno];
2426 enum machine_mode mode = GET_MODE (op);
2427 const char *p = constraints[opno];
2428 int offset = 0;
2429 int win = 0;
2430 int val;
2431 int len;
2433 earlyclobber[opno] = 0;
2435 /* A unary operator may be accepted by the predicate, but it
2436 is irrelevant for matching constraints. */
2437 if (UNARY_P (op))
2438 op = XEXP (op, 0);
2440 if (GET_CODE (op) == SUBREG)
2442 if (REG_P (SUBREG_REG (op))
2443 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2444 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2445 GET_MODE (SUBREG_REG (op)),
2446 SUBREG_BYTE (op),
2447 GET_MODE (op));
2448 op = SUBREG_REG (op);
2451 /* An empty constraint or empty alternative
2452 allows anything which matched the pattern. */
2453 if (*p == 0 || *p == ',')
2454 win = 1;
2455 else
2456 do
2457 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2459 case '\0':
2460 len = 0;
2461 break;
2462 case ',':
2463 c = '\0';
2464 break;
2466 case '?': case '!': case '*': case '%':
2467 case '=': case '+':
2468 break;
2470 case '#':
2471 /* Ignore rest of this alternative as far as
2472 constraint checking is concerned. */
2474 p++;
2475 while (*p && *p != ',');
2476 len = 0;
2477 break;
2479 case '&':
2480 earlyclobber[opno] = 1;
2481 if (seen_earlyclobber_at < 0)
2482 seen_earlyclobber_at = opno;
2483 break;
2485 case '0': case '1': case '2': case '3': case '4':
2486 case '5': case '6': case '7': case '8': case '9':
2488 /* This operand must be the same as a previous one.
2489 This kind of constraint is used for instructions such
2490 as add when they take only two operands.
2492 Note that the lower-numbered operand is passed first.
2494 If we are not testing strictly, assume that this
2495 constraint will be satisfied. */
2497 char *end;
2498 int match;
2500 match = strtoul (p, &end, 10);
2501 p = end;
2503 if (strict < 0)
2504 val = 1;
2505 else
2507 rtx op1 = recog_data.operand[match];
2508 rtx op2 = recog_data.operand[opno];
2510 /* A unary operator may be accepted by the predicate,
2511 but it is irrelevant for matching constraints. */
2512 if (UNARY_P (op1))
2513 op1 = XEXP (op1, 0);
2514 if (UNARY_P (op2))
2515 op2 = XEXP (op2, 0);
2517 val = operands_match_p (op1, op2);
2520 matching_operands[opno] = match;
2521 matching_operands[match] = opno;
2523 if (val != 0)
2524 win = 1;
2526 /* If output is *x and input is *--x, arrange later
2527 to change the output to *--x as well, since the
2528 output op is the one that will be printed. */
2529 if (val == 2 && strict > 0)
2531 funny_match[funny_match_index].this_op = opno;
2532 funny_match[funny_match_index++].other = match;
2535 len = 0;
2536 break;
2538 case 'p':
2539 /* p is used for address_operands. When we are called by
2540 gen_reload, no one will have checked that the address is
2541 strictly valid, i.e., that all pseudos requiring hard regs
2542 have gotten them. */
2543 if (strict <= 0
2544 || (strict_memory_address_p (recog_data.operand_mode[opno],
2545 op)))
2546 win = 1;
2547 break;
2549 /* No need to check general_operand again;
2550 it was done in insn-recog.c. Well, except that reload
2551 doesn't check the validity of its replacements, but
2552 that should only matter when there's a bug. */
2553 case 'g':
2554 /* Anything goes unless it is a REG and really has a hard reg
2555 but the hard reg is not in the class GENERAL_REGS. */
2556 if (REG_P (op))
2558 if (strict < 0
2559 || GENERAL_REGS == ALL_REGS
2560 || (reload_in_progress
2561 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2562 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2563 win = 1;
2565 else if (strict < 0 || general_operand (op, mode))
2566 win = 1;
2567 break;
2569 case 'X':
2570 /* This is used for a MATCH_SCRATCH in the cases when
2571 we don't actually need anything. So anything goes
2572 any time. */
2573 win = 1;
2574 break;
2576 case TARGET_MEM_CONSTRAINT:
2577 /* Memory operands must be valid, to the extent
2578 required by STRICT. */
2579 if (MEM_P (op))
2581 if (strict > 0
2582 && !strict_memory_address_addr_space_p
2583 (GET_MODE (op), XEXP (op, 0),
2584 MEM_ADDR_SPACE (op)))
2585 break;
2586 if (strict == 0
2587 && !memory_address_addr_space_p
2588 (GET_MODE (op), XEXP (op, 0),
2589 MEM_ADDR_SPACE (op)))
2590 break;
2591 win = 1;
2593 /* Before reload, accept what reload can turn into mem. */
2594 else if (strict < 0 && CONSTANT_P (op))
2595 win = 1;
2596 /* During reload, accept a pseudo. */
2597 else if (reload_in_progress && REG_P (op)
2598 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2599 win = 1;
2600 break;
2602 case '<':
2603 if (MEM_P (op)
2604 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2605 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2606 win = 1;
2607 break;
2609 case '>':
2610 if (MEM_P (op)
2611 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2612 || GET_CODE (XEXP (op, 0)) == POST_INC))
2613 win = 1;
2614 break;
2616 case 'E':
2617 case 'F':
2618 if (CONST_DOUBLE_AS_FLOAT_P (op)
2619 || (GET_CODE (op) == CONST_VECTOR
2620 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2621 win = 1;
2622 break;
2624 case 'G':
2625 case 'H':
2626 if (CONST_DOUBLE_AS_FLOAT_P (op)
2627 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2628 win = 1;
2629 break;
2631 case 's':
2632 if (CONST_SCALAR_INT_P (op))
2633 break;
2634 case 'i':
2635 if (CONSTANT_P (op))
2636 win = 1;
2637 break;
2639 case 'n':
2640 if (CONST_SCALAR_INT_P (op))
2641 win = 1;
2642 break;
2644 case 'I':
2645 case 'J':
2646 case 'K':
2647 case 'L':
2648 case 'M':
2649 case 'N':
2650 case 'O':
2651 case 'P':
2652 if (CONST_INT_P (op)
2653 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2654 win = 1;
2655 break;
2657 case 'V':
2658 if (MEM_P (op)
2659 && ((strict > 0 && ! offsettable_memref_p (op))
2660 || (strict < 0
2661 && !(CONSTANT_P (op) || MEM_P (op)))
2662 || (reload_in_progress
2663 && !(REG_P (op)
2664 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2665 win = 1;
2666 break;
2668 case 'o':
2669 if ((strict > 0 && offsettable_memref_p (op))
2670 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2671 /* Before reload, accept what reload can handle. */
2672 || (strict < 0
2673 && (CONSTANT_P (op) || MEM_P (op)))
2675 /* During reload, accept a pseudo. */
2675 || (reload_in_progress && REG_P (op)
2676 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2677 win = 1;
2678 break;
2680 default:
2682 enum reg_class cl;
2684 cl = (c == 'r'
2685 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2686 if (cl != NO_REGS)
2688 if (strict < 0
2689 || (strict == 0
2690 && REG_P (op)
2691 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2692 || (strict == 0 && GET_CODE (op) == SCRATCH)
2693 || (REG_P (op)
2694 && reg_fits_class_p (op, cl, offset, mode)))
2695 win = 1;
2697 #ifdef EXTRA_CONSTRAINT_STR
2698 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2699 win = 1;
2701 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2702 /* Every memory operand can be reloaded to fit. */
2703 && ((strict < 0 && MEM_P (op))
2704 /* Before reload, accept what reload can turn
2705 into mem. */
2706 || (strict < 0 && CONSTANT_P (op))
2707 /* During reload, accept a pseudo. */
2708 || (reload_in_progress && REG_P (op)
2709 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2710 win = 1;
2711 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2712 /* Every address operand can be reloaded to fit. */
2713 && strict < 0)
2714 win = 1;
2715 /* Cater to architectures like IA-64 that define extra memory
2716 constraints without using define_memory_constraint. */
2717 else if (reload_in_progress
2718 && REG_P (op)
2719 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2720 && reg_renumber[REGNO (op)] < 0
2721 && reg_equiv_mem (REGNO (op)) != 0
2722 && EXTRA_CONSTRAINT_STR
2723 (reg_equiv_mem (REGNO (op)), c, p))
2724 win = 1;
2725 #endif
2726 break;
2729 while (p += len, c);
2731 constraints[opno] = p;
2732 /* If this operand did not win somehow,
2733 this alternative loses. */
2734 if (! win)
2735 lose = 1;
2737 /* This alternative won; the operands are ok.
2738 Change whichever operands this alternative says to change. */
2739 if (! lose)
2741 int opno, eopno;
2743 /* See if any earlyclobber operand conflicts with some other
2744 operand. */
2746 if (strict > 0 && seen_earlyclobber_at >= 0)
2747 for (eopno = seen_earlyclobber_at;
2748 eopno < recog_data.n_operands;
2749 eopno++)
2750 /* Ignore earlyclobber operands now in memory,
2751 because we would often report failure when we have
2752 two memory operands, one of which was formerly a REG. */
2753 if (earlyclobber[eopno]
2754 && REG_P (recog_data.operand[eopno]))
2755 for (opno = 0; opno < recog_data.n_operands; opno++)
2756 if ((MEM_P (recog_data.operand[opno])
2757 || recog_data.operand_type[opno] != OP_OUT)
2758 && opno != eopno
2759 /* Ignore things like match_operator operands. */
2760 && *recog_data.constraints[opno] != 0
2761 && ! (matching_operands[opno] == eopno
2762 && operands_match_p (recog_data.operand[opno],
2763 recog_data.operand[eopno]))
2764 && ! safe_from_earlyclobber (recog_data.operand[opno],
2765 recog_data.operand[eopno]))
2766 lose = 1;
2768 if (! lose)
2770 while (--funny_match_index >= 0)
2772 recog_data.operand[funny_match[funny_match_index].other]
2773 = recog_data.operand[funny_match[funny_match_index].this_op];
2776 #ifdef AUTO_INC_DEC
2777 /* For operands without < or > constraints reject side-effects. */
2778 if (recog_data.is_asm)
2780 for (opno = 0; opno < recog_data.n_operands; opno++)
2781 if (MEM_P (recog_data.operand[opno]))
2782 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2784 case PRE_INC:
2785 case POST_INC:
2786 case PRE_DEC:
2787 case POST_DEC:
2788 case PRE_MODIFY:
2789 case POST_MODIFY:
2790 if (strchr (recog_data.constraints[opno], '<') == NULL
2791 && strchr (recog_data.constraints[opno], '>')
2792 == NULL)
2793 return 0;
2794 break;
2795 default:
2796 break;
2799 #endif
2800 return 1;
2804 which_alternative++;
2806 while (which_alternative < recog_data.n_alternatives);
2808 which_alternative = -1;
2809 /* If we are about to reject this, but we are not to test strictly,
2810 try a very loose test. Only return failure if it fails also. */
2811 if (strict == 0)
2812 return constrain_operands (-1);
2813 else
2814 return 0;
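/* Illustrative sketch of the STRICT convention documented above:
   callers pass reload_completed, so checking is loose before reload
   and strict afterwards, and which_alternative reports the match.  */
#if 0
extract_insn (insn);
if (constrain_operands (reload_completed) && dump_file)
  fprintf (dump_file, "insn %d matched alternative %d\n",
           INSN_UID (insn), which_alternative);
#endif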
2817 /* Return true iff OPERAND (assumed to be a REG rtx)
2818 is a hard reg in class CLASS when its regno is offset by OFFSET
2819 and changed to mode MODE.
2820 If REG occupies multiple hard regs, all of them must be in CLASS. */
2822 bool
2823 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2824 enum machine_mode mode)
2826 unsigned int regno = REGNO (operand);
2828 if (cl == NO_REGS)
2829 return false;
2831 /* Regno must not be a pseudo register. Offset may be negative. */
2832 return (HARD_REGISTER_NUM_P (regno)
2833 && HARD_REGISTER_NUM_P (regno + offset)
2834 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2835 regno + offset));
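/* Illustrative sketch: on a hypothetical target, ask whether a DImode
   value placed in hard register 1 (and therefore also occupying
   register 2) stays within GENERAL_REGS.  */
#if 0
rtx reg = gen_rtx_REG (SImode, 1);
bool fits = reg_fits_class_p (reg, GENERAL_REGS, 0, DImode);
#endif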
2838 /* Split single instruction. Helper function for split_all_insns and
2839 split_all_insns_noflow. Return last insn in the sequence if successful,
2840 or NULL if unsuccessful. */
2842 static rtx
2843 split_insn (rtx insn)
2845 /* Split insns here to get max fine-grain parallelism. */
2846 rtx first = PREV_INSN (insn);
2847 rtx last = try_split (PATTERN (insn), insn, 1);
2848 rtx insn_set, last_set, note;
2850 if (last == insn)
2851 return NULL_RTX;
2853 /* If the original instruction was a single set that was known to be
2854 equivalent to a constant, see if we can say the same about the last
2855 instruction in the split sequence. The two instructions must set
2856 the same destination. */
2857 insn_set = single_set (insn);
2858 if (insn_set)
2860 last_set = single_set (last);
2861 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2863 note = find_reg_equal_equiv_note (insn);
2864 if (note && CONSTANT_P (XEXP (note, 0)))
2865 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2866 else if (CONSTANT_P (SET_SRC (insn_set)))
2867 set_unique_reg_note (last, REG_EQUAL,
2868 copy_rtx (SET_SRC (insn_set)));
2872 /* try_split returns the NOTE that INSN became. */
2873 SET_INSN_DELETED (insn);
2875 /* ??? Coddle to md files that generate subregs in post-reload
2876 splitters instead of computing the proper hard register. */
2877 if (reload_completed && first != last)
2879 first = NEXT_INSN (first);
2880 for (;;)
2882 if (INSN_P (first))
2883 cleanup_subreg_operands (first);
2884 if (first == last)
2885 break;
2886 first = NEXT_INSN (first);
2890 return last;
2893 /* Split all insns in the function. */
2895 void
2896 split_all_insns (void)
2898 sbitmap blocks;
2899 bool changed;
2900 basic_block bb;
2902 blocks = sbitmap_alloc (last_basic_block);
2903 bitmap_clear (blocks);
2904 changed = false;
2906 FOR_EACH_BB_REVERSE (bb)
2908 rtx insn, next;
2909 bool finish = false;
2911 rtl_profile_for_bb (bb);
2912 for (insn = BB_HEAD (bb); !finish ; insn = next)
2914 /* Can't use `next_real_insn' because that might go across
2915 CODE_LABELs and run past basic block boundaries. */
2916 next = NEXT_INSN (insn);
2917 finish = (insn == BB_END (bb));
2918 if (INSN_P (insn))
2920 rtx set = single_set (insn);
2922 /* Don't split no-op move insns. These should silently
2923 disappear later in final. Splitting such insns would
2924 break the code that handles LIBCALL blocks. */
2925 if (set && set_noop_p (set))
2927 /* Nops get in the way while scheduling, so delete them
2928 now if register allocation has already been done. It
2929 is too risky to try to do this before register
2930 allocation, and there are unlikely to be very many
2931 nops then anyway. */
2932 if (reload_completed)
2933 delete_insn_and_edges (insn);
2935 else
2937 if (split_insn (insn))
2939 bitmap_set_bit (blocks, bb->index);
2940 changed = true;
2947 default_rtl_profile ();
2948 if (changed)
2949 find_many_sub_basic_blocks (blocks);
2951 #ifdef ENABLE_CHECKING
2952 verify_flow_info ();
2953 #endif
2955 sbitmap_free (blocks);
2958 /* Same as split_all_insns, but do not expect CFG to be available.
2959 Used by machine dependent reorg passes. */
2961 unsigned int
2962 split_all_insns_noflow (void)
2964 rtx next, insn;
2966 for (insn = get_insns (); insn; insn = next)
2968 next = NEXT_INSN (insn);
2969 if (INSN_P (insn))
2971 /* Don't split no-op move insns. These should silently
2972 disappear later in final. Splitting such insns would
2973 break the code that handles LIBCALL blocks. */
2974 rtx set = single_set (insn);
2975 if (set && set_noop_p (set))
2977 /* Nops get in the way while scheduling, so delete them
2978 now if register allocation has already been done. It
2979 is too risky to try to do this before register
2980 allocation, and there are unlikely to be very many
2981 nops then anyway.
2983 ??? Should we use delete_insn when the CFG isn't valid? */
2984 if (reload_completed)
2985 delete_insn_and_edges (insn);
2987 else
2988 split_insn (insn);
2991 return 0;
2994 #ifdef HAVE_peephole2
2995 struct peep2_insn_data
2997 rtx insn;
2998 regset live_before;
3001 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3002 static int peep2_current;
3004 static bool peep2_do_rebuild_jump_labels;
3005 static bool peep2_do_cleanup_cfg;
3007 /* The number of instructions available to match a peep2. */
3008 int peep2_current_count;
3010 /* A non-insn marker indicating the last insn of the block.
3011 The live_before regset for this element is correct, indicating
3012 DF_LIVE_OUT for the block. */
3013 #define PEEP2_EOB pc_rtx
3015 /* Wrap N to fit into the peep2_insn_data buffer. */
3017 static int
3018 peep2_buf_position (int n)
3020 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3021 n -= MAX_INSNS_PER_PEEP2 + 1;
3022 return n;
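/* For example, with MAX_INSNS_PER_PEEP2 == 5 the buffer has six slots,
   so peep2_buf_position (6) == 0 and peep2_buf_position (7) == 1.
   Callers always pass values below 2 * (MAX_INSNS_PER_PEEP2 + 1),
   which is why a single conditional subtraction is enough.  */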
3025 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3026 does not exist. Used by the recognizer to find the next insn to match
3027 in a multi-insn pattern. */
3029 rtx
3030 peep2_next_insn (int n)
3032 gcc_assert (n <= peep2_current_count);
3034 n = peep2_buf_position (peep2_current + n);
3036 return peep2_insn_data[n].insn;
3039 /* Return true if REGNO is dead before the Nth non-note insn
3040 after `current'. */
3042 int
3043 peep2_regno_dead_p (int ofs, int regno)
3045 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3047 ofs = peep2_buf_position (peep2_current + ofs);
3049 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3051 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3054 /* Similarly for a REG. */
3056 int
3057 peep2_reg_dead_p (int ofs, rtx reg)
3059 int regno, n;
3061 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3063 ofs = peep2_buf_position (peep2_current + ofs);
3065 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3067 regno = REGNO (reg);
3068 n = hard_regno_nregs[regno][GET_MODE (reg)];
3069 while (--n >= 0)
3070 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3071 return 0;
3072 return 1;
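/* Illustrative sketch: these predicates are meant for the C condition
   of a define_peephole2 in a machine description, where the offset
   counts non-note insns from the start of the match.  The operand
   number and FLAGS_REG below are hypothetical.  */
#if 0
  /* Accept a three-insn peephole only if operands[0] and some
     target-specific flags register are both dead after the match.  */
  peep2_reg_dead_p (3, operands[0]) && peep2_regno_dead_p (3, FLAGS_REG)
#endif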
3075 /* Regno offset to be used in the register search. */
3076 static int search_ofs;
3078 /* Try to find a hard register of mode MODE, matching the register class in
3079 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3080 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3081 in which case the only condition is that the register must be available
3082 before CURRENT_INSN.
3083 Registers that already have bits set in REG_SET will not be considered.
3085 If an appropriate register is available, it will be returned and the
3086 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3087 returned. */
3089 rtx
3090 peep2_find_free_register (int from, int to, const char *class_str,
3091 enum machine_mode mode, HARD_REG_SET *reg_set)
3093 enum reg_class cl;
3094 HARD_REG_SET live;
3095 df_ref *def_rec;
3096 int i;
3098 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3099 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3101 from = peep2_buf_position (peep2_current + from);
3102 to = peep2_buf_position (peep2_current + to);
3104 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3105 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3107 while (from != to)
3109 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3111 /* Don't use registers set or clobbered by the insn. */
3112 for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3113 *def_rec; def_rec++)
3114 SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3116 from = peep2_buf_position (from + 1);
3119 cl = (class_str[0] == 'r' ? GENERAL_REGS
3120 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3122 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3124 int raw_regno, regno, success, j;
3126 /* Distribute the free registers as much as possible. */
3127 raw_regno = search_ofs + i;
3128 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3129 raw_regno -= FIRST_PSEUDO_REGISTER;
3130 #ifdef REG_ALLOC_ORDER
3131 regno = reg_alloc_order[raw_regno];
3132 #else
3133 regno = raw_regno;
3134 #endif
3136 /* Can it support the mode we need? */
3137 if (! HARD_REGNO_MODE_OK (regno, mode))
3138 continue;
3140 success = 1;
3141 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3143 /* Don't allocate fixed registers. */
3144 if (fixed_regs[regno + j])
3146 success = 0;
3147 break;
3149 /* Don't allocate global registers. */
3150 if (global_regs[regno + j])
3152 success = 0;
3153 break;
3155 /* Make sure the register is of the right class. */
3156 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3158 success = 0;
3159 break;
3161 /* And that we don't create an extra save/restore. */
3162 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3164 success = 0;
3165 break;
3168 if (! targetm.hard_regno_scratch_ok (regno + j))
3170 success = 0;
3171 break;
3174 /* And we don't clobber traceback for noreturn functions. */
3175 if ((regno + j == FRAME_POINTER_REGNUM
3176 || regno + j == HARD_FRAME_POINTER_REGNUM)
3177 && (! reload_completed || frame_pointer_needed))
3179 success = 0;
3180 break;
3183 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3184 || TEST_HARD_REG_BIT (live, regno + j))
3186 success = 0;
3187 break;
3191 if (success)
3193 add_to_hard_reg_set (reg_set, mode, regno);
3195 /* Start the next search with the next register. */
3196 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3197 raw_regno = 0;
3198 search_ofs = raw_regno;
3200 return gen_rtx_REG (mode, regno);
3204 search_ofs = 0;
3205 return NULL_RTX;
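/* Illustrative sketch: from the preparation statement of a
   define_peephole2, grab a scratch register that is free across the
   whole match.  The names below are hypothetical.  */
#if 0
HARD_REG_SET used;
rtx scratch;

CLEAR_HARD_REG_SET (used);
scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
if (scratch == NULL_RTX)
  FAIL;
#endif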
3208 /* Forget all currently tracked instructions; remember only the
3209 current LIVE regset. */
3211 static void
3212 peep2_reinit_state (regset live)
3214 int i;
3216 /* Indicate that all slots except the last hold invalid data. */
3217 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3218 peep2_insn_data[i].insn = NULL_RTX;
3219 peep2_current_count = 0;
3221 /* Indicate that the last slot contains live_after data. */
3222 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3223 peep2_current = MAX_INSNS_PER_PEEP2;
3225 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3228 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3229 starting at INSN. Perform the replacement, removing the old insns and
3230 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3231 if the replacement is rejected. */
3233 static rtx
3234 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3236 int i;
3237 rtx last, eh_note, as_note, before_try, x;
3238 rtx old_insn, new_insn;
3239 bool was_call = false;
3241 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3242 match more than one insn, or to be split into more than one insn. */
3243 old_insn = peep2_insn_data[peep2_current].insn;
3244 if (RTX_FRAME_RELATED_P (old_insn))
3246 bool any_note = false;
3247 rtx note;
3249 if (match_len != 0)
3250 return NULL;
3252 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3253 may be in the stream for the purpose of register allocation. */
3254 if (active_insn_p (attempt))
3255 new_insn = attempt;
3256 else
3257 new_insn = next_active_insn (attempt);
3258 if (next_active_insn (new_insn))
3259 return NULL;
3261 /* We have a 1-1 replacement. Copy over any frame-related info. */
3262 RTX_FRAME_RELATED_P (new_insn) = 1;
3264 /* Allow the backend to fill in a note during the split. */
3265 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3266 switch (REG_NOTE_KIND (note))
3268 case REG_FRAME_RELATED_EXPR:
3269 case REG_CFA_DEF_CFA:
3270 case REG_CFA_ADJUST_CFA:
3271 case REG_CFA_OFFSET:
3272 case REG_CFA_REGISTER:
3273 case REG_CFA_EXPRESSION:
3274 case REG_CFA_RESTORE:
3275 case REG_CFA_SET_VDRAP:
3276 any_note = true;
3277 break;
3278 default:
3279 break;
3282 /* If the backend didn't supply a note, copy one over. */
3283 if (!any_note)
3284 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3285 switch (REG_NOTE_KIND (note))
3287 case REG_FRAME_RELATED_EXPR:
3288 case REG_CFA_DEF_CFA:
3289 case REG_CFA_ADJUST_CFA:
3290 case REG_CFA_OFFSET:
3291 case REG_CFA_REGISTER:
3292 case REG_CFA_EXPRESSION:
3293 case REG_CFA_RESTORE:
3294 case REG_CFA_SET_VDRAP:
3295 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3296 any_note = true;
3297 break;
3298 default:
3299 break;
3302 /* If there still isn't a note, make sure the unwind info sees the
3303 same expression as before the split. */
3304 if (!any_note)
3306 rtx old_set, new_set;
3308 /* The old insn had better have been simple, or annotated. */
3309 old_set = single_set (old_insn);
3310 gcc_assert (old_set != NULL);
3312 new_set = single_set (new_insn);
3313 if (!new_set || !rtx_equal_p (new_set, old_set))
3314 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3317 /* Copy prologue/epilogue status. This is required in order to keep
3318 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3319 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3322 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3323 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3324 cfg-related call notes. */
3325 for (i = 0; i <= match_len; ++i)
3327 int j;
3328 rtx note;
3330 j = peep2_buf_position (peep2_current + i);
3331 old_insn = peep2_insn_data[j].insn;
3332 if (!CALL_P (old_insn))
3333 continue;
3334 was_call = true;
3336 new_insn = attempt;
3337 while (new_insn != NULL_RTX)
3339 if (CALL_P (new_insn))
3340 break;
3341 new_insn = NEXT_INSN (new_insn);
3344 gcc_assert (new_insn != NULL_RTX);
3346 CALL_INSN_FUNCTION_USAGE (new_insn)
3347 = CALL_INSN_FUNCTION_USAGE (old_insn);
3349 for (note = REG_NOTES (old_insn);
3350 note;
3351 note = XEXP (note, 1))
3352 switch (REG_NOTE_KIND (note))
3354 case REG_NORETURN:
3355 case REG_SETJMP:
3356 case REG_TM:
3357 add_reg_note (new_insn, REG_NOTE_KIND (note),
3358 XEXP (note, 0));
3359 break;
3360 default:
3361 /* Discard all other reg notes. */
3362 break;
3365 /* Croak if there is another call in the sequence. */
3366 while (++i <= match_len)
3368 j = peep2_buf_position (peep2_current + i);
3369 old_insn = peep2_insn_data[j].insn;
3370 gcc_assert (!CALL_P (old_insn));
3372 break;
3375 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3376 move those notes over to the new sequence. */
3377 as_note = NULL;
3378 for (i = match_len; i >= 0; --i)
3380 int j = peep2_buf_position (peep2_current + i);
3381 old_insn = peep2_insn_data[j].insn;
3383 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3384 if (as_note)
3385 break;
3388 i = peep2_buf_position (peep2_current + match_len);
3389 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3391 /* Replace the old sequence with the new. */
3392 last = emit_insn_after_setloc (attempt,
3393 peep2_insn_data[i].insn,
3394 INSN_LOCATION (peep2_insn_data[i].insn));
3395 before_try = PREV_INSN (insn);
3396 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3398 /* Re-insert the EH_REGION notes. */
3399 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3401 edge eh_edge;
3402 edge_iterator ei;
3404 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3405 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3406 break;
3408 if (eh_note)
3409 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3411 if (eh_edge)
3412 for (x = last; x != before_try; x = PREV_INSN (x))
3413 if (x != BB_END (bb)
3414 && (can_throw_internal (x)
3415 || can_nonlocal_goto (x)))
3417 edge nfte, nehe;
3418 int flags;
3420 nfte = split_block (bb, x);
3421 flags = (eh_edge->flags
3422 & (EDGE_EH | EDGE_ABNORMAL));
3423 if (CALL_P (x))
3424 flags |= EDGE_ABNORMAL_CALL;
3425 nehe = make_edge (nfte->src, eh_edge->dest,
3426 flags);
3428 nehe->probability = eh_edge->probability;
3429 nfte->probability
3430 = REG_BR_PROB_BASE - nehe->probability;
3432 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3433 bb = nfte->src;
3434 eh_edge = nehe;
3437 /* The insn may have been converted from a trapping form to a
3438 non-trapping one; zap any now-redundant outgoing edges. */
3439 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3442 /* Re-insert the ARGS_SIZE notes. */
3443 if (as_note)
3444 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3446 /* If we generated a jump instruction, it won't have
3447 JUMP_LABEL set. Recompute after we're done. */
3448 for (x = last; x != before_try; x = PREV_INSN (x))
3449 if (JUMP_P (x))
3451 peep2_do_rebuild_jump_labels = true;
3452 break;
3455 return last;
3458 /* After performing a replacement in basic block BB, fix up the life
3459 information in our buffer. LAST is the last of the insns that we
3460 emitted as a replacement. PREV is the insn before the start of
3461 the replacement. MATCH_LEN is the number of instructions that were
3462 matched, and which now need to be replaced in the buffer. */
3464 static void
3465 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3467 int i = peep2_buf_position (peep2_current + match_len + 1);
3468 rtx x;
3469 regset_head live;
3471 INIT_REG_SET (&live);
3472 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3474 gcc_assert (peep2_current_count >= match_len + 1);
3475 peep2_current_count -= match_len + 1;
3477 x = last;
3478 do
3480 if (INSN_P (x))
3482 df_insn_rescan (x);
3483 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3485 peep2_current_count++;
3486 if (--i < 0)
3487 i = MAX_INSNS_PER_PEEP2;
3488 peep2_insn_data[i].insn = x;
3489 df_simulate_one_insn_backwards (bb, x, &live);
3490 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3493 x = PREV_INSN (x);
3495 while (x != prev);
3496 CLEAR_REG_SET (&live);
3498 peep2_current = i;
3501 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3502 Return true if we added it, false otherwise. The caller will try to match
3503 peepholes against the buffer if we return false; otherwise it will try to
3504 add more instructions to the buffer. */
3506 static bool
3507 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3509 int pos;
3511 /* Once we have filled the maximum number of insns the buffer can hold,
3512 allow the caller to match the insns against peepholes. We wait until
3513 the buffer is full in case the target has similar peepholes of different
3514 length; we always want to match the longest if possible. */
3515 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3516 return false;
3518 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3519 any other pattern, lest it change the semantics of the frame info. */
3520 if (RTX_FRAME_RELATED_P (insn))
3522 /* Let the buffer drain first. */
3523 if (peep2_current_count > 0)
3524 return false;
3525 /* Now the insn will be the only thing in the buffer. */
3528 pos = peep2_buf_position (peep2_current + peep2_current_count);
3529 peep2_insn_data[pos].insn = insn;
3530 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3531 peep2_current_count++;
3533 df_simulate_one_insn_forwards (bb, insn, live);
3534 return true;
3537 /* Perform the peephole2 optimization pass. */
3539 static void
3540 peephole2_optimize (void)
3542 rtx insn;
3543 bitmap live;
3544 int i;
3545 basic_block bb;
3547 peep2_do_cleanup_cfg = false;
3548 peep2_do_rebuild_jump_labels = false;
3550 df_set_flags (DF_LR_RUN_DCE);
3551 df_note_add_problem ();
3552 df_analyze ();
3554 /* Initialize the regsets we're going to use. */
3555 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3556 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3557 search_ofs = 0;
3558 live = BITMAP_ALLOC (&reg_obstack);
3560 FOR_EACH_BB_REVERSE (bb)
3562 bool past_end = false;
3563 int pos;
3565 rtl_profile_for_bb (bb);
3567 /* Start up propagation. */
3568 bitmap_copy (live, DF_LR_IN (bb));
3569 df_simulate_initialize_forwards (bb, live);
3570 peep2_reinit_state (live);
3572 insn = BB_HEAD (bb);
3573 for (;;)
3575 rtx attempt, head;
3576 int match_len;
3578 if (!past_end && !NONDEBUG_INSN_P (insn))
3580 next_insn:
3581 insn = NEXT_INSN (insn);
3582 if (insn == NEXT_INSN (BB_END (bb)))
3583 past_end = true;
3584 continue;
3586 if (!past_end && peep2_fill_buffer (bb, insn, live))
3587 goto next_insn;
3589 /* If we did not fill an empty buffer, it signals the end of the
3590 block. */
3591 if (peep2_current_count == 0)
3592 break;
3594 /* The buffer filled to the current maximum, so try to match. */
3596 pos = peep2_buf_position (peep2_current + peep2_current_count);
3597 peep2_insn_data[pos].insn = PEEP2_EOB;
3598 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3600 /* Match the peephole. */
3601 head = peep2_insn_data[peep2_current].insn;
3602 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3603 if (attempt != NULL)
3605 rtx last = peep2_attempt (bb, head, match_len, attempt);
3606 if (last)
3608 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3609 continue;
3613 /* No match: advance the buffer by one insn. */
3614 peep2_current = peep2_buf_position (peep2_current + 1);
3615 peep2_current_count--;
3619 default_rtl_profile ();
3620 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3621 BITMAP_FREE (peep2_insn_data[i].live_before);
3622 BITMAP_FREE (live);
3623 if (peep2_do_rebuild_jump_labels)
3624 rebuild_jump_labels (get_insns ());
3626 #endif /* HAVE_peephole2 */
3628 /* Common predicates for use with define_bypass. */
3630 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3631 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3632 must be either a single_set or a PARALLEL with SETs inside. */
3634 int
3635 store_data_bypass_p (rtx out_insn, rtx in_insn)
3637 rtx out_set, in_set;
3638 rtx out_pat, in_pat;
3639 rtx out_exp, in_exp;
3640 int i, j;
3642 in_set = single_set (in_insn);
3643 if (in_set)
3645 if (!MEM_P (SET_DEST (in_set)))
3646 return false;
3648 out_set = single_set (out_insn);
3649 if (out_set)
3651 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3652 return false;
3654 else
3656 out_pat = PATTERN (out_insn);
3658 if (GET_CODE (out_pat) != PARALLEL)
3659 return false;
3661 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3663 out_exp = XVECEXP (out_pat, 0, i);
3665 if (GET_CODE (out_exp) == CLOBBER)
3666 continue;
3668 gcc_assert (GET_CODE (out_exp) == SET);
3670 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3671 return false;
3675 else
3677 in_pat = PATTERN (in_insn);
3678 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3680 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3682 in_exp = XVECEXP (in_pat, 0, i);
3684 if (GET_CODE (in_exp) == CLOBBER)
3685 continue;
3687 gcc_assert (GET_CODE (in_exp) == SET);
3689 if (!MEM_P (SET_DEST (in_exp)))
3690 return false;
3692 out_set = single_set (out_insn);
3693 if (out_set)
3695 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3696 return false;
3698 else
3700 out_pat = PATTERN (out_insn);
3701 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3703 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3705 out_exp = XVECEXP (out_pat, 0, j);
3707 if (GET_CODE (out_exp) == CLOBBER)
3708 continue;
3710 gcc_assert (GET_CODE (out_exp) == SET);
3712 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3713 return false;
3719 return true;
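/* Illustrative sketch: below, OUT_INSN feeds the data being stored,
   not the store address, so the predicate returns true.  The rtxes
   are built by hand purely for illustration.  */
#if 0
static void
example_store_bypass (void)
{
  rtx r0 = gen_rtx_REG (SImode, 0);
  rtx r1 = gen_rtx_REG (SImode, 1);
  /* out: r0 = r1         in: MEM[r1] = r0  */
  rtx out_insn = make_insn_raw (gen_rtx_SET (VOIDmode, r0, r1));
  rtx in_insn = make_insn_raw (gen_rtx_SET (VOIDmode,
                                            gen_rtx_MEM (SImode, r1),
                                            r0));
  gcc_checking_assert (store_data_bypass_p (out_insn, in_insn));
}
#endif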
3722 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3723 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3724 or multiple set; IN_INSN should be a single_set for the result to be exact,
3725 but for convenience of insn categorization it may be any JUMP or CALL insn. */
3727 int
3728 if_test_bypass_p (rtx out_insn, rtx in_insn)
3730 rtx out_set, in_set;
3732 in_set = single_set (in_insn);
3733 if (! in_set)
3735 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3736 return false;
3739 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3740 return false;
3741 in_set = SET_SRC (in_set);
3743 out_set = single_set (out_insn);
3744 if (out_set)
3746 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3747 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3748 return false;
3750 else
3752 rtx out_pat;
3753 int i;
3755 out_pat = PATTERN (out_insn);
3756 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3758 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3760 rtx exp = XVECEXP (out_pat, 0, i);
3762 if (GET_CODE (exp) == CLOBBER)
3763 continue;
3765 gcc_assert (GET_CODE (exp) == SET);
3767 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3768 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3769 return false;
3773 return true;
3776 static bool
3777 gate_handle_peephole2 (void)
3779 return (optimize > 0 && flag_peephole2);
3782 static unsigned int
3783 rest_of_handle_peephole2 (void)
3785 #ifdef HAVE_peephole2
3786 peephole2_optimize ();
3787 #endif
3788 return 0;
3791 namespace {
3793 const pass_data pass_data_peephole2 =
3795 RTL_PASS, /* type */
3796 "peephole2", /* name */
3797 OPTGROUP_NONE, /* optinfo_flags */
3798 true, /* has_gate */
3799 true, /* has_execute */
3800 TV_PEEPHOLE2, /* tv_id */
3801 0, /* properties_required */
3802 0, /* properties_provided */
3803 0, /* properties_destroyed */
3804 0, /* todo_flags_start */
3805 ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
3808 class pass_peephole2 : public rtl_opt_pass
3810 public:
3811 pass_peephole2 (gcc::context *ctxt)
3812 : rtl_opt_pass (pass_data_peephole2, ctxt)
3815 /* opt_pass methods: */
3816 /* The epiphany backend creates a second instance of this pass, so we need
3817 a clone method. */
3818 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3819 bool gate () { return gate_handle_peephole2 (); }
3820 unsigned int execute () { return rest_of_handle_peephole2 (); }
3822 }; // class pass_peephole2
3824 } // anon namespace
3826 rtl_opt_pass *
3827 make_pass_peephole2 (gcc::context *ctxt)
3829 return new pass_peephole2 (ctxt);
3832 static unsigned int
3833 rest_of_handle_split_all_insns (void)
3835 split_all_insns ();
3836 return 0;
3839 namespace {
3841 const pass_data pass_data_split_all_insns =
3843 RTL_PASS, /* type */
3844 "split1", /* name */
3845 OPTGROUP_NONE, /* optinfo_flags */
3846 false, /* has_gate */
3847 true, /* has_execute */
3848 TV_NONE, /* tv_id */
3849 0, /* properties_required */
3850 0, /* properties_provided */
3851 0, /* properties_destroyed */
3852 0, /* todo_flags_start */
3853 0, /* todo_flags_finish */
3856 class pass_split_all_insns : public rtl_opt_pass
3858 public:
3859 pass_split_all_insns (gcc::context *ctxt)
3860 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3863 /* opt_pass methods: */
3864 /* The epiphany backend creates a second instance of this pass, so
3865 we need a clone method. */
3866 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3867 unsigned int execute () { return rest_of_handle_split_all_insns (); }
3869 }; // class pass_split_all_insns
3871 } // anon namespace
3873 rtl_opt_pass *
3874 make_pass_split_all_insns (gcc::context *ctxt)
3876 return new pass_split_all_insns (ctxt);
3879 static unsigned int
3880 rest_of_handle_split_after_reload (void)
3882 /* If optimizing, then go ahead and split insns now. */
3883 #ifndef STACK_REGS
3884 if (optimize > 0)
3885 #endif
3886 split_all_insns ();
3887 return 0;
3890 namespace {
3892 const pass_data pass_data_split_after_reload =
3894 RTL_PASS, /* type */
3895 "split2", /* name */
3896 OPTGROUP_NONE, /* optinfo_flags */
3897 false, /* has_gate */
3898 true, /* has_execute */
3899 TV_NONE, /* tv_id */
3900 0, /* properties_required */
3901 0, /* properties_provided */
3902 0, /* properties_destroyed */
3903 0, /* todo_flags_start */
3904 0, /* todo_flags_finish */
3907 class pass_split_after_reload : public rtl_opt_pass
3909 public:
3910 pass_split_after_reload (gcc::context *ctxt)
3911 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3914 /* opt_pass methods: */
3915 unsigned int execute () { return rest_of_handle_split_after_reload (); }
3917 }; // class pass_split_after_reload
3919 } // anon namespace
3921 rtl_opt_pass *
3922 make_pass_split_after_reload (gcc::context *ctxt)
3924 return new pass_split_after_reload (ctxt);
3927 static bool
3928 gate_handle_split_before_regstack (void)
3930 #if HAVE_ATTR_length && defined (STACK_REGS)
3931 /* If flow2 creates new instructions which need splitting
3932 and scheduling after reload is not done, they might not be
3933 split until final which doesn't allow splitting
3934 if HAVE_ATTR_length. */
3935 # ifdef INSN_SCHEDULING
3936 return (optimize && !flag_schedule_insns_after_reload);
3937 # else
3938 return (optimize);
3939 # endif
3940 #else
3941 return 0;
3942 #endif
3945 static unsigned int
3946 rest_of_handle_split_before_regstack (void)
3948 split_all_insns ();
3949 return 0;
3952 namespace {
3954 const pass_data pass_data_split_before_regstack =
3956 RTL_PASS, /* type */
3957 "split3", /* name */
3958 OPTGROUP_NONE, /* optinfo_flags */
3959 true, /* has_gate */
3960 true, /* has_execute */
3961 TV_NONE, /* tv_id */
3962 0, /* properties_required */
3963 0, /* properties_provided */
3964 0, /* properties_destroyed */
3965 0, /* todo_flags_start */
3966 0, /* todo_flags_finish */
3969 class pass_split_before_regstack : public rtl_opt_pass
3971 public:
3972 pass_split_before_regstack (gcc::context *ctxt)
3973 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3976 /* opt_pass methods: */
3977 bool gate () { return gate_handle_split_before_regstack (); }
3978 unsigned int execute () {
3979 return rest_of_handle_split_before_regstack ();
3982 }; // class pass_split_before_regstack
3984 } // anon namespace
3986 rtl_opt_pass *
3987 make_pass_split_before_regstack (gcc::context *ctxt)
3989 return new pass_split_before_regstack (ctxt);
3992 static bool
3993 gate_handle_split_before_sched2 (void)
3995 #ifdef INSN_SCHEDULING
3996 return optimize > 0 && flag_schedule_insns_after_reload;
3997 #else
3998 return 0;
3999 #endif
4002 static unsigned int
4003 rest_of_handle_split_before_sched2 (void)
4005 #ifdef INSN_SCHEDULING
4006 split_all_insns ();
4007 #endif
4008 return 0;
4011 namespace {
4013 const pass_data pass_data_split_before_sched2 =
4015 RTL_PASS, /* type */
4016 "split4", /* name */
4017 OPTGROUP_NONE, /* optinfo_flags */
4018 true, /* has_gate */
4019 true, /* has_execute */
4020 TV_NONE, /* tv_id */
4021 0, /* properties_required */
4022 0, /* properties_provided */
4023 0, /* properties_destroyed */
4024 0, /* todo_flags_start */
4025 TODO_verify_flow, /* todo_flags_finish */
4028 class pass_split_before_sched2 : public rtl_opt_pass
4030 public:
4031 pass_split_before_sched2 (gcc::context *ctxt)
4032 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4035 /* opt_pass methods: */
4036 bool gate () { return gate_handle_split_before_sched2 (); }
4037 unsigned int execute () { return rest_of_handle_split_before_sched2 (); }
4039 }; // class pass_split_before_sched2
4041 } // anon namespace
4043 rtl_opt_pass *
4044 make_pass_split_before_sched2 (gcc::context *ctxt)
4046 return new pass_split_before_sched2 (ctxt);
4049 /* The placement of the splitting that we do for shorten_branches
4050 depends on whether regstack is used by the target or not. */
4051 static bool
4052 gate_do_final_split (void)
4054 #if HAVE_ATTR_length && !defined (STACK_REGS)
4055 return 1;
4056 #else
4057 return 0;
4058 #endif
4061 namespace {
4063 const pass_data pass_data_split_for_shorten_branches =
4065 RTL_PASS, /* type */
4066 "split5", /* name */
4067 OPTGROUP_NONE, /* optinfo_flags */
4068 true, /* has_gate */
4069 true, /* has_execute */
4070 TV_NONE, /* tv_id */
4071 0, /* properties_required */
4072 0, /* properties_provided */
4073 0, /* properties_destroyed */
4074 0, /* todo_flags_start */
4075 TODO_verify_rtl_sharing, /* todo_flags_finish */
4078 class pass_split_for_shorten_branches : public rtl_opt_pass
4080 public:
4081 pass_split_for_shorten_branches (gcc::context *ctxt)
4082 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4085 /* opt_pass methods: */
4086 bool gate () { return gate_do_final_split (); }
4087 unsigned int execute () { return split_all_insns_noflow (); }
4089 }; // class pass_split_for_shorten_branches
4091 } // anon namespace
4093 rtl_opt_pass *
4094 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4096 return new pass_split_for_shorten_branches (ctxt);