/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
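
/* Illustrative note (editor's sketch, not part of the original source):
   on a STACK_GROWS_DOWNWARD target the defaults above make a word-sized
   push look like

       (set (mem:SI (pre_dec (reg SP))) ...)

   and the matching pop like

       (set ... (mem:SI (post_inc (reg SP))))

   which is the shape push_operand and pop_operand below check for.  */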
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}


/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
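
/* Usage sketch (editor's illustration, not part of the original source):
   callers typically queue several dependent edits with IN_GROUP nonzero
   and then let apply_change_group accept or roll back all of them at
   once.  INSN, X, A and B below are hypothetical values owned by the
   caller:

       validate_change (insn, &XEXP (x, 0), a, 1);
       validate_change (insn, &XEXP (x, 1), b, 1);
       if (apply_change_group ())
         ;  INSN was re-recognized with both edits in place
       else
         ;  both edits were rolled back and INSN is unchanged

   Passing IN_GROUP == 0 instead validates and applies (or suppresses)
   the single change immediately, as described above.  */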
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers which have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object, true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}


/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}

/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv 0
#define CODE_FOR_extv CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv 0
#define CODE_FOR_extzv CODE_FOR_nothing
#endif

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
            {
              wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
            {
              wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
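
/* Usage sketch (editor's illustration, not part of the original source):
   to substitute pseudo TO for every use of pseudo FROM in one insn, and
   keep the result only if the insn is still recognizable:

       if (validate_replace_rtx (from, to, insn))
         ;  INSN now uses TO everywhere FROM appeared
       else
         ;  the substitution failed and INSN was left untouched

   FROM and TO are hypothetical registers here; the call accepts any rtx
   pair.  */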
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of the floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of the floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;		/* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
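
/* Usage sketch (editor's illustration, not part of the original source):
   check_asm_operands above shows the canonical calling sequence: size
   the arrays with asm_noperands, then let decode_asm_operands fill them
   in.

       int n = asm_noperands (body);
       if (n > 0)
         {
           rtx *ops = XALLOCAVEC (rtx, n);
           const char **cons = XALLOCAVEC (const char *, n);
           decode_asm_operands (body, ops, NULL, cons, NULL, NULL);
           ... ops[i] and cons[i] now describe operand i ...
         }
 */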
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_SCALAR_INT_P (op))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_SCALAR_INT_P (op))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
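
/* Example (editor's illustration, not part of the original source): for
   *P == (plus (reg A) (const_int 4)) the function above returns
   &XEXP (*P, 1), the location of the (const_int 4); for a bare (reg A)
   it returns a null pointer.  */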
1907 /* Return 1 if OP is a memory reference
1908 whose address contains no side effects
1909 and remains valid after the addition
1910 of a positive integer less than the
1911 size of the object being referenced.
1913 We assume that the original address is valid and do not check it.
1915 This uses strict_memory_address_p as a subroutine, so
1916 don't use it before reload. */
1919 offsettable_memref_p (rtx op)
1921 return ((MEM_P (op))
1922 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1923 MEM_ADDR_SPACE (op)));
1926 /* Similar, but don't require a strictly valid mem ref:
1927 consider pseudo-regs valid as index or base regs. */
1930 offsettable_nonstrict_memref_p (rtx op)
1932 return ((MEM_P (op))
1933 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1934 MEM_ADDR_SPACE (op)));
1937 /* Return 1 if Y is a memory address which contains no side effects
1938 and would remain valid for address space AS after the addition of
1939 a positive integer less than the size of that mode.
1941 We assume that the original address is valid and do not check it.
1942 We do check that it is valid for narrower modes.
1944 If STRICTP is nonzero, we require a strictly valid address,
1945 for the sake of use in reload.c. */
1948 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
1949 addr_space_t as)
1951 enum rtx_code ycode = GET_CODE (y);
1952 rtx z;
1953 rtx y1 = y;
1954 rtx *y2;
1955 int (*addressp) (enum machine_mode, rtx, addr_space_t) =
1956 (strictp ? strict_memory_address_addr_space_p
1957 : memory_address_addr_space_p);
1958 unsigned int mode_sz = GET_MODE_SIZE (mode);
1960 if (CONSTANT_ADDRESS_P (y))
1961 return 1;
1963 /* Adjusting an offsettable address involves changing to a narrower mode.
1964 Make sure that's OK. */
1966 if (mode_dependent_address_p (y, as))
1967 return 0;
1969 enum machine_mode address_mode = GET_MODE (y);
1970 if (address_mode == VOIDmode)
1971 address_mode = targetm.addr_space.address_mode (as);
1972 #ifdef POINTERS_EXTEND_UNSIGNED
1973 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1974 #endif
1976 /* ??? How much offset does an offsettable BLKmode reference need?
1977 Clearly that depends on the situation in which it's being used.
1978 However, the current situation in which we test 0xffffffff is
1979 less than ideal. Caveat user. */
1980 if (mode_sz == 0)
1981 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1983 /* If the expression contains a constant term,
1984 see if it remains valid when max possible offset is added. */
1986 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1988 int good;
1990 y1 = *y2;
1991 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
1992 /* Use QImode because an odd displacement may be automatically invalid
1993 for any wider mode. But it should be valid for a single byte. */
1994 good = (*addressp) (QImode, y, as);
1996 /* In any case, restore old contents of memory. */
1997 *y2 = y1;
1998 return good;
2001 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2002 return 0;
2004 /* The offset added here is chosen as the maximum offset that
2005 any instruction could need to add when operating on something
2006 of the specified mode. We assume that if Y and Y+c are
2007 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2008 go inside a LO_SUM here, so we do so as well. */
2009 if (GET_CODE (y) == LO_SUM
2010 && mode != BLKmode
2011 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2012 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2013 plus_constant (address_mode, XEXP (y, 1),
2014 mode_sz - 1));
2015 #ifdef POINTERS_EXTEND_UNSIGNED
2016 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2017 else if (POINTERS_EXTEND_UNSIGNED > 0
2018 && GET_CODE (y) == ZERO_EXTEND
2019 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2020 z = gen_rtx_ZERO_EXTEND (address_mode,
2021 plus_constant (pointer_mode, XEXP (y, 0),
2022 mode_sz - 1));
2023 #endif
2024 else
2025 z = plus_constant (address_mode, y, mode_sz - 1);
2027 /* Use QImode because an odd displacement may be automatically invalid
2028 for any wider mode. But it should be valid for a single byte. */
2029 return (*addressp) (QImode, z, as);
2032 /* Return 1 if ADDR is an address-expression whose effect depends
2033 on the mode of the memory reference it is used in.
2035 ADDRSPACE is the address space associated with the address.
2037 Autoincrement addressing is a typical example of mode-dependence
2038 because the amount of the increment depends on the mode. */
2040 bool
2041 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2043 /* Auto-increment addressing with anything other than post_modify
2044 or pre_modify always introduces a mode dependency. Catch such
2045 cases now instead of deferring to the target. */
2046 if (GET_CODE (addr) == PRE_INC
2047 || GET_CODE (addr) == POST_INC
2048 || GET_CODE (addr) == PRE_DEC
2049 || GET_CODE (addr) == POST_DEC)
2050 return true;
2052 return targetm.mode_dependent_address_p (addr, addrspace);
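#if 0
/* Sketch (illustrative only): any bare auto-increment address is
   mode-dependent, because the increment amount equals the access
   size.  The register number is hypothetical.  */
static bool
example_mode_dependent (void)
{
  rtx addr = gen_rtx_POST_INC (Pmode, gen_rtx_REG (Pmode, 1));
  return mode_dependent_address_p (addr, ADDR_SPACE_GENERIC); /* true */
}
#endif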
2055 /* Like extract_insn, but save the insn extracted and don't extract again,
2056 when called again for the same insn, expecting that recog_data still
2057 contains valid information. This is used primarily by the gen_attr
2058 infrastructure, which often extracts the same insn again and again. */
2059 void
2060 extract_insn_cached (rtx insn)
2062 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2063 return;
2064 extract_insn (insn);
2065 recog_data.insn = insn;
2068 /* Do cached extract_insn, constrain_operands and complain about failures.
2069 Used by insn_attrtab. */
2070 void
2071 extract_constrain_insn_cached (rtx insn)
2073 extract_insn_cached (insn);
2074 if (which_alternative == -1
2075 && !constrain_operands (reload_completed))
2076 fatal_insn_not_found (insn);
2079 /* Do cached constrain_operands; succeed at once if a matching alternative
was already found. */
2081 constrain_operands_cached (int strict)
2083 if (which_alternative == -1)
2084 return constrain_operands (strict);
2085 else
2086 return 1;
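#if 0
/* Sketch of the calling pattern these caches are built for: generated
   attribute code queries the same insn many times, and only the first
   call pays for extraction and constraint matching.  The accessor
   below is illustrative only.  */
static rtx
example_get_operand (rtx insn, int opno)
{
  extract_constrain_insn_cached (insn);
  return recog_data.operand[opno];
}
#endif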
2089 /* Analyze INSN and fill in recog_data. */
2091 void
2092 extract_insn (rtx insn)
2094 int i;
2095 int icode;
2096 int noperands;
2097 rtx body = PATTERN (insn);
2099 recog_data.n_operands = 0;
2100 recog_data.n_alternatives = 0;
2101 recog_data.n_dups = 0;
2102 recog_data.is_asm = false;
2104 switch (GET_CODE (body))
2106 case USE:
2107 case CLOBBER:
2108 case ASM_INPUT:
2109 case ADDR_VEC:
2110 case ADDR_DIFF_VEC:
2111 case VAR_LOCATION:
2112 return;
2114 case SET:
2115 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2116 goto asm_insn;
2117 else
2118 goto normal_insn;
2119 case PARALLEL:
2120 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2121 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2122 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2123 goto asm_insn;
2124 else
2125 goto normal_insn;
2126 case ASM_OPERANDS:
2127 asm_insn:
2128 recog_data.n_operands = noperands = asm_noperands (body);
2129 if (noperands >= 0)
2131 /* This insn is an `asm' with operands. */
2133 /* expand_asm_operands makes sure there aren't too many operands. */
2134 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2136 /* Now get the operand values and constraints out of the insn. */
2137 decode_asm_operands (body, recog_data.operand,
2138 recog_data.operand_loc,
2139 recog_data.constraints,
2140 recog_data.operand_mode, NULL);
2141 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2142 if (noperands > 0)
2144 const char *p = recog_data.constraints[0];
2145 recog_data.n_alternatives = 1;
2146 while (*p)
2147 recog_data.n_alternatives += (*p++ == ',');
2149 recog_data.is_asm = true;
2150 break;
2152 fatal_insn_not_found (insn);
2154 default:
2155 normal_insn:
2156 /* Ordinary insn: recognize it, get the operands via insn_extract
2157 and get the constraints. */
2159 icode = recog_memoized (insn);
2160 if (icode < 0)
2161 fatal_insn_not_found (insn);
2163 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2164 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2165 recog_data.n_dups = insn_data[icode].n_dups;
2167 insn_extract (insn);
2169 for (i = 0; i < noperands; i++)
2171 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2172 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2173 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2174 /* VOIDmode match_operands get their mode from the real operand. */
2175 if (recog_data.operand_mode[i] == VOIDmode)
2176 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2179 for (i = 0; i < noperands; i++)
2180 recog_data.operand_type[i]
2181 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2182 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2183 : OP_IN);
2185 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2187 if (INSN_CODE (insn) < 0)
2188 for (i = 0; i < recog_data.n_alternatives; i++)
2189 recog_data.alternative_enabled_p[i] = true;
2190 else
2192 recog_data.insn = insn;
2193 for (i = 0; i < recog_data.n_alternatives; i++)
2195 which_alternative = i;
2196 recog_data.alternative_enabled_p[i]
2197 = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2201 recog_data.insn = NULL;
2202 which_alternative = -1;
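#if 0
/* Sketch (illustrative only): after extract_insn, the operands and
   their constraint strings are available through recog_data.  */
static void
example_dump_operands (rtx insn, FILE *f)
{
  int i;
  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    fprintf (f, "operand %d: constraint \"%s\", mode %s\n",
	     i, recog_data.constraints[i],
	     GET_MODE_NAME (recog_data.operand_mode[i]));
}
#endif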
2205 /* After calling extract_insn, you can use this function to extract some
2206 information from the constraint strings into a more usable form.
2207 The collected data is stored in recog_op_alt. */
2208 void
2209 preprocess_constraints (void)
2211 int i;
2213 for (i = 0; i < recog_data.n_operands; i++)
2214 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2215 * sizeof (struct operand_alternative)));
2217 for (i = 0; i < recog_data.n_operands; i++)
2219 int j;
2220 struct operand_alternative *op_alt;
2221 const char *p = recog_data.constraints[i];
2223 op_alt = recog_op_alt[i];
2225 for (j = 0; j < recog_data.n_alternatives; j++)
2227 op_alt[j].cl = NO_REGS;
2228 op_alt[j].constraint = p;
2229 op_alt[j].matches = -1;
2230 op_alt[j].matched = -1;
2232 if (!recog_data.alternative_enabled_p[j])
2234 p = skip_alternative (p);
2235 continue;
2238 if (*p == '\0' || *p == ',')
2240 op_alt[j].anything_ok = 1;
2241 continue;
2244 for (;;)
2246 char c = *p;
2247 if (c == '#')
2249 c = *++p;
2250 while (c != ',' && c != '\0');
2251 if (c == ',' || c == '\0')
2253 p++;
2254 break;
2257 switch (c)
2259 case '=': case '+': case '*': case '%':
2260 case 'E': case 'F': case 'G': case 'H':
2261 case 's': case 'i': case 'n':
2262 case 'I': case 'J': case 'K': case 'L':
2263 case 'M': case 'N': case 'O': case 'P':
2264 /* These don't say anything we care about. */
2265 break;
2267 case '?':
2268 op_alt[j].reject += 6;
2269 break;
2270 case '!':
2271 op_alt[j].reject += 600;
2272 break;
2273 case '&':
2274 op_alt[j].earlyclobber = 1;
2275 break;
2277 case '0': case '1': case '2': case '3': case '4':
2278 case '5': case '6': case '7': case '8': case '9':
2280 char *end;
2281 op_alt[j].matches = strtoul (p, &end, 10);
2282 recog_op_alt[op_alt[j].matches][j].matched = i;
2283 p = end;
2285 continue;
2287 case TARGET_MEM_CONSTRAINT:
2288 op_alt[j].memory_ok = 1;
2289 break;
2290 case '<':
2291 op_alt[j].decmem_ok = 1;
2292 break;
2293 case '>':
2294 op_alt[j].incmem_ok = 1;
2295 break;
2296 case 'V':
2297 op_alt[j].nonoffmem_ok = 1;
2298 break;
2299 case 'o':
2300 op_alt[j].offmem_ok = 1;
2301 break;
2302 case 'X':
2303 op_alt[j].anything_ok = 1;
2304 break;
2306 case 'p':
2307 op_alt[j].is_address = 1;
2308 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2309 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2310 ADDRESS, SCRATCH)];
2311 break;
2313 case 'g':
2314 case 'r':
2315 op_alt[j].cl =
2316 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2317 break;
2319 default:
2320 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2322 op_alt[j].memory_ok = 1;
2323 break;
2325 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2327 op_alt[j].is_address = 1;
2328 op_alt[j].cl
2329 = (reg_class_subunion
2330 [(int) op_alt[j].cl]
2331 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2332 ADDRESS, SCRATCH)]);
2333 break;
2336 op_alt[j].cl
2337 = (reg_class_subunion
2338 [(int) op_alt[j].cl]
2339 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2340 break;
2342 p += CONSTRAINT_LEN (c, p);
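#if 0
/* Sketch (illustrative only): for an operand whose constraint string
   is "=r,m", the loop above records GENERAL_REGS as the class of
   alternative 0 and sets memory_ok for alternative 1.  INSN is
   hypothetical.  */
static bool
example_alt_allows_memory (rtx insn, int opno, int alt)
{
  extract_insn (insn);
  preprocess_constraints ();
  return recog_op_alt[opno][alt].memory_ok != 0;
}
#endif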
2348 /* Check the operands of an insn against the insn's operand constraints
2349 and return 1 if they are valid.
2350 The information about the insn's operands, constraints, operand modes
2351 etc. is obtained from the global variables set up by extract_insn.
2353 WHICH_ALTERNATIVE is set to a number which indicates which
2354 alternative of constraints was matched: 0 for the first alternative,
2355 1 for the next, etc.
2357 In addition, when two operands are required to match
2358 and it happens that the output operand is (reg) while the
2359 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2360 make the output operand look like the input.
2361 This is because the output operand is the one the template will print.
2363 This is used in final, just before printing the assembler code and by
2364 the routines that determine an insn's attribute.
2366 If STRICT is a positive value, it means that we have been
2367 called after reload has been completed. In that case, we must
2368 do all checks strictly. If it is zero, it means that we have been called
2369 before reload has completed. In that case, we first try to see if we can
2370 find an alternative that matches strictly. If not, we try again, this
2371 time assuming that reload will fix up the insn. This provides a "best
2372 guess" for the alternative and is used to compute attributes of insns prior
2373 to reload. A negative value of STRICT is used for this internal call. */
2375 struct funny_match
2377 int this_op, other;
2381 constrain_operands (int strict)
2383 const char *constraints[MAX_RECOG_OPERANDS];
2384 int matching_operands[MAX_RECOG_OPERANDS];
2385 int earlyclobber[MAX_RECOG_OPERANDS];
2386 int c;
2388 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2389 int funny_match_index;
2391 which_alternative = 0;
2392 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2393 return 1;
2395 for (c = 0; c < recog_data.n_operands; c++)
2397 constraints[c] = recog_data.constraints[c];
2398 matching_operands[c] = -1;
2403 int seen_earlyclobber_at = -1;
2404 int opno;
2405 int lose = 0;
2406 funny_match_index = 0;
2408 if (!recog_data.alternative_enabled_p[which_alternative])
2410 int i;
2412 for (i = 0; i < recog_data.n_operands; i++)
2413 constraints[i] = skip_alternative (constraints[i]);
2415 which_alternative++;
2416 continue;
2419 for (opno = 0; opno < recog_data.n_operands; opno++)
2421 rtx op = recog_data.operand[opno];
2422 enum machine_mode mode = GET_MODE (op);
2423 const char *p = constraints[opno];
2424 int offset = 0;
2425 int win = 0;
2426 int val;
2427 int len;
2429 earlyclobber[opno] = 0;
2431 /* A unary operator may be accepted by the predicate, but it
2432 is irrelevant for matching constraints. */
2433 if (UNARY_P (op))
2434 op = XEXP (op, 0);
2436 if (GET_CODE (op) == SUBREG)
2438 if (REG_P (SUBREG_REG (op))
2439 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2440 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2441 GET_MODE (SUBREG_REG (op)),
2442 SUBREG_BYTE (op),
2443 GET_MODE (op));
2444 op = SUBREG_REG (op);
2447 /* An empty constraint or empty alternative
2448 allows anything which matched the pattern. */
2449 if (*p == 0 || *p == ',')
2450 win = 1;
2453 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2455 case '\0':
2456 len = 0;
2457 break;
2458 case ',':
2459 c = '\0';
2460 break;
2462 case '?': case '!': case '*': case '%':
2463 case '=': case '+':
2464 break;
2466 case '#':
2467 /* Ignore rest of this alternative as far as
2468 constraint checking is concerned. */
2470 p++;
2471 while (*p && *p != ',');
2472 len = 0;
2473 break;
2475 case '&':
2476 earlyclobber[opno] = 1;
2477 if (seen_earlyclobber_at < 0)
2478 seen_earlyclobber_at = opno;
2479 break;
2481 case '0': case '1': case '2': case '3': case '4':
2482 case '5': case '6': case '7': case '8': case '9':
2484 /* This operand must be the same as a previous one.
2485 This kind of constraint is used for instructions such
2486 as add when they take only two operands.
2488 Note that the lower-numbered operand is passed first.
2490 If we are not testing strictly, assume that this
2491 constraint will be satisfied. */
2493 char *end;
2494 int match;
2496 match = strtoul (p, &end, 10);
2497 p = end;
2499 if (strict < 0)
2500 val = 1;
2501 else
2503 rtx op1 = recog_data.operand[match];
2504 rtx op2 = recog_data.operand[opno];
2506 /* A unary operator may be accepted by the predicate,
2507 but it is irrelevant for matching constraints. */
2508 if (UNARY_P (op1))
2509 op1 = XEXP (op1, 0);
2510 if (UNARY_P (op2))
2511 op2 = XEXP (op2, 0);
2513 val = operands_match_p (op1, op2);
2516 matching_operands[opno] = match;
2517 matching_operands[match] = opno;
2519 if (val != 0)
2520 win = 1;
2522 /* If output is *x and input is *--x, arrange later
2523 to change the output to *--x as well, since the
2524 output op is the one that will be printed. */
2525 if (val == 2 && strict > 0)
2527 funny_match[funny_match_index].this_op = opno;
2528 funny_match[funny_match_index++].other = match;
2531 len = 0;
2532 break;
2534 case 'p':
2535 /* p is used for address_operands. When we are called by
2536 gen_reload, no one will have checked that the address is
2537 strictly valid, i.e., that all pseudos requiring hard regs
2538 have gotten them. */
2539 if (strict <= 0
2540 || (strict_memory_address_p (recog_data.operand_mode[opno],
2541 op)))
2542 win = 1;
2543 break;
2545 /* No need to check general_operand again;
2546 it was done in insn-recog.c. Well, except that reload
2547 doesn't check the validity of its replacements, but
2548 that should only matter when there's a bug. */
2549 case 'g':
2550 /* Anything goes unless it is a REG and really has a hard reg
2551 but the hard reg is not in the class GENERAL_REGS. */
2552 if (REG_P (op))
2554 if (strict < 0
2555 || GENERAL_REGS == ALL_REGS
2556 || (reload_in_progress
2557 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2558 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2559 win = 1;
2561 else if (strict < 0 || general_operand (op, mode))
2562 win = 1;
2563 break;
2565 case 'X':
2566 /* This is used for a MATCH_SCRATCH in the cases when
2567 we don't actually need anything. So anything goes
2568 any time. */
2569 win = 1;
2570 break;
2572 case TARGET_MEM_CONSTRAINT:
2573 /* Memory operands must be valid, to the extent
2574 required by STRICT. */
2575 if (MEM_P (op))
2577 if (strict > 0
2578 && !strict_memory_address_addr_space_p
2579 (GET_MODE (op), XEXP (op, 0),
2580 MEM_ADDR_SPACE (op)))
2581 break;
2582 if (strict == 0
2583 && !memory_address_addr_space_p
2584 (GET_MODE (op), XEXP (op, 0),
2585 MEM_ADDR_SPACE (op)))
2586 break;
2587 win = 1;
2589 /* Before reload, accept what reload can turn into mem. */
2590 else if (strict < 0 && CONSTANT_P (op))
2591 win = 1;
2592 /* During reload, accept a pseudo. */
2593 else if (reload_in_progress && REG_P (op)
2594 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2595 win = 1;
2596 break;
2598 case '<':
2599 if (MEM_P (op)
2600 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2601 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2602 win = 1;
2603 break;
2605 case '>':
2606 if (MEM_P (op)
2607 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2608 || GET_CODE (XEXP (op, 0)) == POST_INC))
2609 win = 1;
2610 break;
2612 case 'E':
2613 case 'F':
2614 if (CONST_DOUBLE_AS_FLOAT_P (op)
2615 || (GET_CODE (op) == CONST_VECTOR
2616 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2617 win = 1;
2618 break;
2620 case 'G':
2621 case 'H':
2622 if (CONST_DOUBLE_AS_FLOAT_P (op)
2623 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2624 win = 1;
2625 break;
2627 case 's':
2628 if (CONST_SCALAR_INT_P (op))
2629 break;
2630 case 'i':
2631 if (CONSTANT_P (op))
2632 win = 1;
2633 break;
2635 case 'n':
2636 if (CONST_SCALAR_INT_P (op))
2637 win = 1;
2638 break;
2640 case 'I':
2641 case 'J':
2642 case 'K':
2643 case 'L':
2644 case 'M':
2645 case 'N':
2646 case 'O':
2647 case 'P':
2648 if (CONST_INT_P (op)
2649 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2650 win = 1;
2651 break;
2653 case 'V':
2654 if (MEM_P (op)
2655 && ((strict > 0 && ! offsettable_memref_p (op))
2656 || (strict < 0
2657 && !(CONSTANT_P (op) || MEM_P (op)))
2658 || (reload_in_progress
2659 && !(REG_P (op)
2660 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2661 win = 1;
2662 break;
2664 case 'o':
2665 if ((strict > 0 && offsettable_memref_p (op))
2666 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2667 /* Before reload, accept what reload can handle. */
2668 || (strict < 0
2669 && (CONSTANT_P (op) || MEM_P (op)))
2670 /* During reload, accept a pseudo. */
2671 || (reload_in_progress && REG_P (op)
2672 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2673 win = 1;
2674 break;
2676 default:
2678 enum reg_class cl;
2680 cl = (c == 'r'
2681 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2682 if (cl != NO_REGS)
2684 if (strict < 0
2685 || (strict == 0
2686 && REG_P (op)
2687 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2688 || (strict == 0 && GET_CODE (op) == SCRATCH)
2689 || (REG_P (op)
2690 && reg_fits_class_p (op, cl, offset, mode)))
2691 win = 1;
2693 #ifdef EXTRA_CONSTRAINT_STR
2694 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2695 win = 1;
2697 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2698 /* Every memory operand can be reloaded to fit. */
2699 && ((strict < 0 && MEM_P (op))
2700 /* Before reload, accept what reload can turn
2701 into mem. */
2702 || (strict < 0 && CONSTANT_P (op))
2703 /* During reload, accept a pseudo. */
2704 || (reload_in_progress && REG_P (op)
2705 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2706 win = 1;
2707 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2708 /* Every address operand can be reloaded to fit. */
2709 && strict < 0)
2710 win = 1;
2711 /* Cater to architectures like IA-64 that define extra memory
2712 constraints without using define_memory_constraint. */
2713 else if (reload_in_progress
2714 && REG_P (op)
2715 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2716 && reg_renumber[REGNO (op)] < 0
2717 && reg_equiv_mem (REGNO (op)) != 0
2718 && EXTRA_CONSTRAINT_STR
2719 (reg_equiv_mem (REGNO (op)), c, p))
2720 win = 1;
2721 #endif
2722 break;
2725 while (p += len, c);
2727 constraints[opno] = p;
2728 /* If this operand did not win somehow,
2729 this alternative loses. */
2730 if (! win)
2731 lose = 1;
2733 /* This alternative won; the operands are ok.
2734 Change whichever operands this alternative says to change. */
2735 if (! lose)
2737 int opno, eopno;
2739 /* See if any earlyclobber operand conflicts with some other
2740 operand. */
2742 if (strict > 0 && seen_earlyclobber_at >= 0)
2743 for (eopno = seen_earlyclobber_at;
2744 eopno < recog_data.n_operands;
2745 eopno++)
2746 /* Ignore earlyclobber operands now in memory,
2747 because we would often report failure when we have
2748 two memory operands, one of which was formerly a REG. */
2749 if (earlyclobber[eopno]
2750 && REG_P (recog_data.operand[eopno]))
2751 for (opno = 0; opno < recog_data.n_operands; opno++)
2752 if ((MEM_P (recog_data.operand[opno])
2753 || recog_data.operand_type[opno] != OP_OUT)
2754 && opno != eopno
2755 /* Ignore things like match_operator operands. */
2756 && *recog_data.constraints[opno] != 0
2757 && ! (matching_operands[opno] == eopno
2758 && operands_match_p (recog_data.operand[opno],
2759 recog_data.operand[eopno]))
2760 && ! safe_from_earlyclobber (recog_data.operand[opno],
2761 recog_data.operand[eopno]))
2762 lose = 1;
2764 if (! lose)
2766 while (--funny_match_index >= 0)
2768 recog_data.operand[funny_match[funny_match_index].other]
2769 = recog_data.operand[funny_match[funny_match_index].this_op];
2772 #ifdef AUTO_INC_DEC
2773 /* For operands without < or > constraints reject side-effects. */
2774 if (recog_data.is_asm)
2776 for (opno = 0; opno < recog_data.n_operands; opno++)
2777 if (MEM_P (recog_data.operand[opno]))
2778 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2780 case PRE_INC:
2781 case POST_INC:
2782 case PRE_DEC:
2783 case POST_DEC:
2784 case PRE_MODIFY:
2785 case POST_MODIFY:
2786 if (strchr (recog_data.constraints[opno], '<') == NULL
2787 && strchr (recog_data.constraints[opno], '>')
2788 == NULL)
2789 return 0;
2790 break;
2791 default:
2792 break;
2795 #endif
2796 return 1;
2800 which_alternative++;
2802 while (which_alternative < recog_data.n_alternatives);
2804 which_alternative = -1;
2805 /* If we are about to reject this, but we are not to test strictly,
2806 try a very loose test. Return failure only if that fails as well. */
2807 if (strict == 0)
2808 return constrain_operands (-1);
2809 else
2810 return 0;
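#if 0
/* Sketch (illustrative only) of the usual strictness discipline:
   before reload a "best guess" match is acceptable, while after
   reload every operand must satisfy its constraint exactly, so
   callers simply pass reload_completed as STRICT.  */
static void
example_check_insn (rtx insn)
{
  extract_insn (insn);
  if (!constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
#endif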
2813 /* Return true iff OPERAND (assumed to be a REG rtx)
2814 is a hard reg in class CLASS when its regno is offset by OFFSET
2815 and changed to mode MODE.
2816 If REG occupies multiple hard regs, all of them must be in CLASS. */
2818 bool
2819 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2820 enum machine_mode mode)
2822 unsigned int regno = REGNO (operand);
2824 if (cl == NO_REGS)
2825 return false;
2827 /* Regno must not be a pseudo register. Offset may be negative. */
2828 return (HARD_REGISTER_NUM_P (regno)
2829 && HARD_REGISTER_NUM_P (regno + offset)
2830 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2831 regno + offset));
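#if 0
/* Sketch (illustrative only): asks whether hard register 1, viewed
   in SImode with no offset, lies entirely within GENERAL_REGS.  The
   register number is hypothetical.  */
static bool
example_fits_general_regs (void)
{
  rtx reg = gen_rtx_REG (SImode, 1);
  return reg_fits_class_p (reg, GENERAL_REGS, 0, SImode);
}
#endif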
2834 /* Split a single instruction. Helper function for split_all_insns and
2835 split_all_insns_noflow. Return the last insn in the sequence if
2836 successful, or NULL if unsuccessful. */
2838 static rtx
2839 split_insn (rtx insn)
2841 /* Split insns here to get max fine-grain parallelism. */
2842 rtx first = PREV_INSN (insn);
2843 rtx last = try_split (PATTERN (insn), insn, 1);
2844 rtx insn_set, last_set, note;
2846 if (last == insn)
2847 return NULL_RTX;
2849 /* If the original instruction was a single set that was known to be
2850 equivalent to a constant, see if we can say the same about the last
2851 instruction in the split sequence. The two instructions must set
2852 the same destination. */
2853 insn_set = single_set (insn);
2854 if (insn_set)
2856 last_set = single_set (last);
2857 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2859 note = find_reg_equal_equiv_note (insn);
2860 if (note && CONSTANT_P (XEXP (note, 0)))
2861 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2862 else if (CONSTANT_P (SET_SRC (insn_set)))
2863 set_unique_reg_note (last, REG_EQUAL,
2864 copy_rtx (SET_SRC (insn_set)));
2868 /* try_split returns the NOTE that INSN became. */
2869 SET_INSN_DELETED (insn);
2871 /* ??? Coddle to md files that generate subregs in post-reload
2872 splitters instead of computing the proper hard register. */
2873 if (reload_completed && first != last)
2875 first = NEXT_INSN (first);
2876 for (;;)
2878 if (INSN_P (first))
2879 cleanup_subreg_operands (first);
2880 if (first == last)
2881 break;
2882 first = NEXT_INSN (first);
2886 return last;
2889 /* Split all insns in the function. */
2891 void
2892 split_all_insns (void)
2894 sbitmap blocks;
2895 bool changed;
2896 basic_block bb;
2898 blocks = sbitmap_alloc (last_basic_block);
2899 bitmap_clear (blocks);
2900 changed = false;
2902 FOR_EACH_BB_REVERSE (bb)
2904 rtx insn, next;
2905 bool finish = false;
2907 rtl_profile_for_bb (bb);
2908 for (insn = BB_HEAD (bb); !finish ; insn = next)
2910 /* Can't use `next_real_insn' because that might go across
2911 CODE_LABELS and short-out basic blocks. */
2912 next = NEXT_INSN (insn);
2913 finish = (insn == BB_END (bb));
2914 if (INSN_P (insn))
2916 rtx set = single_set (insn);
2918 /* Don't split no-op move insns. These should silently
2919 disappear later in final. Splitting such insns would
2920 break the code that handles LIBCALL blocks. */
2921 if (set && set_noop_p (set))
2923 /* Nops get in the way while scheduling, so delete them
2924 now if register allocation has already been done. It
2925 is too risky to try to do this before register
2926 allocation, and there are unlikely to be very many
2927 nops then anyway. */
2928 if (reload_completed)
2929 delete_insn_and_edges (insn);
2931 else
2933 if (split_insn (insn))
2935 bitmap_set_bit (blocks, bb->index);
2936 changed = true;
2943 default_rtl_profile ();
2944 if (changed)
2945 find_many_sub_basic_blocks (blocks);
2947 #ifdef ENABLE_CHECKING
2948 verify_flow_info ();
2949 #endif
2951 sbitmap_free (blocks);
2954 /* Same as split_all_insns, but do not expect CFG to be available.
2955 Used by machine dependent reorg passes. */
2957 unsigned int
2958 split_all_insns_noflow (void)
2960 rtx next, insn;
2962 for (insn = get_insns (); insn; insn = next)
2964 next = NEXT_INSN (insn);
2965 if (INSN_P (insn))
2967 /* Don't split no-op move insns. These should silently
2968 disappear later in final. Splitting such insns would
2969 break the code that handles LIBCALL blocks. */
2970 rtx set = single_set (insn);
2971 if (set && set_noop_p (set))
2973 /* Nops get in the way while scheduling, so delete them
2974 now if register allocation has already been done. It
2975 is too risky to try to do this before register
2976 allocation, and there are unlikely to be very many
2977 nops then anyway.
2979 ??? Should we use delete_insn when the CFG isn't valid? */
2980 if (reload_completed)
2981 delete_insn_and_edges (insn);
2983 else
2984 split_insn (insn);
2987 return 0;
2990 #ifdef HAVE_peephole2
2991 struct peep2_insn_data
2993 rtx insn;
2994 regset live_before;
2997 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2998 static int peep2_current;
3000 static bool peep2_do_rebuild_jump_labels;
3001 static bool peep2_do_cleanup_cfg;
3003 /* The number of instructions available to match a peep2. */
3004 int peep2_current_count;
3006 /* A non-insn marker indicating the last insn of the block.
3007 The live_before regset for this element is correct, indicating
3008 DF_LIVE_OUT for the block. */
3009 #define PEEP2_EOB pc_rtx
3011 /* Wrap N to fit into the peep2_insn_data buffer. */
3013 static int
3014 peep2_buf_position (int n)
3016 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3017 n -= MAX_INSNS_PER_PEEP2 + 1;
3018 return n;
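#if 0
/* Sketch (illustrative only): the buffer is circular with
   MAX_INSNS_PER_PEEP2 + 1 slots, so an index one past the last slot
   wraps back to 0.  */
static void
example_wrap (void)
{
  gcc_assert (peep2_buf_position (MAX_INSNS_PER_PEEP2 + 1) == 0);
}
#endif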
3021 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3022 does not exist. Used by the recognizer to find the next insn to match
3023 in a multi-insn pattern. */
3026 peep2_next_insn (int n)
3028 gcc_assert (n <= peep2_current_count);
3030 n = peep2_buf_position (peep2_current + n);
3032 return peep2_insn_data[n].insn;
3035 /* Return true if REGNO is dead before the Nth non-note insn
3036 after `current'. */
3039 peep2_regno_dead_p (int ofs, int regno)
3041 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3043 ofs = peep2_buf_position (peep2_current + ofs);
3045 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3047 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3050 /* Similarly for a REG. */
3053 peep2_reg_dead_p (int ofs, rtx reg)
3055 int regno, n;
3057 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3059 ofs = peep2_buf_position (peep2_current + ofs);
3061 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3063 regno = REGNO (reg);
3064 n = hard_regno_nregs[regno][GET_MODE (reg)];
3065 while (--n >= 0)
3066 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3067 return 0;
3068 return 1;
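#if 0
/* Sketch (illustrative only): a define_peephole2 condition typically
   uses these tests to prove a register may be clobbered, e.g.

     peep2_reg_dead_p (2, operands[0])

   requires that the register matched as operand 0 die before the
   third insn of the window ("operands" is the match array available
   in md conditions).  */
#endif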
3071 /* Try to find a hard register of mode MODE, matching the register class in
3072 CLASS_STR, which is available at the beginning of the insn at buffer
3073 position FROM and remains available until the end of the insn at
3074 buffer position TO.
3076 Registers that already have bits set in REG_SET will not be considered.
3078 If an appropriate register is available, it will be returned and the
3079 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3080 returned. */
3083 peep2_find_free_register (int from, int to, const char *class_str,
3084 enum machine_mode mode, HARD_REG_SET *reg_set)
3086 static int search_ofs;
3087 enum reg_class cl;
3088 HARD_REG_SET live;
3089 df_ref *def_rec;
3090 int i;
3092 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3093 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3095 from = peep2_buf_position (peep2_current + from);
3096 to = peep2_buf_position (peep2_current + to);
3098 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3099 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3101 while (from != to)
3103 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3105 /* Don't use registers set or clobbered by the insn. */
3106 for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3107 *def_rec; def_rec++)
3108 SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3110 from = peep2_buf_position (from + 1);
3113 cl = (class_str[0] == 'r' ? GENERAL_REGS
3114 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3116 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3118 int raw_regno, regno, success, j;
3120 /* Distribute the free registers as much as possible. */
3121 raw_regno = search_ofs + i;
3122 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3123 raw_regno -= FIRST_PSEUDO_REGISTER;
3124 #ifdef REG_ALLOC_ORDER
3125 regno = reg_alloc_order[raw_regno];
3126 #else
3127 regno = raw_regno;
3128 #endif
3130 /* Can it support the mode we need? */
3131 if (! HARD_REGNO_MODE_OK (regno, mode))
3132 continue;
3134 success = 1;
3135 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3137 /* Don't allocate fixed registers. */
3138 if (fixed_regs[regno + j])
3140 success = 0;
3141 break;
3143 /* Don't allocate global registers. */
3144 if (global_regs[regno + j])
3146 success = 0;
3147 break;
3149 /* Make sure the register is of the right class. */
3150 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3152 success = 0;
3153 break;
3155 /* And that we don't create an extra save/restore. */
3156 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3158 success = 0;
3159 break;
3162 if (! targetm.hard_regno_scratch_ok (regno + j))
3164 success = 0;
3165 break;
3168 /* And we don't clobber traceback for noreturn functions. */
3169 if ((regno + j == FRAME_POINTER_REGNUM
3170 || regno + j == HARD_FRAME_POINTER_REGNUM)
3171 && (! reload_completed || frame_pointer_needed))
3173 success = 0;
3174 break;
3177 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3178 || TEST_HARD_REG_BIT (live, regno + j))
3180 success = 0;
3181 break;
3185 if (success)
3187 add_to_hard_reg_set (reg_set, mode, regno);
3189 /* Start the next search with the next register. */
3190 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3191 raw_regno = 0;
3192 search_ofs = raw_regno;
3194 return gen_rtx_REG (mode, regno);
3198 search_ofs = 0;
3199 return NULL_RTX;
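#if 0
/* Sketch (illustrative only): how a peephole2 condition might grab a
   scratch register that stays free across the first two insns of the
   window; NULL_RTX means no such register exists.  */
static rtx
example_scratch (void)
{
  HARD_REG_SET used;
  CLEAR_HARD_REG_SET (used);
  return peep2_find_free_register (0, 1, "r", SImode, &used);
}
#endif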
3202 /* Forget all currently tracked instructions, only remember current
3203 LIVE regset. */
3205 static void
3206 peep2_reinit_state (regset live)
3208 int i;
3210 /* Indicate that all slots except the last hold invalid data. */
3211 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3212 peep2_insn_data[i].insn = NULL_RTX;
3213 peep2_current_count = 0;
3215 /* Indicate that the last slot contains live_after data. */
3216 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3217 peep2_current = MAX_INSNS_PER_PEEP2;
3219 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3222 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3223 starting at INSN. Perform the replacement, removing the old insns and
3224 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3225 if the replacement is rejected. */
3227 static rtx
3228 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3230 int i;
3231 rtx last, eh_note, as_note, before_try, x;
3232 rtx old_insn, new_insn;
3233 bool was_call = false;
3235 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3236 match more than one insn, or to be split into more than one insn. */
3237 old_insn = peep2_insn_data[peep2_current].insn;
3238 if (RTX_FRAME_RELATED_P (old_insn))
3240 bool any_note = false;
3241 rtx note;
3243 if (match_len != 0)
3244 return NULL;
3246 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3247 may be in the stream for the purpose of register allocation. */
3248 if (active_insn_p (attempt))
3249 new_insn = attempt;
3250 else
3251 new_insn = next_active_insn (attempt);
3252 if (next_active_insn (new_insn))
3253 return NULL;
3255 /* We have a 1-1 replacement. Copy over any frame-related info. */
3256 RTX_FRAME_RELATED_P (new_insn) = 1;
3258 /* Allow the backend to fill in a note during the split. */
3259 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3260 switch (REG_NOTE_KIND (note))
3262 case REG_FRAME_RELATED_EXPR:
3263 case REG_CFA_DEF_CFA:
3264 case REG_CFA_ADJUST_CFA:
3265 case REG_CFA_OFFSET:
3266 case REG_CFA_REGISTER:
3267 case REG_CFA_EXPRESSION:
3268 case REG_CFA_RESTORE:
3269 case REG_CFA_SET_VDRAP:
3270 any_note = true;
3271 break;
3272 default:
3273 break;
3276 /* If the backend didn't supply a note, copy one over. */
3277 if (!any_note)
3278 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3279 switch (REG_NOTE_KIND (note))
3281 case REG_FRAME_RELATED_EXPR:
3282 case REG_CFA_DEF_CFA:
3283 case REG_CFA_ADJUST_CFA:
3284 case REG_CFA_OFFSET:
3285 case REG_CFA_REGISTER:
3286 case REG_CFA_EXPRESSION:
3287 case REG_CFA_RESTORE:
3288 case REG_CFA_SET_VDRAP:
3289 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3290 any_note = true;
3291 break;
3292 default:
3293 break;
3296 /* If there still isn't a note, make sure the unwind info sees the
3297 same expression as before the split. */
3298 if (!any_note)
3300 rtx old_set, new_set;
3302 /* The old insn had better have been simple, or annotated. */
3303 old_set = single_set (old_insn);
3304 gcc_assert (old_set != NULL);
3306 new_set = single_set (new_insn);
3307 if (!new_set || !rtx_equal_p (new_set, old_set))
3308 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3311 /* Copy prologue/epilogue status. This is required in order to keep
3312 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3313 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3316 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3317 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3318 cfg-related call notes. */
3319 for (i = 0; i <= match_len; ++i)
3321 int j;
3322 rtx note;
3324 j = peep2_buf_position (peep2_current + i);
3325 old_insn = peep2_insn_data[j].insn;
3326 if (!CALL_P (old_insn))
3327 continue;
3328 was_call = true;
3330 new_insn = attempt;
3331 while (new_insn != NULL_RTX)
3333 if (CALL_P (new_insn))
3334 break;
3335 new_insn = NEXT_INSN (new_insn);
3338 gcc_assert (new_insn != NULL_RTX);
3340 CALL_INSN_FUNCTION_USAGE (new_insn)
3341 = CALL_INSN_FUNCTION_USAGE (old_insn);
3343 for (note = REG_NOTES (old_insn);
3344 note;
3345 note = XEXP (note, 1))
3346 switch (REG_NOTE_KIND (note))
3348 case REG_NORETURN:
3349 case REG_SETJMP:
3350 case REG_TM:
3351 add_reg_note (new_insn, REG_NOTE_KIND (note),
3352 XEXP (note, 0));
3353 break;
3354 default:
3355 /* Discard all other reg notes. */
3356 break;
3359 /* Croak if there is another call in the sequence. */
3360 while (++i <= match_len)
3362 j = peep2_buf_position (peep2_current + i);
3363 old_insn = peep2_insn_data[j].insn;
3364 gcc_assert (!CALL_P (old_insn));
3366 break;
3369 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3370 move those notes over to the new sequence. */
3371 as_note = NULL;
3372 for (i = match_len; i >= 0; --i)
3374 int j = peep2_buf_position (peep2_current + i);
3375 old_insn = peep2_insn_data[j].insn;
3377 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3378 if (as_note)
3379 break;
3382 i = peep2_buf_position (peep2_current + match_len);
3383 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3385 /* Replace the old sequence with the new. */
3386 last = emit_insn_after_setloc (attempt,
3387 peep2_insn_data[i].insn,
3388 INSN_LOCATION (peep2_insn_data[i].insn));
3389 before_try = PREV_INSN (insn);
3390 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3392 /* Re-insert the EH_REGION notes. */
3393 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3395 edge eh_edge;
3396 edge_iterator ei;
3398 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3399 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3400 break;
3402 if (eh_note)
3403 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3405 if (eh_edge)
3406 for (x = last; x != before_try; x = PREV_INSN (x))
3407 if (x != BB_END (bb)
3408 && (can_throw_internal (x)
3409 || can_nonlocal_goto (x)))
3411 edge nfte, nehe;
3412 int flags;
3414 nfte = split_block (bb, x);
3415 flags = (eh_edge->flags
3416 & (EDGE_EH | EDGE_ABNORMAL));
3417 if (CALL_P (x))
3418 flags |= EDGE_ABNORMAL_CALL;
3419 nehe = make_edge (nfte->src, eh_edge->dest,
3420 flags);
3422 nehe->probability = eh_edge->probability;
3423 nfte->probability
3424 = REG_BR_PROB_BASE - nehe->probability;
3426 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3427 bb = nfte->src;
3428 eh_edge = nehe;
3431 /* The replacement may have turned a possibly trapping insn into a
3432 non-trapping one. Zap any dummy outgoing edges. */
3433 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3436 /* Re-insert the ARGS_SIZE notes. */
3437 if (as_note)
3438 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3440 /* If we generated a jump instruction, it won't have
3441 JUMP_LABEL set. Recompute after we're done. */
3442 for (x = last; x != before_try; x = PREV_INSN (x))
3443 if (JUMP_P (x))
3445 peep2_do_rebuild_jump_labels = true;
3446 break;
3449 return last;
3452 /* After performing a replacement in basic block BB, fix up the life
3453 information in our buffer. LAST is the last of the insns that we
3454 emitted as a replacement. PREV is the insn before the start of
3455 the replacement. MATCH_LEN is the number of instructions that were
3456 matched, and which now need to be replaced in the buffer. */
3458 static void
3459 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3461 int i = peep2_buf_position (peep2_current + match_len + 1);
3462 rtx x;
3463 regset_head live;
3465 INIT_REG_SET (&live);
3466 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3468 gcc_assert (peep2_current_count >= match_len + 1);
3469 peep2_current_count -= match_len + 1;
3471 x = last;
3474 if (INSN_P (x))
3476 df_insn_rescan (x);
3477 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3479 peep2_current_count++;
3480 if (--i < 0)
3481 i = MAX_INSNS_PER_PEEP2;
3482 peep2_insn_data[i].insn = x;
3483 df_simulate_one_insn_backwards (bb, x, &live);
3484 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3487 x = PREV_INSN (x);
3489 while (x != prev);
3490 CLEAR_REG_SET (&live);
3492 peep2_current = i;
3495 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3496 Return true if we added it, false otherwise. The caller will try to match
3497 peepholes against the buffer if we return false; otherwise it will try to
3498 add more instructions to the buffer. */
3500 static bool
3501 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3503 int pos;
3505 /* Once we have filled the maximum number of insns the buffer can hold,
3506 allow the caller to match the insns against peepholes. We wait until
3507 the buffer is full in case the target has similar peepholes of different
3508 length; we always want to match the longest if possible. */
3509 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3510 return false;
3512 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3513 any other pattern, lest it change the semantics of the frame info. */
3514 if (RTX_FRAME_RELATED_P (insn))
3516 /* Let the buffer drain first. */
3517 if (peep2_current_count > 0)
3518 return false;
3519 /* Now the insn will be the only thing in the buffer. */
3522 pos = peep2_buf_position (peep2_current + peep2_current_count);
3523 peep2_insn_data[pos].insn = insn;
3524 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3525 peep2_current_count++;
3527 df_simulate_one_insn_forwards (bb, insn, live);
3528 return true;
3531 /* Perform the peephole2 optimization pass. */
3533 static void
3534 peephole2_optimize (void)
3536 rtx insn;
3537 bitmap live;
3538 int i;
3539 basic_block bb;
3541 peep2_do_cleanup_cfg = false;
3542 peep2_do_rebuild_jump_labels = false;
3544 df_set_flags (DF_LR_RUN_DCE);
3545 df_note_add_problem ();
3546 df_analyze ();
3548 /* Initialize the regsets we're going to use. */
3549 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3550 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3551 live = BITMAP_ALLOC (&reg_obstack);
3553 FOR_EACH_BB_REVERSE (bb)
3555 bool past_end = false;
3556 int pos;
3558 rtl_profile_for_bb (bb);
3560 /* Start up propagation. */
3561 bitmap_copy (live, DF_LR_IN (bb));
3562 df_simulate_initialize_forwards (bb, live);
3563 peep2_reinit_state (live);
3565 insn = BB_HEAD (bb);
3566 for (;;)
3568 rtx attempt, head;
3569 int match_len;
3571 if (!past_end && !NONDEBUG_INSN_P (insn))
3573 next_insn:
3574 insn = NEXT_INSN (insn);
3575 if (insn == NEXT_INSN (BB_END (bb)))
3576 past_end = true;
3577 continue;
3579 if (!past_end && peep2_fill_buffer (bb, insn, live))
3580 goto next_insn;
3582 /* If we did not fill an empty buffer, it signals the end of the
3583 block. */
3584 if (peep2_current_count == 0)
3585 break;
3587 /* The buffer filled to the current maximum, so try to match. */
3589 pos = peep2_buf_position (peep2_current + peep2_current_count);
3590 peep2_insn_data[pos].insn = PEEP2_EOB;
3591 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3593 /* Match the peephole. */
3594 head = peep2_insn_data[peep2_current].insn;
3595 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3596 if (attempt != NULL)
3598 rtx last = peep2_attempt (bb, head, match_len, attempt);
3599 if (last)
3601 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3602 continue;
3606 /* No match: advance the buffer by one insn. */
3607 peep2_current = peep2_buf_position (peep2_current + 1);
3608 peep2_current_count--;
3612 default_rtl_profile ();
3613 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3614 BITMAP_FREE (peep2_insn_data[i].live_before);
3615 BITMAP_FREE (live);
3616 if (peep2_do_rebuild_jump_labels)
3617 rebuild_jump_labels (get_insns ());
3619 #endif /* HAVE_peephole2 */
3621 /* Common predicates for use with define_bypass. */
3623 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3624 data, not the address operand(s) of the store. IN_INSN and OUT_INSN
3625 must be either a single_set or a PARALLEL with SETs inside. */
3628 store_data_bypass_p (rtx out_insn, rtx in_insn)
3630 rtx out_set, in_set;
3631 rtx out_pat, in_pat;
3632 rtx out_exp, in_exp;
3633 int i, j;
3635 in_set = single_set (in_insn);
3636 if (in_set)
3638 if (!MEM_P (SET_DEST (in_set)))
3639 return false;
3641 out_set = single_set (out_insn);
3642 if (out_set)
3644 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3645 return false;
3647 else
3649 out_pat = PATTERN (out_insn);
3651 if (GET_CODE (out_pat) != PARALLEL)
3652 return false;
3654 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3656 out_exp = XVECEXP (out_pat, 0, i);
3658 if (GET_CODE (out_exp) == CLOBBER)
3659 continue;
3661 gcc_assert (GET_CODE (out_exp) == SET);
3663 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3664 return false;
3668 else
3670 in_pat = PATTERN (in_insn);
3671 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3673 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3675 in_exp = XVECEXP (in_pat, 0, i);
3677 if (GET_CODE (in_exp) == CLOBBER)
3678 continue;
3680 gcc_assert (GET_CODE (in_exp) == SET);
3682 if (!MEM_P (SET_DEST (in_exp)))
3683 return false;
3685 out_set = single_set (out_insn);
3686 if (out_set)
3688 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3689 return false;
3691 else
3693 out_pat = PATTERN (out_insn);
3694 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3696 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3698 out_exp = XVECEXP (out_pat, 0, j);
3700 if (GET_CODE (out_exp) == CLOBBER)
3701 continue;
3703 gcc_assert (GET_CODE (out_exp) == SET);
3705 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3706 return false;
3712 return true;
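#if 0
/* Sketch (illustrative only): in a target's .md scheduling
   description, this predicate serves as a define_bypass guard; the
   reservation names below are hypothetical.

     (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")

   The shorter latency is granted only when the dependence is on the
   stored data rather than the address.  */
#endif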
3715 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3716 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3717 or multiple set; IN_INSN should be a single_set for this to be exact, but
3718 for convenience of insn categorization it may be any JUMP or CALL insn. */
3721 if_test_bypass_p (rtx out_insn, rtx in_insn)
3723 rtx out_set, in_set;
3725 in_set = single_set (in_insn);
3726 if (! in_set)
3728 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3729 return false;
3732 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3733 return false;
3734 in_set = SET_SRC (in_set);
3736 out_set = single_set (out_insn);
3737 if (out_set)
3739 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3740 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3741 return false;
3743 else
3745 rtx out_pat;
3746 int i;
3748 out_pat = PATTERN (out_insn);
3749 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3751 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3753 rtx exp = XVECEXP (out_pat, 0, i);
3755 if (GET_CODE (exp) == CLOBBER)
3756 continue;
3758 gcc_assert (GET_CODE (exp) == SET);
3760 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3761 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3762 return false;
3766 return true;
3769 static bool
3770 gate_handle_peephole2 (void)
3772 return (optimize > 0 && flag_peephole2);
3775 static unsigned int
3776 rest_of_handle_peephole2 (void)
3778 #ifdef HAVE_peephole2
3779 peephole2_optimize ();
3780 #endif
3781 return 0;
3784 namespace {
3786 const pass_data pass_data_peephole2 =
3788 RTL_PASS, /* type */
3789 "peephole2", /* name */
3790 OPTGROUP_NONE, /* optinfo_flags */
3791 true, /* has_gate */
3792 true, /* has_execute */
3793 TV_PEEPHOLE2, /* tv_id */
3794 0, /* properties_required */
3795 0, /* properties_provided */
3796 0, /* properties_destroyed */
3797 0, /* todo_flags_start */
3798 ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
3801 class pass_peephole2 : public rtl_opt_pass
3803 public:
3804 pass_peephole2 (gcc::context *ctxt)
3805 : rtl_opt_pass (pass_data_peephole2, ctxt)
3808 /* opt_pass methods: */
3809 /* The epiphany backend creates a second instance of this pass, so we need
3810 a clone method. */
3811 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3812 bool gate () { return gate_handle_peephole2 (); }
3813 unsigned int execute () { return rest_of_handle_peephole2 (); }
3815 }; // class pass_peephole2
3817 } // anon namespace
3819 rtl_opt_pass *
3820 make_pass_peephole2 (gcc::context *ctxt)
3822 return new pass_peephole2 (ctxt);
3825 static unsigned int
3826 rest_of_handle_split_all_insns (void)
3828 split_all_insns ();
3829 return 0;
3832 namespace {
3834 const pass_data pass_data_split_all_insns =
3836 RTL_PASS, /* type */
3837 "split1", /* name */
3838 OPTGROUP_NONE, /* optinfo_flags */
3839 false, /* has_gate */
3840 true, /* has_execute */
3841 TV_NONE, /* tv_id */
3842 0, /* properties_required */
3843 0, /* properties_provided */
3844 0, /* properties_destroyed */
3845 0, /* todo_flags_start */
3846 0, /* todo_flags_finish */
3849 class pass_split_all_insns : public rtl_opt_pass
3851 public:
3852 pass_split_all_insns (gcc::context *ctxt)
3853 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3856 /* opt_pass methods: */
3857 /* The epiphany backend creates a second instance of this pass, so
3858 we need a clone method. */
3859 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3860 unsigned int execute () { return rest_of_handle_split_all_insns (); }
3862 }; // class pass_split_all_insns
3864 } // anon namespace
3866 rtl_opt_pass *
3867 make_pass_split_all_insns (gcc::context *ctxt)
3869 return new pass_split_all_insns (ctxt);
3872 static unsigned int
3873 rest_of_handle_split_after_reload (void)
3875 /* If optimizing, then go ahead and split insns now. */
3876 #ifndef STACK_REGS
3877 if (optimize > 0)
3878 #endif
3879 split_all_insns ();
3880 return 0;
3883 namespace {
3885 const pass_data pass_data_split_after_reload =
3887 RTL_PASS, /* type */
3888 "split2", /* name */
3889 OPTGROUP_NONE, /* optinfo_flags */
3890 false, /* has_gate */
3891 true, /* has_execute */
3892 TV_NONE, /* tv_id */
3893 0, /* properties_required */
3894 0, /* properties_provided */
3895 0, /* properties_destroyed */
3896 0, /* todo_flags_start */
3897 0, /* todo_flags_finish */
3900 class pass_split_after_reload : public rtl_opt_pass
3902 public:
3903 pass_split_after_reload (gcc::context *ctxt)
3904 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3907 /* opt_pass methods: */
3908 unsigned int execute () { return rest_of_handle_split_after_reload (); }
3910 }; // class pass_split_after_reload
3912 } // anon namespace
3914 rtl_opt_pass *
3915 make_pass_split_after_reload (gcc::context *ctxt)
3917 return new pass_split_after_reload (ctxt);
3920 static bool
3921 gate_handle_split_before_regstack (void)
3923 #if HAVE_ATTR_length && defined (STACK_REGS)
3924 /* If flow2 creates new instructions which need splitting, and
3925 scheduling after reload is not done, they might not be split
3926 until final, which does not allow splitting when HAVE_ATTR_length
3927 is defined. */
3928 # ifdef INSN_SCHEDULING
3929 return (optimize && !flag_schedule_insns_after_reload);
3930 # else
3931 return (optimize);
3932 # endif
3933 #else
3934 return 0;
3935 #endif
3938 static unsigned int
3939 rest_of_handle_split_before_regstack (void)
3941 split_all_insns ();
3942 return 0;
3945 namespace {
3947 const pass_data pass_data_split_before_regstack =
3949 RTL_PASS, /* type */
3950 "split3", /* name */
3951 OPTGROUP_NONE, /* optinfo_flags */
3952 true, /* has_gate */
3953 true, /* has_execute */
3954 TV_NONE, /* tv_id */
3955 0, /* properties_required */
3956 0, /* properties_provided */
3957 0, /* properties_destroyed */
3958 0, /* todo_flags_start */
3959 0, /* todo_flags_finish */
3962 class pass_split_before_regstack : public rtl_opt_pass
3964 public:
3965 pass_split_before_regstack (gcc::context *ctxt)
3966 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3969 /* opt_pass methods: */
3970 bool gate () { return gate_handle_split_before_regstack (); }
3971 unsigned int execute () {
3972 return rest_of_handle_split_before_regstack ();
3975 }; // class pass_split_before_regstack
3977 } // anon namespace
3979 rtl_opt_pass *
3980 make_pass_split_before_regstack (gcc::context *ctxt)
3982 return new pass_split_before_regstack (ctxt);
3985 static bool
3986 gate_handle_split_before_sched2 (void)
3988 #ifdef INSN_SCHEDULING
3989 return optimize > 0 && flag_schedule_insns_after_reload;
3990 #else
3991 return 0;
3992 #endif
3995 static unsigned int
3996 rest_of_handle_split_before_sched2 (void)
3998 #ifdef INSN_SCHEDULING
3999 split_all_insns ();
4000 #endif
4001 return 0;
4004 namespace {
4006 const pass_data pass_data_split_before_sched2 =
4008 RTL_PASS, /* type */
4009 "split4", /* name */
4010 OPTGROUP_NONE, /* optinfo_flags */
4011 true, /* has_gate */
4012 true, /* has_execute */
4013 TV_NONE, /* tv_id */
4014 0, /* properties_required */
4015 0, /* properties_provided */
4016 0, /* properties_destroyed */
4017 0, /* todo_flags_start */
4018 TODO_verify_flow, /* todo_flags_finish */
4021 class pass_split_before_sched2 : public rtl_opt_pass
4023 public:
4024 pass_split_before_sched2 (gcc::context *ctxt)
4025 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4028 /* opt_pass methods: */
4029 bool gate () { return gate_handle_split_before_sched2 (); }
4030 unsigned int execute () { return rest_of_handle_split_before_sched2 (); }
4032 }; // class pass_split_before_sched2
4034 } // anon namespace
4036 rtl_opt_pass *
4037 make_pass_split_before_sched2 (gcc::context *ctxt)
4039 return new pass_split_before_sched2 (ctxt);
4042 /* The placement of the splitting that we do for shorten_branches
4043 depends on whether regstack is used by the target or not. */
4044 static bool
4045 gate_do_final_split (void)
4047 #if HAVE_ATTR_length && !defined (STACK_REGS)
4048 return 1;
4049 #else
4050 return 0;
4051 #endif
4054 namespace {
4056 const pass_data pass_data_split_for_shorten_branches =
4058 RTL_PASS, /* type */
4059 "split5", /* name */
4060 OPTGROUP_NONE, /* optinfo_flags */
4061 true, /* has_gate */
4062 true, /* has_execute */
4063 TV_NONE, /* tv_id */
4064 0, /* properties_required */
4065 0, /* properties_provided */
4066 0, /* properties_destroyed */
4067 0, /* todo_flags_start */
4068 TODO_verify_rtl_sharing, /* todo_flags_finish */
4071 class pass_split_for_shorten_branches : public rtl_opt_pass
4073 public:
4074 pass_split_for_shorten_branches (gcc::context *ctxt)
4075 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4078 /* opt_pass methods: */
4079 bool gate () { return gate_do_final_split (); }
4080 unsigned int execute () { return split_all_insns_noflow (); }
4082 }; // class pass_split_for_shorten_branches
4084 } // anon namespace
4086 rtl_opt_pass *
4087 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4089 return new pass_split_for_shorten_branches (ctxt);