/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
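
/* For instance, on a target whose stack grows downward (x86 is a
   typical case), a push of a word ends up written with PRE_DEC,
   roughly as

       (set (mem:SI (pre_dec:SI (reg sp))) ...)

   and the matching pop reads through (post_inc:SI (reg sp)).  The
   snippet is illustrative, not a pattern from any particular port.  */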

static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}

/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
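
/* A typical caller batches several tentative edits with IN_GROUP nonzero
   and then validates them as one unit.  A minimal sketch, where INSN,
   NOTE and NEW_SRC are hypothetical values supplied by the caller:

       validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
       validate_change (insn, &XEXP (note, 0), new_src, 1);
       if (! apply_change_group ())
         return;

   If the group fails, every queued edit is rolled back and INSN is left
   untouched; either all changes land (confirm_change_group) or none do
   (cancel_changes).  Partial application never occurs.  */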

/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object, true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
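
/* Because cancel_changes takes a starting index, a pass can nest
   speculative edits by remembering the change count first.  A sketch of
   that pattern, with N a hypothetical local:

       int n = num_validated_changes ();
       ... queue further edits with validate_change (..., 1) ...
       if (! verify_changes (n))
         cancel_changes (n);

   which retracts only the edits queued after the snapshot, leaving any
   earlier validated changes in the group intact.  This is roughly how
   clients such as if-conversion use the interface.  */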

/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif

/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
            {
              wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
            {
              wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
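
/* For instance, a pass that has proved pseudo 100 always holds 42 could
   try

       if (validate_replace_rtx (regno_reg_rtx[100], GEN_INT (42), insn))
         ...

   which substitutes (const_int 42) for every use of (reg 100) in INSN,
   but keeps the substitution only if INSN is still recognizable
   afterwards.  The register number and value are illustrative.  */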

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
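
/* In a machine description this predicate typically shows up as, e.g.,

       (match_operand:SI 1 "general_operand" "g")

   so that recognition accepts a register, a memory reference or a
   constant word in that operand slot.  The snippet is illustrative of
   .md usage generally, not taken from a specific port.  */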

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
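
/* So on a STACK_GROWS_DOWNWARD target whose push needs no padding, this
   predicate accepts e.g.

       (mem:SI (pre_dec:SI (reg sp)))

   and, where PUSH_ROUNDING pads a HImode push out to 4 bytes, the
   equivalent PRE_MODIFY form

       (mem:HI (pre_modify (reg sp) (plus (reg sp) (const_int -4))))

   The exact modes and offsets are hypothetical and target-dependent.  */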

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}

/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;	/* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_SCALAR_INT_P (op))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_SCALAR_INT_P (op))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}

/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
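
/* For example, given P pointing at (plus (reg 1) (const_int 4)), the
   recursion bottoms out in the second summand and the result is
   &XEXP (*p, 1), the location holding the (const_int 4); given a bare
   (reg 1) the result is a null pointer.  The operands are illustrative.  */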

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
             : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  enum machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
                        plus_constant (address_mode, XEXP (y, 1),
                                       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
           && GET_CODE (y) == ZERO_EXTEND
           && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
                             plus_constant (pointer_mode, XEXP (y, 0),
                                            mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}
2056 /* Like extract_insn, but save the insn extracted and don't extract it
2057 again when called again for the same insn, expecting that recog_data
2058 still contains valid information. This is used primarily by the
2059 gen_attr infrastructure, which often extracts the same insn repeatedly. */
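/* Illustrative use (a sketch, not code from this file): generated
   attribute code typically does

     extract_insn_cached (insn);
     op = recog_data.operand[0];

   so repeated queries for the same INSN cost only the cache check. */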
2060 void
2061 extract_insn_cached (rtx insn)
2063 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2064 return;
2065 extract_insn (insn);
2066 recog_data.insn = insn;
2069 /* Do cached extract_insn, constrain_operands and complain about failures.
2070 Used by insn_attrtab. */
2071 void
2072 extract_constrain_insn_cached (rtx insn)
2074 extract_insn_cached (insn);
2075 if (which_alternative == -1
2076 && !constrain_operands (reload_completed))
2077 fatal_insn_not_found (insn);
2080 /* Do cached constrain_operands and complain about failures. */
2081 int
2082 constrain_operands_cached (int strict)
2084 if (which_alternative == -1)
2085 return constrain_operands (strict);
2086 else
2087 return 1;
2090 /* Analyze INSN and fill in recog_data. */
2092 void
2093 extract_insn (rtx insn)
2095 int i;
2096 int icode;
2097 int noperands;
2098 rtx body = PATTERN (insn);
2100 recog_data.n_operands = 0;
2101 recog_data.n_alternatives = 0;
2102 recog_data.n_dups = 0;
2103 recog_data.is_asm = false;
2105 switch (GET_CODE (body))
2107 case USE:
2108 case CLOBBER:
2109 case ASM_INPUT:
2110 case ADDR_VEC:
2111 case ADDR_DIFF_VEC:
2112 case VAR_LOCATION:
2113 return;
2115 case SET:
2116 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2117 goto asm_insn;
2118 else
2119 goto normal_insn;
2120 case PARALLEL:
2121 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2122 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2123 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2124 goto asm_insn;
2125 else
2126 goto normal_insn;
2127 case ASM_OPERANDS:
2128 asm_insn:
2129 recog_data.n_operands = noperands = asm_noperands (body);
2130 if (noperands >= 0)
2132 /* This insn is an `asm' with operands. */
2134 /* expand_asm_operands makes sure there aren't too many operands. */
2135 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2137 /* Now get the operand values and constraints out of the insn. */
2138 decode_asm_operands (body, recog_data.operand,
2139 recog_data.operand_loc,
2140 recog_data.constraints,
2141 recog_data.operand_mode, NULL);
2142 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2143 if (noperands > 0)
2145 const char *p = recog_data.constraints[0];
2146 recog_data.n_alternatives = 1;
2147 while (*p)
2148 recog_data.n_alternatives += (*p++ == ',');
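/* E.g. the asm constraint string "r,m" contains one comma and therefore
   describes two alternatives. */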
2150 recog_data.is_asm = true;
2151 break;
2153 fatal_insn_not_found (insn);
2155 default:
2156 normal_insn:
2157 /* Ordinary insn: recognize it, get the operands via insn_extract
2158 and get the constraints. */
2160 icode = recog_memoized (insn);
2161 if (icode < 0)
2162 fatal_insn_not_found (insn);
2164 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2165 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2166 recog_data.n_dups = insn_data[icode].n_dups;
2168 insn_extract (insn);
2170 for (i = 0; i < noperands; i++)
2172 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2173 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2174 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2175 /* A VOIDmode match_operand gets its mode from its real operand. */
2176 if (recog_data.operand_mode[i] == VOIDmode)
2177 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2180 for (i = 0; i < noperands; i++)
2181 recog_data.operand_type[i]
2182 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2183 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2184 : OP_IN);
2186 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2188 if (INSN_CODE (insn) < 0)
2189 for (i = 0; i < recog_data.n_alternatives; i++)
2190 recog_data.alternative_enabled_p[i] = true;
2191 else
2193 recog_data.insn = insn;
2194 for (i = 0; i < recog_data.n_alternatives; i++)
2196 which_alternative = i;
2197 recog_data.alternative_enabled_p[i]
2198 = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2202 recog_data.insn = NULL;
2203 which_alternative = -1;
2206 /* After calling extract_insn, you can use this function to extract some
2207 information from the constraint strings into a more usable form.
2208 The collected data is stored in recog_op_alt. */
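/* Worked example (assuming the standard meanings of 'r' and 'm'): for an
   operand whose constraint string is "r,m", alternative 0 records
   cl == GENERAL_REGS and alternative 1 records memory_ok == 1, so later
   passes can consult recog_op_alt instead of re-parsing the string. */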
2209 void
2210 preprocess_constraints (void)
2212 int i;
2214 for (i = 0; i < recog_data.n_operands; i++)
2215 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2216 * sizeof (struct operand_alternative)));
2218 for (i = 0; i < recog_data.n_operands; i++)
2220 int j;
2221 struct operand_alternative *op_alt;
2222 const char *p = recog_data.constraints[i];
2224 op_alt = recog_op_alt[i];
2226 for (j = 0; j < recog_data.n_alternatives; j++)
2228 op_alt[j].cl = NO_REGS;
2229 op_alt[j].constraint = p;
2230 op_alt[j].matches = -1;
2231 op_alt[j].matched = -1;
2233 if (!recog_data.alternative_enabled_p[j])
2235 p = skip_alternative (p);
2236 continue;
2239 if (*p == '\0' || *p == ',')
2241 op_alt[j].anything_ok = 1;
2242 continue;
2245 for (;;)
2247 char c = *p;
2248 if (c == '#')
2249 do
2250 c = *++p;
2251 while (c != ',' && c != '\0');
2252 if (c == ',' || c == '\0')
2254 p++;
2255 break;
2258 switch (c)
2260 case '=': case '+': case '*': case '%':
2261 case 'E': case 'F': case 'G': case 'H':
2262 case 's': case 'i': case 'n':
2263 case 'I': case 'J': case 'K': case 'L':
2264 case 'M': case 'N': case 'O': case 'P':
2265 /* These don't say anything we care about. */
2266 break;
2268 case '?':
2269 op_alt[j].reject += 6;
2270 break;
2271 case '!':
2272 op_alt[j].reject += 600;
2273 break;
2274 case '&':
2275 op_alt[j].earlyclobber = 1;
2276 break;
2278 case '0': case '1': case '2': case '3': case '4':
2279 case '5': case '6': case '7': case '8': case '9':
2281 char *end;
2282 op_alt[j].matches = strtoul (p, &end, 10);
2283 recog_op_alt[op_alt[j].matches][j].matched = i;
2284 p = end;
2286 continue;
2288 case TARGET_MEM_CONSTRAINT:
2289 op_alt[j].memory_ok = 1;
2290 break;
2291 case '<':
2292 op_alt[j].decmem_ok = 1;
2293 break;
2294 case '>':
2295 op_alt[j].incmem_ok = 1;
2296 break;
2297 case 'V':
2298 op_alt[j].nonoffmem_ok = 1;
2299 break;
2300 case 'o':
2301 op_alt[j].offmem_ok = 1;
2302 break;
2303 case 'X':
2304 op_alt[j].anything_ok = 1;
2305 break;
2307 case 'p':
2308 op_alt[j].is_address = 1;
2309 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2310 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2311 ADDRESS, SCRATCH)];
2312 break;
2314 case 'g':
2315 case 'r':
2316 op_alt[j].cl =
2317 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2318 break;
2320 default:
2321 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2323 op_alt[j].memory_ok = 1;
2324 break;
2326 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2328 op_alt[j].is_address = 1;
2329 op_alt[j].cl
2330 = (reg_class_subunion
2331 [(int) op_alt[j].cl]
2332 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2333 ADDRESS, SCRATCH)]);
2334 break;
2337 op_alt[j].cl
2338 = (reg_class_subunion
2339 [(int) op_alt[j].cl]
2340 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2341 break;
2343 p += CONSTRAINT_LEN (c, p);
2349 /* Check the operands of an insn against the insn's operand constraints
2350 and return 1 if they are valid.
2351 The information about the insn's operands, constraints, operand modes
2352 etc. is obtained from the global variables set up by extract_insn.
2354 WHICH_ALTERNATIVE is set to a number which indicates which
2355 alternative of constraints was matched: 0 for the first alternative,
2356 1 for the next, etc.
2358 In addition, when two operands are required to match
2359 and it happens that the output operand is (reg) while the
2360 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2361 make the output operand look like the input.
2362 This is because the output operand is the one the template will print.
2364 This is used in final, just before printing the assembler code and by
2365 the routines that determine an insn's attribute.
2367 If STRICT is positive, it means that we have been
2368 called after reload has been completed. In that case, we must
2369 do all checks strictly. If it is zero, it means that we have been called
2370 before reload has completed. In that case, we first try to see if we can
2371 find an alternative that matches strictly. If not, we try again, this
2372 time assuming that reload will fix up the insn. This provides a "best
2373 guess" for the alternative and is used to compute attributes of insns prior
2374 to reload. A negative value of STRICT is used for this internal call. */
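/* A minimal use sketch (mirroring extract_constrain_insn_cached above,
   not a new API):

     extract_insn (insn);
     if (!constrain_operands (reload_completed))
       fatal_insn_not_found (insn);

   On success, which_alternative holds the index of the alternative that
   matched. */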
2376 struct funny_match
2378 int this_op, other;
2381 int
2382 constrain_operands (int strict)
2384 const char *constraints[MAX_RECOG_OPERANDS];
2385 int matching_operands[MAX_RECOG_OPERANDS];
2386 int earlyclobber[MAX_RECOG_OPERANDS];
2387 int c;
2389 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2390 int funny_match_index;
2392 which_alternative = 0;
2393 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2394 return 1;
2396 for (c = 0; c < recog_data.n_operands; c++)
2398 constraints[c] = recog_data.constraints[c];
2399 matching_operands[c] = -1;
2402 do
2404 int seen_earlyclobber_at = -1;
2405 int opno;
2406 int lose = 0;
2407 funny_match_index = 0;
2409 if (!recog_data.alternative_enabled_p[which_alternative])
2411 int i;
2413 for (i = 0; i < recog_data.n_operands; i++)
2414 constraints[i] = skip_alternative (constraints[i]);
2416 which_alternative++;
2417 continue;
2420 for (opno = 0; opno < recog_data.n_operands; opno++)
2422 rtx op = recog_data.operand[opno];
2423 enum machine_mode mode = GET_MODE (op);
2424 const char *p = constraints[opno];
2425 int offset = 0;
2426 int win = 0;
2427 int val;
2428 int len;
2430 earlyclobber[opno] = 0;
2432 /* A unary operator may be accepted by the predicate, but it
2433 is irrelevant for matching constraints. */
2434 if (UNARY_P (op))
2435 op = XEXP (op, 0);
2437 if (GET_CODE (op) == SUBREG)
2439 if (REG_P (SUBREG_REG (op))
2440 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2441 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2442 GET_MODE (SUBREG_REG (op)),
2443 SUBREG_BYTE (op),
2444 GET_MODE (op));
2445 op = SUBREG_REG (op);
2448 /* An empty constraint or empty alternative
2449 allows anything which matched the pattern. */
2450 if (*p == 0 || *p == ',')
2451 win = 1;
2453 do
2454 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2456 case '\0':
2457 len = 0;
2458 break;
2459 case ',':
2460 c = '\0';
2461 break;
2463 case '?': case '!': case '*': case '%':
2464 case '=': case '+':
2465 break;
2467 case '#':
2468 /* Ignore rest of this alternative as far as
2469 constraint checking is concerned. */
2470 do
2471 p++;
2472 while (*p && *p != ',');
2473 len = 0;
2474 break;
2476 case '&':
2477 earlyclobber[opno] = 1;
2478 if (seen_earlyclobber_at < 0)
2479 seen_earlyclobber_at = opno;
2480 break;
2482 case '0': case '1': case '2': case '3': case '4':
2483 case '5': case '6': case '7': case '8': case '9':
2485 /* This operand must be the same as a previous one.
2486 This kind of constraint is used for instructions such
2487 as add when they take only two operands.
2489 Note that the lower-numbered operand is passed first.
2491 If we are not testing strictly, assume that this
2492 constraint will be satisfied. */
2494 char *end;
2495 int match;
2497 match = strtoul (p, &end, 10);
2498 p = end;
2500 if (strict < 0)
2501 val = 1;
2502 else
2504 rtx op1 = recog_data.operand[match];
2505 rtx op2 = recog_data.operand[opno];
2507 /* A unary operator may be accepted by the predicate,
2508 but it is irrelevant for matching constraints. */
2509 if (UNARY_P (op1))
2510 op1 = XEXP (op1, 0);
2511 if (UNARY_P (op2))
2512 op2 = XEXP (op2, 0);
2514 val = operands_match_p (op1, op2);
2517 matching_operands[opno] = match;
2518 matching_operands[match] = opno;
2520 if (val != 0)
2521 win = 1;
2523 /* If output is *x and input is *--x, arrange later
2524 to change the output to *--x as well, since the
2525 output op is the one that will be printed. */
2526 if (val == 2 && strict > 0)
2528 funny_match[funny_match_index].this_op = opno;
2529 funny_match[funny_match_index++].other = match;
2532 len = 0;
2533 break;
2535 case 'p':
2536 /* p is used for address_operands. When we are called by
2537 gen_reload, no one will have checked that the address is
2538 strictly valid, i.e., that all pseudos requiring hard regs
2539 have gotten them. */
2540 if (strict <= 0
2541 || (strict_memory_address_p (recog_data.operand_mode[opno],
2542 op)))
2543 win = 1;
2544 break;
2546 /* No need to check general_operand again;
2547 it was done in insn-recog.c. Well, except that reload
2548 doesn't check the validity of its replacements, but
2549 that should only matter when there's a bug. */
2550 case 'g':
2551 /* Anything goes unless it is a REG and really has a hard reg
2552 but the hard reg is not in the class GENERAL_REGS. */
2553 if (REG_P (op))
2555 if (strict < 0
2556 || GENERAL_REGS == ALL_REGS
2557 || (reload_in_progress
2558 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2559 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2560 win = 1;
2562 else if (strict < 0 || general_operand (op, mode))
2563 win = 1;
2564 break;
2566 case 'X':
2567 /* This is used for a MATCH_SCRATCH in the cases when
2568 we don't actually need anything. So anything goes
2569 any time. */
2570 win = 1;
2571 break;
2573 case TARGET_MEM_CONSTRAINT:
2574 /* Memory operands must be valid, to the extent
2575 required by STRICT. */
2576 if (MEM_P (op))
2578 if (strict > 0
2579 && !strict_memory_address_addr_space_p
2580 (GET_MODE (op), XEXP (op, 0),
2581 MEM_ADDR_SPACE (op)))
2582 break;
2583 if (strict == 0
2584 && !memory_address_addr_space_p
2585 (GET_MODE (op), XEXP (op, 0),
2586 MEM_ADDR_SPACE (op)))
2587 break;
2588 win = 1;
2590 /* Before reload, accept what reload can turn into mem. */
2591 else if (strict < 0 && CONSTANT_P (op))
2592 win = 1;
2593 /* During reload, accept a pseudo. */
2594 else if (reload_in_progress && REG_P (op)
2595 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2596 win = 1;
2597 break;
2599 case '<':
2600 if (MEM_P (op)
2601 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2602 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2603 win = 1;
2604 break;
2606 case '>':
2607 if (MEM_P (op)
2608 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2609 || GET_CODE (XEXP (op, 0)) == POST_INC))
2610 win = 1;
2611 break;
2613 case 'E':
2614 case 'F':
2615 if (CONST_DOUBLE_AS_FLOAT_P (op)
2616 || (GET_CODE (op) == CONST_VECTOR
2617 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2618 win = 1;
2619 break;
2621 case 'G':
2622 case 'H':
2623 if (CONST_DOUBLE_AS_FLOAT_P (op)
2624 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2625 win = 1;
2626 break;
2628 case 's':
2629 if (CONST_SCALAR_INT_P (op))
2630 break;
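/* Fall through: 's' accepts any constant that is not a scalar
   integer; such constants are checked by the 'i' case below. */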
2631 case 'i':
2632 if (CONSTANT_P (op))
2633 win = 1;
2634 break;
2636 case 'n':
2637 if (CONST_SCALAR_INT_P (op))
2638 win = 1;
2639 break;
2641 case 'I':
2642 case 'J':
2643 case 'K':
2644 case 'L':
2645 case 'M':
2646 case 'N':
2647 case 'O':
2648 case 'P':
2649 if (CONST_INT_P (op)
2650 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2651 win = 1;
2652 break;
2654 case 'V':
2655 if (MEM_P (op)
2656 && ((strict > 0 && ! offsettable_memref_p (op))
2657 || (strict < 0
2658 && !(CONSTANT_P (op) || MEM_P (op)))
2659 || (reload_in_progress
2660 && !(REG_P (op)
2661 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2662 win = 1;
2663 break;
2665 case 'o':
2666 if ((strict > 0 && offsettable_memref_p (op))
2667 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2668 /* Before reload, accept what reload can handle. */
2669 || (strict < 0
2670 && (CONSTANT_P (op) || MEM_P (op)))
2671 /* During reload, accept a pseudo. */
2672 || (reload_in_progress && REG_P (op)
2673 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2674 win = 1;
2675 break;
2677 default:
2679 enum reg_class cl;
2681 cl = (c == 'r'
2682 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2683 if (cl != NO_REGS)
2685 if (strict < 0
2686 || (strict == 0
2687 && REG_P (op)
2688 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2689 || (strict == 0 && GET_CODE (op) == SCRATCH)
2690 || (REG_P (op)
2691 && reg_fits_class_p (op, cl, offset, mode)))
2692 win = 1;
2694 #ifdef EXTRA_CONSTRAINT_STR
2695 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2696 win = 1;
2698 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2699 /* Every memory operand can be reloaded to fit. */
2700 && ((strict < 0 && MEM_P (op))
2701 /* Before reload, accept what reload can turn
2702 into mem. */
2703 || (strict < 0 && CONSTANT_P (op))
2704 /* During reload, accept a pseudo. */
2705 || (reload_in_progress && REG_P (op)
2706 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2707 win = 1;
2708 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2709 /* Every address operand can be reloaded to fit. */
2710 && strict < 0)
2711 win = 1;
2712 /* Cater to architectures like IA-64 that define extra memory
2713 constraints without using define_memory_constraint. */
2714 else if (reload_in_progress
2715 && REG_P (op)
2716 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2717 && reg_renumber[REGNO (op)] < 0
2718 && reg_equiv_mem (REGNO (op)) != 0
2719 && EXTRA_CONSTRAINT_STR
2720 (reg_equiv_mem (REGNO (op)), c, p))
2721 win = 1;
2722 #endif
2723 break;
2726 while (p += len, c);
2728 constraints[opno] = p;
2729 /* If this operand did not win somehow,
2730 this alternative loses. */
2731 if (! win)
2732 lose = 1;
2734 /* This alternative won; the operands are ok.
2735 Change whichever operands this alternative says to change. */
2736 if (! lose)
2738 int opno, eopno;
2740 /* See if any earlyclobber operand conflicts with some other
2741 operand. */
2743 if (strict > 0 && seen_earlyclobber_at >= 0)
2744 for (eopno = seen_earlyclobber_at;
2745 eopno < recog_data.n_operands;
2746 eopno++)
2747 /* Ignore earlyclobber operands now in memory,
2748 because we would often report failure when we have
2749 two memory operands, one of which was formerly a REG. */
2750 if (earlyclobber[eopno]
2751 && REG_P (recog_data.operand[eopno]))
2752 for (opno = 0; opno < recog_data.n_operands; opno++)
2753 if ((MEM_P (recog_data.operand[opno])
2754 || recog_data.operand_type[opno] != OP_OUT)
2755 && opno != eopno
2756 /* Ignore things like match_operator operands. */
2757 && *recog_data.constraints[opno] != 0
2758 && ! (matching_operands[opno] == eopno
2759 && operands_match_p (recog_data.operand[opno],
2760 recog_data.operand[eopno]))
2761 && ! safe_from_earlyclobber (recog_data.operand[opno],
2762 recog_data.operand[eopno]))
2763 lose = 1;
2765 if (! lose)
2767 while (--funny_match_index >= 0)
2769 recog_data.operand[funny_match[funny_match_index].other]
2770 = recog_data.operand[funny_match[funny_match_index].this_op];
2773 #ifdef AUTO_INC_DEC
2774 /* For operands without < or > constraints reject side-effects. */
2775 if (recog_data.is_asm)
2777 for (opno = 0; opno < recog_data.n_operands; opno++)
2778 if (MEM_P (recog_data.operand[opno]))
2779 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2781 case PRE_INC:
2782 case POST_INC:
2783 case PRE_DEC:
2784 case POST_DEC:
2785 case PRE_MODIFY:
2786 case POST_MODIFY:
2787 if (strchr (recog_data.constraints[opno], '<') == NULL
2788 && strchr (recog_data.constraints[opno], '>')
2789 == NULL)
2790 return 0;
2791 break;
2792 default:
2793 break;
2796 #endif
2797 return 1;
2801 which_alternative++;
2803 while (which_alternative < recog_data.n_alternatives);
2805 which_alternative = -1;
2806 /* If we are about to reject this, but we are not testing strictly,
2807 try again with a very loose test. Only return failure if that also fails. */
2808 if (strict == 0)
2809 return constrain_operands (-1);
2810 else
2811 return 0;
2814 /* Return true iff OPERAND (assumed to be a REG rtx)
2815 is a hard reg in class CL when its regno is offset by OFFSET
2816 and changed to mode MODE.
2817 If OPERAND occupies multiple hard regs, all of them must be in CL. */
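/* Illustrative example: on a target where DImode occupies two hard
   registers, reg_fits_class_p (op, CL, 0, DImode) requires both
   REGNO (op) and the following hard register to be members of CL. */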
2819 bool
2820 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2821 enum machine_mode mode)
2823 unsigned int regno = REGNO (operand);
2825 if (cl == NO_REGS)
2826 return false;
2828 /* Regno must not be a pseudo register. Offset may be negative. */
2829 return (HARD_REGISTER_NUM_P (regno)
2830 && HARD_REGISTER_NUM_P (regno + offset)
2831 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2832 regno + offset));
2835 /* Split a single instruction. Helper function for split_all_insns and
2836 split_all_insns_noflow. Return last insn in the sequence if successful,
2837 or NULL if unsuccessful. */
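/* For example (illustrative): if INSN was (set (reg X) ...) carrying a
   REG_EQUAL note for constant C, and the last insn of the split sequence
   also sets (reg X), that insn inherits REG_EQUAL C, so the constant
   equivalence survives the split. */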
2839 static rtx
2840 split_insn (rtx insn)
2842 /* Split insns here to get the maximum fine-grained parallelism. */
2843 rtx first = PREV_INSN (insn);
2844 rtx last = try_split (PATTERN (insn), insn, 1);
2845 rtx insn_set, last_set, note;
2847 if (last == insn)
2848 return NULL_RTX;
2850 /* If the original instruction was a single set that was known to be
2851 equivalent to a constant, see if we can say the same about the last
2852 instruction in the split sequence. The two instructions must set
2853 the same destination. */
2854 insn_set = single_set (insn);
2855 if (insn_set)
2857 last_set = single_set (last);
2858 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2860 note = find_reg_equal_equiv_note (insn);
2861 if (note && CONSTANT_P (XEXP (note, 0)))
2862 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2863 else if (CONSTANT_P (SET_SRC (insn_set)))
2864 set_unique_reg_note (last, REG_EQUAL,
2865 copy_rtx (SET_SRC (insn_set)));
2869 /* try_split returns the NOTE that INSN became. */
2870 SET_INSN_DELETED (insn);
2872 /* ??? Coddle to md files that generate subregs in post-reload
2873 splitters instead of computing the proper hard register. */
2874 if (reload_completed && first != last)
2876 first = NEXT_INSN (first);
2877 for (;;)
2879 if (INSN_P (first))
2880 cleanup_subreg_operands (first);
2881 if (first == last)
2882 break;
2883 first = NEXT_INSN (first);
2887 return last;
2890 /* Split all insns in the function. Any blocks in which insns were
2891 split get their sub-basic-block structure re-discovered afterwards. */
2892 void
2893 split_all_insns (void)
2895 sbitmap blocks;
2896 bool changed;
2897 basic_block bb;
2899 blocks = sbitmap_alloc (last_basic_block);
2900 bitmap_clear (blocks);
2901 changed = false;
2903 FOR_EACH_BB_REVERSE (bb)
2905 rtx insn, next;
2906 bool finish = false;
2908 rtl_profile_for_bb (bb);
2909 for (insn = BB_HEAD (bb); !finish ; insn = next)
2911 /* Can't use `next_real_insn' here because that might step across
2912 CODE_LABELs and thereby skip whole basic blocks. */
2913 next = NEXT_INSN (insn);
2914 finish = (insn == BB_END (bb));
2915 if (INSN_P (insn))
2917 rtx set = single_set (insn);
2919 /* Don't split no-op move insns. These should silently
2920 disappear later in final. Splitting such insns would
2921 break the code that handles LIBCALL blocks. */
2922 if (set && set_noop_p (set))
2924 /* Nops get in the way while scheduling, so delete them
2925 now if register allocation has already been done. It
2926 is too risky to try to do this before register
2927 allocation, and there are unlikely to be very many
2928 nops then anyway. */
2929 if (reload_completed)
2930 delete_insn_and_edges (insn);
2932 else
2934 if (split_insn (insn))
2936 bitmap_set_bit (blocks, bb->index);
2937 changed = true;
2944 default_rtl_profile ();
2945 if (changed)
2946 find_many_sub_basic_blocks (blocks);
2948 #ifdef ENABLE_CHECKING
2949 verify_flow_info ();
2950 #endif
2952 sbitmap_free (blocks);
2955 /* Same as split_all_insns, but do not expect CFG to be available.
2956 Used by machine-dependent reorg passes. */
2958 unsigned int
2959 split_all_insns_noflow (void)
2961 rtx next, insn;
2963 for (insn = get_insns (); insn; insn = next)
2965 next = NEXT_INSN (insn);
2966 if (INSN_P (insn))
2968 /* Don't split no-op move insns. These should silently
2969 disappear later in final. Splitting such insns would
2970 break the code that handles LIBCALL blocks. */
2971 rtx set = single_set (insn);
2972 if (set && set_noop_p (set))
2974 /* Nops get in the way while scheduling, so delete them
2975 now if register allocation has already been done. It
2976 is too risky to try to do this before register
2977 allocation, and there are unlikely to be very many
2978 nops then anyway.
2980 ??? Should we use delete_insn when the CFG isn't valid? */
2981 if (reload_completed)
2982 delete_insn_and_edges (insn);
2984 else
2985 split_insn (insn);
2988 return 0;
2991 #ifdef HAVE_peephole2
2992 struct peep2_insn_data
2994 rtx insn;
2995 regset live_before;
2998 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2999 static int peep2_current;
3001 static bool peep2_do_rebuild_jump_labels;
3002 static bool peep2_do_cleanup_cfg;
3004 /* The number of instructions available to match a peep2. */
3005 int peep2_current_count;
3007 /* A non-insn marker indicating the last insn of the block.
3008 The live_before regset for this element is correct, indicating
3009 DF_LIVE_OUT for the block. */
3010 #define PEEP2_EOB pc_rtx
3012 /* Wrap N to fit into the peep2_insn_data buffer. */
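/* E.g. assuming MAX_INSNS_PER_PEEP2 == 5 the buffer has six slots, and
   peep2_buf_position (7) == 1. */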
3014 static int
3015 peep2_buf_position (int n)
3017 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3018 n -= MAX_INSNS_PER_PEEP2 + 1;
3019 return n;
3022 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3023 does not exist. Used by the recognizer to find the next insn to match
3024 in a multi-insn pattern. */
3026 rtx
3027 peep2_next_insn (int n)
3029 gcc_assert (n <= peep2_current_count);
3031 n = peep2_buf_position (peep2_current + n);
3033 return peep2_insn_data[n].insn;
3036 /* Return true if REGNO is dead before the Nth non-note insn
3037 after `current'. */
3039 int
3040 peep2_regno_dead_p (int ofs, int regno)
3042 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3044 ofs = peep2_buf_position (peep2_current + ofs);
3046 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3048 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3051 /* Similarly for a REG. */
3053 int
3054 peep2_reg_dead_p (int ofs, rtx reg)
3056 int regno, n;
3058 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3060 ofs = peep2_buf_position (peep2_current + ofs);
3062 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3064 regno = REGNO (reg);
3065 n = hard_regno_nregs[regno][GET_MODE (reg)];
3066 while (--n >= 0)
3067 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3068 return 0;
3069 return 1;
3072 /* Regno offset to be used in the register search. */
3073 static int search_ofs;
3075 /* Try to find a hard register of mode MODE, matching the register class in
3076 CLASS_STR, which is available at the beginning of the insn at peep2
3077 buffer position FROM and is not clobbered by any insn from that point
3078 up to the insn at buffer position TO.
3080 Registers that already have bits set in REG_SET will not be considered.
3082 If an appropriate register is available, it will be returned and the
3083 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3084 returned. */
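/* Sketch of typical use (hypothetical pattern and variable names): a
   define_peephole2 containing (match_scratch:SI 3 "r") leads the
   generated matcher to call something like

     peep2_find_free_register (0, last_pos, "r", SImode, &regs_allocated);

   to materialize the scratch register. */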
3086 rtx
3087 peep2_find_free_register (int from, int to, const char *class_str,
3088 enum machine_mode mode, HARD_REG_SET *reg_set)
3090 enum reg_class cl;
3091 HARD_REG_SET live;
3092 df_ref *def_rec;
3093 int i;
3095 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3096 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3098 from = peep2_buf_position (peep2_current + from);
3099 to = peep2_buf_position (peep2_current + to);
3101 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3102 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3104 while (from != to)
3106 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3108 /* Don't use registers set or clobbered by the insn. */
3109 for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3110 *def_rec; def_rec++)
3111 SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3113 from = peep2_buf_position (from + 1);
3116 cl = (class_str[0] == 'r' ? GENERAL_REGS
3117 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3119 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3121 int raw_regno, regno, success, j;
3123 /* Distribute the free registers as much as possible. */
3124 raw_regno = search_ofs + i;
3125 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3126 raw_regno -= FIRST_PSEUDO_REGISTER;
3127 #ifdef REG_ALLOC_ORDER
3128 regno = reg_alloc_order[raw_regno];
3129 #else
3130 regno = raw_regno;
3131 #endif
3133 /* Can it support the mode we need? */
3134 if (! HARD_REGNO_MODE_OK (regno, mode))
3135 continue;
3137 success = 1;
3138 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3140 /* Don't allocate fixed registers. */
3141 if (fixed_regs[regno + j])
3143 success = 0;
3144 break;
3146 /* Don't allocate global registers. */
3147 if (global_regs[regno + j])
3149 success = 0;
3150 break;
3152 /* Make sure the register is of the right class. */
3153 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3155 success = 0;
3156 break;
3158 /* And that we don't create an extra save/restore. */
3159 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3161 success = 0;
3162 break;
3165 if (! targetm.hard_regno_scratch_ok (regno + j))
3167 success = 0;
3168 break;
3171 /* And we don't clobber traceback for noreturn functions. */
3172 if ((regno + j == FRAME_POINTER_REGNUM
3173 || regno + j == HARD_FRAME_POINTER_REGNUM)
3174 && (! reload_completed || frame_pointer_needed))
3176 success = 0;
3177 break;
3180 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3181 || TEST_HARD_REG_BIT (live, regno + j))
3183 success = 0;
3184 break;
3188 if (success)
3190 add_to_hard_reg_set (reg_set, mode, regno);
3192 /* Start the next search with the next register. */
3193 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3194 raw_regno = 0;
3195 search_ofs = raw_regno;
3197 return gen_rtx_REG (mode, regno);
3201 search_ofs = 0;
3202 return NULL_RTX;
3205 /* Forget all currently tracked instructions; remember only the
3206 current LIVE regset. */
3208 static void
3209 peep2_reinit_state (regset live)
3211 int i;
3213 /* Indicate that all slots except the last hold invalid data. */
3214 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3215 peep2_insn_data[i].insn = NULL_RTX;
3216 peep2_current_count = 0;
3218 /* Indicate that the last slot contains live_after data. */
3219 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3220 peep2_current = MAX_INSNS_PER_PEEP2;
3222 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3225 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3226 starting at INSN. Perform the replacement, removing the old insns and
3227 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3228 if the replacement is rejected. */
3230 static rtx
3231 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3233 int i;
3234 rtx last, eh_note, as_note, before_try, x;
3235 rtx old_insn, new_insn;
3236 bool was_call = false;
3238 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3239 match more than one insn, or to be split into more than one insn. */
3240 old_insn = peep2_insn_data[peep2_current].insn;
3241 if (RTX_FRAME_RELATED_P (old_insn))
3243 bool any_note = false;
3244 rtx note;
3246 if (match_len != 0)
3247 return NULL;
3249 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3250 may be in the stream for the purpose of register allocation. */
3251 if (active_insn_p (attempt))
3252 new_insn = attempt;
3253 else
3254 new_insn = next_active_insn (attempt);
3255 if (next_active_insn (new_insn))
3256 return NULL;
3258 /* We have a 1-1 replacement. Copy over any frame-related info. */
3259 RTX_FRAME_RELATED_P (new_insn) = 1;
3261 /* Allow the backend to fill in a note during the split. */
3262 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3263 switch (REG_NOTE_KIND (note))
3265 case REG_FRAME_RELATED_EXPR:
3266 case REG_CFA_DEF_CFA:
3267 case REG_CFA_ADJUST_CFA:
3268 case REG_CFA_OFFSET:
3269 case REG_CFA_REGISTER:
3270 case REG_CFA_EXPRESSION:
3271 case REG_CFA_RESTORE:
3272 case REG_CFA_SET_VDRAP:
3273 any_note = true;
3274 break;
3275 default:
3276 break;
3279 /* If the backend didn't supply a note, copy one over. */
3280 if (!any_note)
3281 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3282 switch (REG_NOTE_KIND (note))
3284 case REG_FRAME_RELATED_EXPR:
3285 case REG_CFA_DEF_CFA:
3286 case REG_CFA_ADJUST_CFA:
3287 case REG_CFA_OFFSET:
3288 case REG_CFA_REGISTER:
3289 case REG_CFA_EXPRESSION:
3290 case REG_CFA_RESTORE:
3291 case REG_CFA_SET_VDRAP:
3292 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3293 any_note = true;
3294 break;
3295 default:
3296 break;
3299 /* If there still isn't a note, make sure the unwind info sees the
3300 same expression as before the split. */
3301 if (!any_note)
3303 rtx old_set, new_set;
3305 /* The old insn had better have been simple, or annotated. */
3306 old_set = single_set (old_insn);
3307 gcc_assert (old_set != NULL);
3309 new_set = single_set (new_insn);
3310 if (!new_set || !rtx_equal_p (new_set, old_set))
3311 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3314 /* Copy prologue/epilogue status. This is required in order to keep
3315 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3316 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3319 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3320 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3321 cfg-related call notes. */
3322 for (i = 0; i <= match_len; ++i)
3324 int j;
3325 rtx note;
3327 j = peep2_buf_position (peep2_current + i);
3328 old_insn = peep2_insn_data[j].insn;
3329 if (!CALL_P (old_insn))
3330 continue;
3331 was_call = true;
3333 new_insn = attempt;
3334 while (new_insn != NULL_RTX)
3336 if (CALL_P (new_insn))
3337 break;
3338 new_insn = NEXT_INSN (new_insn);
3341 gcc_assert (new_insn != NULL_RTX);
3343 CALL_INSN_FUNCTION_USAGE (new_insn)
3344 = CALL_INSN_FUNCTION_USAGE (old_insn);
3346 for (note = REG_NOTES (old_insn);
3347 note;
3348 note = XEXP (note, 1))
3349 switch (REG_NOTE_KIND (note))
3351 case REG_NORETURN:
3352 case REG_SETJMP:
3353 case REG_TM:
3354 add_reg_note (new_insn, REG_NOTE_KIND (note),
3355 XEXP (note, 0));
3356 break;
3357 default:
3358 /* Discard all other reg notes. */
3359 break;
3362 /* Croak if there is another call in the sequence. */
3363 while (++i <= match_len)
3365 j = peep2_buf_position (peep2_current + i);
3366 old_insn = peep2_insn_data[j].insn;
3367 gcc_assert (!CALL_P (old_insn));
3369 break;
3372 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3373 move those notes over to the new sequence. */
3374 as_note = NULL;
3375 for (i = match_len; i >= 0; --i)
3377 int j = peep2_buf_position (peep2_current + i);
3378 old_insn = peep2_insn_data[j].insn;
3380 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3381 if (as_note)
3382 break;
3385 i = peep2_buf_position (peep2_current + match_len);
3386 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3388 /* Replace the old sequence with the new. */
3389 last = emit_insn_after_setloc (attempt,
3390 peep2_insn_data[i].insn,
3391 INSN_LOCATION (peep2_insn_data[i].insn));
3392 before_try = PREV_INSN (insn);
3393 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3395 /* Re-insert the EH_REGION notes. */
3396 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3398 edge eh_edge;
3399 edge_iterator ei;
3401 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3402 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3403 break;
3405 if (eh_note)
3406 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3408 if (eh_edge)
3409 for (x = last; x != before_try; x = PREV_INSN (x))
3410 if (x != BB_END (bb)
3411 && (can_throw_internal (x)
3412 || can_nonlocal_goto (x)))
3414 edge nfte, nehe;
3415 int flags;
3417 nfte = split_block (bb, x);
3418 flags = (eh_edge->flags
3419 & (EDGE_EH | EDGE_ABNORMAL));
3420 if (CALL_P (x))
3421 flags |= EDGE_ABNORMAL_CALL;
3422 nehe = make_edge (nfte->src, eh_edge->dest,
3423 flags);
3425 nehe->probability = eh_edge->probability;
3426 nfte->probability
3427 = REG_BR_PROB_BASE - nehe->probability;
3429 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3430 bb = nfte->src;
3431 eh_edge = nehe;
3434 /* A possibly trapping insn may have been converted to a
3435 non-trapping one; zap any resulting dummy outgoing edges. */
3436 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3439 /* Re-insert the ARGS_SIZE notes. */
3440 if (as_note)
3441 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3443 /* If we generated a jump instruction, it won't have
3444 JUMP_LABEL set. Recompute after we're done. */
3445 for (x = last; x != before_try; x = PREV_INSN (x))
3446 if (JUMP_P (x))
3448 peep2_do_rebuild_jump_labels = true;
3449 break;
3452 return last;
3455 /* After performing a replacement in basic block BB, fix up the life
3456 information in our buffer. LAST is the last of the insns that we
3457 emitted as a replacement. PREV is the insn before the start of
3458 the replacement. MATCH_LEN is the number of instructions that were
3459 matched, and which now need to be replaced in the buffer. */
3461 static void
3462 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3464 int i = peep2_buf_position (peep2_current + match_len + 1);
3465 rtx x;
3466 regset_head live;
3468 INIT_REG_SET (&live);
3469 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3471 gcc_assert (peep2_current_count >= match_len + 1);
3472 peep2_current_count -= match_len + 1;
3474 x = last;
3475 do
3477 if (INSN_P (x))
3479 df_insn_rescan (x);
3480 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3482 peep2_current_count++;
3483 if (--i < 0)
3484 i = MAX_INSNS_PER_PEEP2;
3485 peep2_insn_data[i].insn = x;
3486 df_simulate_one_insn_backwards (bb, x, &live);
3487 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3490 x = PREV_INSN (x);
3492 while (x != prev);
3493 CLEAR_REG_SET (&live);
3495 peep2_current = i;
3498 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3499 Return true if we added it, false otherwise. The caller will try to match
3500 peepholes against the buffer if we return false; otherwise it will try to
3501 add more instructions to the buffer. */
3503 static bool
3504 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3506 int pos;
3508 /* Once we have filled the maximum number of insns the buffer can hold,
3509 allow the caller to match the insns against peepholes. We wait until
3510 the buffer is full in case the target has similar peepholes of different
3511 length; we always want to match the longest if possible. */
3512 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3513 return false;
3515 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3516 any other pattern, lest it change the semantics of the frame info. */
3517 if (RTX_FRAME_RELATED_P (insn))
3519 /* Let the buffer drain first. */
3520 if (peep2_current_count > 0)
3521 return false;
3522 /* Now the insn will be the only thing in the buffer. */
3525 pos = peep2_buf_position (peep2_current + peep2_current_count);
3526 peep2_insn_data[pos].insn = insn;
3527 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3528 peep2_current_count++;
3530 df_simulate_one_insn_forwards (bb, insn, live);
3531 return true;
3534 /* Perform the peephole2 optimization pass. */
3536 static void
3537 peephole2_optimize (void)
3539 rtx insn;
3540 bitmap live;
3541 int i;
3542 basic_block bb;
3544 peep2_do_cleanup_cfg = false;
3545 peep2_do_rebuild_jump_labels = false;
3547 df_set_flags (DF_LR_RUN_DCE);
3548 df_note_add_problem ();
3549 df_analyze ();
3551 /* Initialize the regsets we're going to use. */
3552 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3553 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3554 search_ofs = 0;
3555 live = BITMAP_ALLOC (&reg_obstack);
3557 FOR_EACH_BB_REVERSE (bb)
3559 bool past_end = false;
3560 int pos;
3562 rtl_profile_for_bb (bb);
3564 /* Start up propagation. */
3565 bitmap_copy (live, DF_LR_IN (bb));
3566 df_simulate_initialize_forwards (bb, live);
3567 peep2_reinit_state (live);
3569 insn = BB_HEAD (bb);
3570 for (;;)
3572 rtx attempt, head;
3573 int match_len;
3575 if (!past_end && !NONDEBUG_INSN_P (insn))
3577 next_insn:
3578 insn = NEXT_INSN (insn);
3579 if (insn == NEXT_INSN (BB_END (bb)))
3580 past_end = true;
3581 continue;
3583 if (!past_end && peep2_fill_buffer (bb, insn, live))
3584 goto next_insn;
3586 /* If we did not fill an empty buffer, it signals the end of the
3587 block. */
3588 if (peep2_current_count == 0)
3589 break;
3591 /* The buffer filled to the current maximum, so try to match. */
3593 pos = peep2_buf_position (peep2_current + peep2_current_count);
3594 peep2_insn_data[pos].insn = PEEP2_EOB;
3595 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3597 /* Match the peephole. */
3598 head = peep2_insn_data[peep2_current].insn;
3599 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3600 if (attempt != NULL)
3602 rtx last = peep2_attempt (bb, head, match_len, attempt);
3603 if (last)
3605 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3606 continue;
3610 /* No match: advance the buffer by one insn. */
3611 peep2_current = peep2_buf_position (peep2_current + 1);
3612 peep2_current_count--;
3616 default_rtl_profile ();
3617 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3618 BITMAP_FREE (peep2_insn_data[i].live_before);
3619 BITMAP_FREE (live);
3620 if (peep2_do_rebuild_jump_labels)
3621 rebuild_jump_labels (get_insns ());
3623 #endif /* HAVE_peephole2 */
3625 /* Common predicates for use with define_bypass. */
3627 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3628 data, not the address operand(s) of the store. IN_INSN and OUT_INSN
3629 must be either a single_set or a PARALLEL with SETs inside. */
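/* Hypothetical md usage, with assumed CPU unit names "alu" and "store":

     (define_bypass 1 "alu" "store" "store_data_bypass_p")

   i.e. the shortened latency applies only when the ALU result is the
   value being stored, not part of the store's address. */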
3631 int
3632 store_data_bypass_p (rtx out_insn, rtx in_insn)
3634 rtx out_set, in_set;
3635 rtx out_pat, in_pat;
3636 rtx out_exp, in_exp;
3637 int i, j;
3639 in_set = single_set (in_insn);
3640 if (in_set)
3642 if (!MEM_P (SET_DEST (in_set)))
3643 return false;
3645 out_set = single_set (out_insn);
3646 if (out_set)
3648 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3649 return false;
3651 else
3653 out_pat = PATTERN (out_insn);
3655 if (GET_CODE (out_pat) != PARALLEL)
3656 return false;
3658 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3660 out_exp = XVECEXP (out_pat, 0, i);
3662 if (GET_CODE (out_exp) == CLOBBER)
3663 continue;
3665 gcc_assert (GET_CODE (out_exp) == SET);
3667 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3668 return false;
3672 else
3674 in_pat = PATTERN (in_insn);
3675 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3677 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3679 in_exp = XVECEXP (in_pat, 0, i);
3681 if (GET_CODE (in_exp) == CLOBBER)
3682 continue;
3684 gcc_assert (GET_CODE (in_exp) == SET);
3686 if (!MEM_P (SET_DEST (in_exp)))
3687 return false;
3689 out_set = single_set (out_insn);
3690 if (out_set)
3692 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3693 return false;
3695 else
3697 out_pat = PATTERN (out_insn);
3698 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3700 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3702 out_exp = XVECEXP (out_pat, 0, j);
3704 if (GET_CODE (out_exp) == CLOBBER)
3705 continue;
3707 gcc_assert (GET_CODE (out_exp) == SET);
3709 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3710 return false;
3716 return true;
3719 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3720 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3721 set or multiple sets; IN_INSN should be a single_set for a meaningful result,
3722 but for convenience of insn categorization it may be any JUMP or CALL insn. */
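/* E.g. OUT_INSN (set (reg 1) ...) feeding IN_INSN
   (set (reg 2) (if_then_else (eq (reg 1) (const_int 0)) (reg 3) (reg 4)))
   qualifies: reg 1 appears only in the condition, not in either arm.
   (Register numbers are illustrative.) */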
3724 int
3725 if_test_bypass_p (rtx out_insn, rtx in_insn)
3727 rtx out_set, in_set;
3729 in_set = single_set (in_insn);
3730 if (! in_set)
3732 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3733 return false;
3736 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3737 return false;
3738 in_set = SET_SRC (in_set);
3740 out_set = single_set (out_insn);
3741 if (out_set)
3743 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3744 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3745 return false;
3747 else
3749 rtx out_pat;
3750 int i;
3752 out_pat = PATTERN (out_insn);
3753 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3755 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3757 rtx exp = XVECEXP (out_pat, 0, i);
3759 if (GET_CODE (exp) == CLOBBER)
3760 continue;
3762 gcc_assert (GET_CODE (exp) == SET);
3764 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3765 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3766 return false;
3770 return true;
3773 static bool
3774 gate_handle_peephole2 (void)
3776 return (optimize > 0 && flag_peephole2);
3779 static unsigned int
3780 rest_of_handle_peephole2 (void)
3782 #ifdef HAVE_peephole2
3783 peephole2_optimize ();
3784 #endif
3785 return 0;
3788 namespace {
3790 const pass_data pass_data_peephole2 =
3792 RTL_PASS, /* type */
3793 "peephole2", /* name */
3794 OPTGROUP_NONE, /* optinfo_flags */
3795 true, /* has_gate */
3796 true, /* has_execute */
3797 TV_PEEPHOLE2, /* tv_id */
3798 0, /* properties_required */
3799 0, /* properties_provided */
3800 0, /* properties_destroyed */
3801 0, /* todo_flags_start */
3802 ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
3805 class pass_peephole2 : public rtl_opt_pass
3807 public:
3808 pass_peephole2 (gcc::context *ctxt)
3809 : rtl_opt_pass (pass_data_peephole2, ctxt)
3812 /* opt_pass methods: */
3813 /* The epiphany backend creates a second instance of this pass, so we need
3814 a clone method. */
3815 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3816 bool gate () { return gate_handle_peephole2 (); }
3817 unsigned int execute () { return rest_of_handle_peephole2 (); }
3819 }; // class pass_peephole2
3821 } // anon namespace
3823 rtl_opt_pass *
3824 make_pass_peephole2 (gcc::context *ctxt)
3826 return new pass_peephole2 (ctxt);
3829 static unsigned int
3830 rest_of_handle_split_all_insns (void)
3832 split_all_insns ();
3833 return 0;
3836 namespace {
3838 const pass_data pass_data_split_all_insns =
3840 RTL_PASS, /* type */
3841 "split1", /* name */
3842 OPTGROUP_NONE, /* optinfo_flags */
3843 false, /* has_gate */
3844 true, /* has_execute */
3845 TV_NONE, /* tv_id */
3846 0, /* properties_required */
3847 0, /* properties_provided */
3848 0, /* properties_destroyed */
3849 0, /* todo_flags_start */
3850 0, /* todo_flags_finish */
3853 class pass_split_all_insns : public rtl_opt_pass
3855 public:
3856 pass_split_all_insns (gcc::context *ctxt)
3857 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3860 /* opt_pass methods: */
3861 /* The epiphany backend creates a second instance of this pass, so
3862 we need a clone method. */
3863 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3864 unsigned int execute () { return rest_of_handle_split_all_insns (); }
3866 }; // class pass_split_all_insns
3868 } // anon namespace
3870 rtl_opt_pass *
3871 make_pass_split_all_insns (gcc::context *ctxt)
3873 return new pass_split_all_insns (ctxt);
3876 static unsigned int
3877 rest_of_handle_split_after_reload (void)
3879 /* If optimizing, then go ahead and split insns now. */
3880 #ifndef STACK_REGS
3881 if (optimize > 0)
3882 #endif
3883 split_all_insns ();
3884 return 0;
3887 namespace {
3889 const pass_data pass_data_split_after_reload =
3891 RTL_PASS, /* type */
3892 "split2", /* name */
3893 OPTGROUP_NONE, /* optinfo_flags */
3894 false, /* has_gate */
3895 true, /* has_execute */
3896 TV_NONE, /* tv_id */
3897 0, /* properties_required */
3898 0, /* properties_provided */
3899 0, /* properties_destroyed */
3900 0, /* todo_flags_start */
3901 0, /* todo_flags_finish */
3904 class pass_split_after_reload : public rtl_opt_pass
3906 public:
3907 pass_split_after_reload (gcc::context *ctxt)
3908 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3911 /* opt_pass methods: */
3912 unsigned int execute () { return rest_of_handle_split_after_reload (); }
3914 }; // class pass_split_after_reload
3916 } // anon namespace
3918 rtl_opt_pass *
3919 make_pass_split_after_reload (gcc::context *ctxt)
3921 return new pass_split_after_reload (ctxt);
3924 static bool
3925 gate_handle_split_before_regstack (void)
3927 #if HAVE_ATTR_length && defined (STACK_REGS)
3928 /* If flow2 creates new instructions which need splitting, and
3929 scheduling after reload is not done, they might not be split
3930 until final, which does not allow splitting when HAVE_ATTR_length
3931 is defined. */
3932 # ifdef INSN_SCHEDULING
3933 return (optimize && !flag_schedule_insns_after_reload);
3934 # else
3935 return (optimize);
3936 # endif
3937 #else
3938 return 0;
3939 #endif
3942 static unsigned int
3943 rest_of_handle_split_before_regstack (void)
3945 split_all_insns ();
3946 return 0;
3949 namespace {
3951 const pass_data pass_data_split_before_regstack =
3953 RTL_PASS, /* type */
3954 "split3", /* name */
3955 OPTGROUP_NONE, /* optinfo_flags */
3956 true, /* has_gate */
3957 true, /* has_execute */
3958 TV_NONE, /* tv_id */
3959 0, /* properties_required */
3960 0, /* properties_provided */
3961 0, /* properties_destroyed */
3962 0, /* todo_flags_start */
3963 0, /* todo_flags_finish */
3966 class pass_split_before_regstack : public rtl_opt_pass
3968 public:
3969 pass_split_before_regstack (gcc::context *ctxt)
3970 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3973 /* opt_pass methods: */
3974 bool gate () { return gate_handle_split_before_regstack (); }
3975 unsigned int execute () {
3976 return rest_of_handle_split_before_regstack ();
3979 }; // class pass_split_before_regstack
3981 } // anon namespace
3983 rtl_opt_pass *
3984 make_pass_split_before_regstack (gcc::context *ctxt)
3986 return new pass_split_before_regstack (ctxt);
3989 static bool
3990 gate_handle_split_before_sched2 (void)
3992 #ifdef INSN_SCHEDULING
3993 return optimize > 0 && flag_schedule_insns_after_reload;
3994 #else
3995 return 0;
3996 #endif
3999 static unsigned int
4000 rest_of_handle_split_before_sched2 (void)
4002 #ifdef INSN_SCHEDULING
4003 split_all_insns ();
4004 #endif
4005 return 0;
4008 namespace {
4010 const pass_data pass_data_split_before_sched2 =
4012 RTL_PASS, /* type */
4013 "split4", /* name */
4014 OPTGROUP_NONE, /* optinfo_flags */
4015 true, /* has_gate */
4016 true, /* has_execute */
4017 TV_NONE, /* tv_id */
4018 0, /* properties_required */
4019 0, /* properties_provided */
4020 0, /* properties_destroyed */
4021 0, /* todo_flags_start */
4022 TODO_verify_flow, /* todo_flags_finish */
4025 class pass_split_before_sched2 : public rtl_opt_pass
4027 public:
4028 pass_split_before_sched2 (gcc::context *ctxt)
4029 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4032 /* opt_pass methods: */
4033 bool gate () { return gate_handle_split_before_sched2 (); }
4034 unsigned int execute () { return rest_of_handle_split_before_sched2 (); }
4036 }; // class pass_split_before_sched2
4038 } // anon namespace
4040 rtl_opt_pass *
4041 make_pass_split_before_sched2 (gcc::context *ctxt)
4043 return new pass_split_before_sched2 (ctxt);
4046 /* The placement of the splitting that we do for shorten_branches
4047 depends on whether regstack is used by the target or not. */
4048 static bool
4049 gate_do_final_split (void)
4051 #if HAVE_ATTR_length && !defined (STACK_REGS)
4052 return 1;
4053 #else
4054 return 0;
4055 #endif
4058 namespace {
4060 const pass_data pass_data_split_for_shorten_branches =
4062 RTL_PASS, /* type */
4063 "split5", /* name */
4064 OPTGROUP_NONE, /* optinfo_flags */
4065 true, /* has_gate */
4066 true, /* has_execute */
4067 TV_NONE, /* tv_id */
4068 0, /* properties_required */
4069 0, /* properties_provided */
4070 0, /* properties_destroyed */
4071 0, /* todo_flags_start */
4072 TODO_verify_rtl_sharing, /* todo_flags_finish */
4075 class pass_split_for_shorten_branches : public rtl_opt_pass
4077 public:
4078 pass_split_for_shorten_branches (gcc::context *ctxt)
4079 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4082 /* opt_pass methods: */
4083 bool gate () { return gate_do_final_split (); }
4084 unsigned int execute () { return split_all_insns_noflow (); }
4086 }; // class pass_split_for_shorten_branches
4088 } // anon namespace
4090 rtl_opt_pass *
4091 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4093 return new pass_split_for_shorten_branches (ctxt);