/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
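
/* Editorial usage sketch, not part of the original file: a typical client
   of the change-group API above tentatively swaps two operands and lets
   apply_change_group either commit or roll back both edits atomically.
   INSN and X here are hypothetical placeholders.

       rtx tem = XEXP (x, 0);
       validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
       validate_change (insn, &XEXP (x, 1), tem, 1);
       if (!apply_change_group ())
         ...both changes were undone and INSN is unmodified...

   canonicalize_change_group below does exactly this, using the unsharing
   variant.  */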
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
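
/* Editorial note, not from the original file: the NUM_CLOBBERS protocol
   above serves machine descriptions whose full pattern is a PARALLEL of a
   SET plus clobbers, e.g. a hypothetical

       (parallel [(set (reg:SI 100) (hypothetical_op:SI (reg:SI 101)))
                  (clobber (scratch:SI))])

   recog can match the bare SET and report NUM_CLOBBERS == 1, and
   insn_invalid_p then materializes the missing (clobber (scratch:SI)).
   Clobbers that reference hard registers are refused here because the
   callers cannot tell whether those registers are live.  */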
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

bool
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object, true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
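
/* Editorial sketch, not part of the original file: passes that probe an
   alternative before committing often drive verify_changes and friends
   directly instead of apply_change_group.  All names are hypothetical.

       int base = num_validated_changes ();
       validate_change (insn, loc, candidate, 1);
       if (verify_changes (base))
         confirm_change_group ();
       else
         cancel_changes (base);

   If the whole group now verifies, confirm_change_group commits it;
   otherwise cancel_changes (base) unwinds only the probe, leaving earlier
   changes of the group in place.  */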
/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
        new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
                                            op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
                                             XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
                                                 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
            {
              wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
            {
              wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
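
/* Editorial usage sketch, not from the original file: a typical caller
   substitutes one register for another throughout an insn and keeps the
   result only if the insn still recognizes.  FROM_REG, TO_REG and INSN
   are hypothetical.

       if (validate_replace_rtx (from_reg, to_reg, insn))
         ...the substitution stuck and INSN was re-recognized...
       else
         ...every occurrence was rolled back by cancel_changes...  */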
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}
/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
         generate move insn with invalid addresses which is made valid
         and efficiently calculated by LRA through further numerous
         transformations.  */
      if (lra_in_progress
          || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
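
/* Editorial example, not from the original file: in a machine description,
   general_operand is normally referenced by name from a match_operand, as
   in this hypothetical move pattern:

       (define_insn "*movsi_sketch"
         [(set (match_operand:SI 0 "nonimmediate_operand" "=r,m")
               (match_operand:SI 1 "general_operand" "g,r"))]
         "..."
         "...")

   genrecog arranges for this function to be called with the operand rtx
   and SImode when matching such a pattern.  */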
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress || REGNO (op) < FIRST_PSEUDO_REGISTER)));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || in_hard_reg_set_p (operand_reg_set,
                                    GET_MODE (op), REGNO (op))));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
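
/* Editorial example, not from the original file: on a hypothetical
   STACK_GROWS_DOWNWARD target where PUSH_ROUNDING is the identity,
   push_operand accepts

       (mem:SI (pre_dec:SI (reg sp)))

   and when PUSH_ROUNDING pads, say, a 2-byte HImode push out to 4 bytes,
   it instead requires the PRE_MODIFY form

       (mem:HI (pre_modify:SI (reg sp) (plus:SI (reg sp) (const_int -4))))

   in both cases also checking that the register really is the stack
   pointer.  */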
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (enum machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;		/* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
                         unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
        p += 1;
        /* A letter followed by a digit indicates an operand number.  */
        if (ISALPHA (p[0]) && ISDIGIT (p[1]))
          p += 1;
        if (ISDIGIT (*p))
          {
            char *endptr;
            unsigned long opnum = strtoul (p, &endptr, 10);
            if (endptr != p && opnum < noperands)
              used[opnum] = true;
            p = endptr;
          }
        else
          p += 1;
        break;

      default:
        p++;
        break;
      }
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
#ifdef AUTO_INC_DEC
          incdec_ok = true;
#endif
          break;

        case 'E':
        case 'F':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (CONST_DOUBLE_AS_FLOAT_P (op)
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_SCALAR_INT_P (op))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_SCALAR_INT_P (op))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}
2017 /* Return 1 if Y is a memory address which contains no side effects
2018 and would remain valid for address space AS after the addition of
2019 a positive integer less than the size of that mode.
2021 We assume that the original address is valid and do not check it.
2022 We do check that it is valid for narrower modes.
2024 If STRICTP is nonzero, we require a strictly valid address,
2025 for the sake of use in reload.c. */
2028 offsettable_address_addr_space_p (int strictp, enum machine_mode mode, rtx y,
2029 addr_space_t as)
2031 enum rtx_code ycode = GET_CODE (y);
2032 rtx z;
2033 rtx y1 = y;
2034 rtx *y2;
2035 int (*addressp) (enum machine_mode, rtx, addr_space_t) =
2036 (strictp ? strict_memory_address_addr_space_p
2037 : memory_address_addr_space_p);
2038 unsigned int mode_sz = GET_MODE_SIZE (mode);
2040 if (CONSTANT_ADDRESS_P (y))
2041 return 1;
2043 /* Adjusting an offsettable address involves changing to a narrower mode.
2044 Make sure that's OK. */
2046 if (mode_dependent_address_p (y, as))
2047 return 0;
2049 enum machine_mode address_mode = GET_MODE (y);
2050 if (address_mode == VOIDmode)
2051 address_mode = targetm.addr_space.address_mode (as);
2052 #ifdef POINTERS_EXTEND_UNSIGNED
2053 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2054 #endif
2056 /* ??? How much offset does an offsettable BLKmode reference need?
2057 Clearly that depends on the situation in which it's being used.
2058 However, the current situation in which we test 0xffffffff is
2059 less than ideal. Caveat user. */
2060 if (mode_sz == 0)
2061 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2063 /* If the expression contains a constant term,
2064 see if it remains valid when max possible offset is added. */
2066 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2068 int good;
2070 y1 = *y2;
2071 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2072 /* Use QImode because an odd displacement may be automatically invalid
2073 for any wider mode. But it should be valid for a single byte. */
2074 good = (*addressp) (QImode, y, as);
2076 /* In any case, restore old contents of memory. */
2077 *y2 = y1;
2078 return good;
2081 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2082 return 0;
2084 /* The offset added here is chosen as the maximum offset that
2085 any instruction could need to add when operating on something
2086 of the specified mode. We assume that if Y and Y+c are
2087 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2088 go inside a LO_SUM here, so we do so as well. */
2089 if (GET_CODE (y) == LO_SUM
2090 && mode != BLKmode
2091 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2092 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2093 plus_constant (address_mode, XEXP (y, 1),
2094 mode_sz - 1));
2095 #ifdef POINTERS_EXTEND_UNSIGNED
2096 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2097 else if (POINTERS_EXTEND_UNSIGNED > 0
2098 && GET_CODE (y) == ZERO_EXTEND
2099 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2100 z = gen_rtx_ZERO_EXTEND (address_mode,
2101 plus_constant (pointer_mode, XEXP (y, 0),
2102 mode_sz - 1));
2103 #endif
2104 else
2105 z = plus_constant (address_mode, y, mode_sz - 1);
2107 /* Use QImode because an odd displacement may be automatically invalid
2108 for any wider mode. But it should be valid for a single byte. */
2109 return (*addressp) (QImode, z, as);
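/* Worked example (illustrative): for an SImode access, mode_sz is 4, so for
   Y == (plus (reg) (const_int 20)) the code above tests whether
   (plus (reg) (const_int 23)) is a valid QImode address.  If it is, every
   byte of the four-byte object is individually addressable.  */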
2112 /* Return 1 if ADDR is an address-expression whose effect depends
2113 on the mode of the memory reference it is used in.
2115 ADDRSPACE is the address space associated with the address.
2117 Autoincrement addressing is a typical example of mode-dependence
2118 because the amount of the increment depends on the mode. */
2120 bool
2121 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2123 /* Auto-increment addressing with anything other than post_modify
2124 or pre_modify always introduces a mode dependency. Catch such
2125 cases now instead of deferring to the target. */
2126 if (GET_CODE (addr) == PRE_INC
2127 || GET_CODE (addr) == POST_INC
2128 || GET_CODE (addr) == PRE_DEC
2129 || GET_CODE (addr) == POST_DEC)
2130 return true;
2132 return targetm.mode_dependent_address_p (addr, addrspace);
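/* For example, (post_inc (reg)) is mode-dependent, because the register is
   incremented by the size of the mode being accessed: the same address rtx
   steps by one byte for a QImode reference but by four for SImode.  */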
2135 /* Like extract_insn, but save the extracted insn and do not extract again
2136 when called again for the same insn, on the assumption that recog_data
2137 still contains valid information. This is used primarily by the gen_attr
2138 infrastructure, which extracts the same insn over and over. */
2139 void
2140 extract_insn_cached (rtx insn)
2142 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2143 return;
2144 extract_insn (insn);
2145 recog_data.insn = insn;
2148 /* Do cached extract_insn, constrain_operands and complain about failures.
2149 Used by insn_attrtab. */
2150 void
2151 extract_constrain_insn_cached (rtx insn)
2153 extract_insn_cached (insn);
2154 if (which_alternative == -1
2155 && !constrain_operands (reload_completed))
2156 fatal_insn_not_found (insn);
2159 /* Do cached constrain_operands and complain about failures. */
2160 int
2161 constrain_operands_cached (int strict)
2163 if (which_alternative == -1)
2164 return constrain_operands (strict);
2165 else
2166 return 1;
2169 /* Analyze INSN and fill in recog_data. */
2171 void
2172 extract_insn (rtx insn)
2174 int i;
2175 int icode;
2176 int noperands;
2177 rtx body = PATTERN (insn);
2179 recog_data.n_operands = 0;
2180 recog_data.n_alternatives = 0;
2181 recog_data.n_dups = 0;
2182 recog_data.is_asm = false;
2184 switch (GET_CODE (body))
2186 case USE:
2187 case CLOBBER:
2188 case ASM_INPUT:
2189 case ADDR_VEC:
2190 case ADDR_DIFF_VEC:
2191 case VAR_LOCATION:
2192 return;
2194 case SET:
2195 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2196 goto asm_insn;
2197 else
2198 goto normal_insn;
2199 case PARALLEL:
2200 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2201 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2202 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2203 goto asm_insn;
2204 else
2205 goto normal_insn;
2206 case ASM_OPERANDS:
2207 asm_insn:
2208 recog_data.n_operands = noperands = asm_noperands (body);
2209 if (noperands >= 0)
2211 /* This insn is an `asm' with operands. */
2213 /* expand_asm_operands makes sure there aren't too many operands. */
2214 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2216 /* Now get the operand values and constraints out of the insn. */
2217 decode_asm_operands (body, recog_data.operand,
2218 recog_data.operand_loc,
2219 recog_data.constraints,
2220 recog_data.operand_mode, NULL);
2221 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2222 if (noperands > 0)
2224 const char *p = recog_data.constraints[0];
2225 recog_data.n_alternatives = 1;
2226 while (*p)
2227 recog_data.n_alternatives += (*p++ == ',');
2229 recog_data.is_asm = true;
2230 break;
2232 fatal_insn_not_found (insn);
2234 default:
2235 normal_insn:
2236 /* Ordinary insn: recognize it, get the operands via insn_extract
2237 and get the constraints. */
2239 icode = recog_memoized (insn);
2240 if (icode < 0)
2241 fatal_insn_not_found (insn);
2243 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2244 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2245 recog_data.n_dups = insn_data[icode].n_dups;
2247 insn_extract (insn);
2249 for (i = 0; i < noperands; i++)
2251 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2252 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2253 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2254 /* VOIDmode match_operands get their mode from the real operand. */
2255 if (recog_data.operand_mode[i] == VOIDmode)
2256 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2259 for (i = 0; i < noperands; i++)
2260 recog_data.operand_type[i]
2261 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2262 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2263 : OP_IN);
2265 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2267 if (INSN_CODE (insn) < 0)
2268 for (i = 0; i < recog_data.n_alternatives; i++)
2269 recog_data.alternative_enabled_p[i] = true;
2270 else
2272 recog_data.insn = insn;
2273 for (i = 0; i < recog_data.n_alternatives; i++)
2275 which_alternative = i;
2276 recog_data.alternative_enabled_p[i]
2277 = HAVE_ATTR_enabled ? get_attr_enabled (insn) : 1;
2281 recog_data.insn = NULL;
2282 which_alternative = -1;
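#if 0
/* Illustrative sketch, not part of the compiler: the alternative count
   computed above for an asm is one more than the number of commas in any
   operand's constraint string, so "r,m,&r" describes three alternatives.  */
static int
example_count_alternatives (const char *constraint)
{
  int n = 1;

  while (*constraint)
    if (*constraint++ == ',')
      n++;
  return n;
}
#endif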
2285 /* After calling extract_insn, you can use this function to extract some
2286 information from the constraint strings into a more usable form.
2287 The collected data is stored in recog_op_alt. */
2288 void
2289 preprocess_constraints (void)
2291 int i;
2293 for (i = 0; i < recog_data.n_operands; i++)
2294 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2295 * sizeof (struct operand_alternative)));
2297 for (i = 0; i < recog_data.n_operands; i++)
2299 int j;
2300 struct operand_alternative *op_alt;
2301 const char *p = recog_data.constraints[i];
2303 op_alt = recog_op_alt[i];
2305 for (j = 0; j < recog_data.n_alternatives; j++)
2307 op_alt[j].cl = NO_REGS;
2308 op_alt[j].constraint = p;
2309 op_alt[j].matches = -1;
2310 op_alt[j].matched = -1;
2312 if (!recog_data.alternative_enabled_p[j])
2314 p = skip_alternative (p);
2315 continue;
2318 if (*p == '\0' || *p == ',')
2320 op_alt[j].anything_ok = 1;
2321 continue;
2324 for (;;)
2326 char c = *p;
2327 if (c == '#')
2329 c = *++p;
2330 while (c != ',' && c != '\0');
2331 if (c == ',' || c == '\0')
2333 p++;
2334 break;
2337 switch (c)
2339 case '=': case '+': case '*': case '%':
2340 case 'E': case 'F': case 'G': case 'H':
2341 case 's': case 'i': case 'n':
2342 case 'I': case 'J': case 'K': case 'L':
2343 case 'M': case 'N': case 'O': case 'P':
2344 /* These don't say anything we care about. */
2345 break;
2347 case '?':
2348 op_alt[j].reject += 6;
2349 break;
2350 case '!':
2351 op_alt[j].reject += 600;
2352 break;
2353 case '&':
2354 op_alt[j].earlyclobber = 1;
2355 break;
2357 case '0': case '1': case '2': case '3': case '4':
2358 case '5': case '6': case '7': case '8': case '9':
2360 char *end;
2361 op_alt[j].matches = strtoul (p, &end, 10);
2362 recog_op_alt[op_alt[j].matches][j].matched = i;
2363 p = end;
2365 continue;
2367 case TARGET_MEM_CONSTRAINT:
2368 op_alt[j].memory_ok = 1;
2369 break;
2370 case '<':
2371 op_alt[j].decmem_ok = 1;
2372 break;
2373 case '>':
2374 op_alt[j].incmem_ok = 1;
2375 break;
2376 case 'V':
2377 op_alt[j].nonoffmem_ok = 1;
2378 break;
2379 case 'o':
2380 op_alt[j].offmem_ok = 1;
2381 break;
2382 case 'X':
2383 op_alt[j].anything_ok = 1;
2384 break;
2386 case 'p':
2387 op_alt[j].is_address = 1;
2388 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2389 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2390 ADDRESS, SCRATCH)];
2391 break;
2393 case 'g':
2394 case 'r':
2395 op_alt[j].cl =
2396 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2397 break;
2399 default:
2400 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2402 op_alt[j].memory_ok = 1;
2403 break;
2405 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2407 op_alt[j].is_address = 1;
2408 op_alt[j].cl
2409 = (reg_class_subunion
2410 [(int) op_alt[j].cl]
2411 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2412 ADDRESS, SCRATCH)]);
2413 break;
2416 op_alt[j].cl
2417 = (reg_class_subunion
2418 [(int) op_alt[j].cl]
2419 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2420 break;
2422 p += CONSTRAINT_LEN (c, p);
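/* For example (illustrative): given the constraint string "=r,m" for an
   operand, the loop above records for alternative 0 that the operand wants a
   GENERAL_REGS register (via 'r'), and for alternative 1 that a memory
   operand is acceptable (memory_ok), leaving cl as NO_REGS.  */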
2428 /* Check the operands of an insn against the insn's operand constraints
2429 and return 1 if they are valid.
2430 The information about the insn's operands, constraints, operand modes
2431 etc. is obtained from the global variables set up by extract_insn.
2433 WHICH_ALTERNATIVE is set to a number which indicates which
2434 alternative of constraints was matched: 0 for the first alternative,
2435 1 for the next, etc.
2437 In addition, when two operands are required to match
2438 and it happens that the output operand is (reg) while the
2439 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2440 make the output operand look like the input.
2441 This is because the output operand is the one the template will print.
2443 This is used in final, just before printing the assembler code and by
2444 the routines that determine an insn's attribute.
2446 If STRICT is a positive nonzero value, it means that we have been
2447 called after reload has been completed. In that case, we must
2448 do all checks strictly. If it is zero, it means that we have been called
2449 before reload has completed. In that case, we first try to see if we can
2450 find an alternative that matches strictly. If not, we try again, this
2451 time assuming that reload will fix up the insn. This provides a "best
2452 guess" for the alternative and is used to compute attributes of insns prior
2453 to reload. A negative value of STRICT is used for this internal call. */
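/* For example (illustrative): if operand 0's constraints are "=r,m" and
   operand 1's are "rmi,r", alternative 0 is satisfied when operand 0 is a
   register and operand 1 is a register, memory, or immediate; failing that,
   alternative 1 requires operand 0 in memory and operand 1 in a register,
   and on success which_alternative is set to 1.  */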
2455 struct funny_match
2457 int this_op, other;
2460 int
2461 constrain_operands (int strict)
2463 const char *constraints[MAX_RECOG_OPERANDS];
2464 int matching_operands[MAX_RECOG_OPERANDS];
2465 int earlyclobber[MAX_RECOG_OPERANDS];
2466 int c;
2468 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2469 int funny_match_index;
2471 which_alternative = 0;
2472 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2473 return 1;
2475 for (c = 0; c < recog_data.n_operands; c++)
2477 constraints[c] = recog_data.constraints[c];
2478 matching_operands[c] = -1;
2483 int seen_earlyclobber_at = -1;
2484 int opno;
2485 int lose = 0;
2486 funny_match_index = 0;
2488 if (!recog_data.alternative_enabled_p[which_alternative])
2490 int i;
2492 for (i = 0; i < recog_data.n_operands; i++)
2493 constraints[i] = skip_alternative (constraints[i]);
2495 which_alternative++;
2496 continue;
2499 for (opno = 0; opno < recog_data.n_operands; opno++)
2501 rtx op = recog_data.operand[opno];
2502 enum machine_mode mode = GET_MODE (op);
2503 const char *p = constraints[opno];
2504 int offset = 0;
2505 int win = 0;
2506 int val;
2507 int len;
2509 earlyclobber[opno] = 0;
2511 /* A unary operator may be accepted by the predicate, but it
2512 is irrelevant for matching constraints. */
2513 if (UNARY_P (op))
2514 op = XEXP (op, 0);
2516 if (GET_CODE (op) == SUBREG)
2518 if (REG_P (SUBREG_REG (op))
2519 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2520 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2521 GET_MODE (SUBREG_REG (op)),
2522 SUBREG_BYTE (op),
2523 GET_MODE (op));
2524 op = SUBREG_REG (op);
2527 /* An empty constraint or empty alternative
2528 allows anything which matched the pattern. */
2529 if (*p == 0 || *p == ',')
2530 win = 1;
2533 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2535 case '\0':
2536 len = 0;
2537 break;
2538 case ',':
2539 c = '\0';
2540 break;
2542 case '?': case '!': case '*': case '%':
2543 case '=': case '+':
2544 break;
2546 case '#':
2547 /* Ignore rest of this alternative as far as
2548 constraint checking is concerned. */
2550 p++;
2551 while (*p && *p != ',');
2552 len = 0;
2553 break;
2555 case '&':
2556 earlyclobber[opno] = 1;
2557 if (seen_earlyclobber_at < 0)
2558 seen_earlyclobber_at = opno;
2559 break;
2561 case '0': case '1': case '2': case '3': case '4':
2562 case '5': case '6': case '7': case '8': case '9':
2564 /* This operand must be the same as a previous one.
2565 This kind of constraint is used for instructions such
2566 as add when they take only two operands.
2568 Note that the lower-numbered operand is passed first.
2570 If we are not testing strictly, assume that this
2571 constraint will be satisfied. */
2573 char *end;
2574 int match;
2576 match = strtoul (p, &end, 10);
2577 p = end;
2579 if (strict < 0)
2580 val = 1;
2581 else
2583 rtx op1 = recog_data.operand[match];
2584 rtx op2 = recog_data.operand[opno];
2586 /* A unary operator may be accepted by the predicate,
2587 but it is irrelevant for matching constraints. */
2588 if (UNARY_P (op1))
2589 op1 = XEXP (op1, 0);
2590 if (UNARY_P (op2))
2591 op2 = XEXP (op2, 0);
2593 val = operands_match_p (op1, op2);
2596 matching_operands[opno] = match;
2597 matching_operands[match] = opno;
2599 if (val != 0)
2600 win = 1;
2602 /* If output is *x and input is *--x, arrange later
2603 to change the output to *--x as well, since the
2604 output op is the one that will be printed. */
2605 if (val == 2 && strict > 0)
2607 funny_match[funny_match_index].this_op = opno;
2608 funny_match[funny_match_index++].other = match;
2611 len = 0;
2612 break;
2614 case 'p':
2615 /* p is used for address_operands. When we are called by
2616 gen_reload, no one will have checked that the address is
2617 strictly valid, i.e., that all pseudos requiring hard regs
2618 have gotten them. */
2619 if (strict <= 0
2620 || (strict_memory_address_p (recog_data.operand_mode[opno],
2621 op)))
2622 win = 1;
2623 break;
2625 /* No need to check general_operand again;
2626 it was done in insn-recog.c. Well, except that reload
2627 doesn't check the validity of its replacements, but
2628 that should only matter when there's a bug. */
2629 case 'g':
2630 /* Anything goes unless it is a REG and really has a hard reg
2631 but the hard reg is not in the class GENERAL_REGS. */
2632 if (REG_P (op))
2634 if (strict < 0
2635 || GENERAL_REGS == ALL_REGS
2636 || (reload_in_progress
2637 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2638 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2639 win = 1;
2641 else if (strict < 0 || general_operand (op, mode))
2642 win = 1;
2643 break;
2645 case 'X':
2646 /* This is used for a MATCH_SCRATCH in the cases when
2647 we don't actually need anything. So anything goes
2648 any time. */
2649 win = 1;
2650 break;
2652 case TARGET_MEM_CONSTRAINT:
2653 /* Memory operands must be valid, to the extent
2654 required by STRICT. */
2655 if (MEM_P (op))
2657 if (strict > 0
2658 && !strict_memory_address_addr_space_p
2659 (GET_MODE (op), XEXP (op, 0),
2660 MEM_ADDR_SPACE (op)))
2661 break;
2662 if (strict == 0
2663 && !memory_address_addr_space_p
2664 (GET_MODE (op), XEXP (op, 0),
2665 MEM_ADDR_SPACE (op)))
2666 break;
2667 win = 1;
2669 /* Before reload, accept what reload can turn into mem. */
2670 else if (strict < 0 && CONSTANT_P (op))
2671 win = 1;
2672 /* During reload, accept a pseudo */
2673 else if (reload_in_progress && REG_P (op)
2674 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2675 win = 1;
2676 break;
2678 case '<':
2679 if (MEM_P (op)
2680 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2681 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2682 win = 1;
2683 break;
2685 case '>':
2686 if (MEM_P (op)
2687 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2688 || GET_CODE (XEXP (op, 0)) == POST_INC))
2689 win = 1;
2690 break;
2692 case 'E':
2693 case 'F':
2694 if (CONST_DOUBLE_AS_FLOAT_P (op)
2695 || (GET_CODE (op) == CONST_VECTOR
2696 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2697 win = 1;
2698 break;
2700 case 'G':
2701 case 'H':
2702 if (CONST_DOUBLE_AS_FLOAT_P (op)
2703 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2704 win = 1;
2705 break;
2707 case 's':
2708 if (CONST_SCALAR_INT_P (op))
2709 break;
2710 case 'i':
2711 if (CONSTANT_P (op))
2712 win = 1;
2713 break;
2715 case 'n':
2716 if (CONST_SCALAR_INT_P (op))
2717 win = 1;
2718 break;
2720 case 'I':
2721 case 'J':
2722 case 'K':
2723 case 'L':
2724 case 'M':
2725 case 'N':
2726 case 'O':
2727 case 'P':
2728 if (CONST_INT_P (op)
2729 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2730 win = 1;
2731 break;
2733 case 'V':
2734 if (MEM_P (op)
2735 && ((strict > 0 && ! offsettable_memref_p (op))
2736 || (strict < 0
2737 && !(CONSTANT_P (op) || MEM_P (op)))
2738 || (reload_in_progress
2739 && !(REG_P (op)
2740 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2741 win = 1;
2742 break;
2744 case 'o':
2745 if ((strict > 0 && offsettable_memref_p (op))
2746 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2747 /* Before reload, accept what reload can handle. */
2748 || (strict < 0
2749 && (CONSTANT_P (op) || MEM_P (op)))
2750 /* During reload, accept a pseudo */
2751 || (reload_in_progress && REG_P (op)
2752 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2753 win = 1;
2754 break;
2756 default:
2758 enum reg_class cl;
2760 cl = (c == 'r'
2761 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2762 if (cl != NO_REGS)
2764 if (strict < 0
2765 || (strict == 0
2766 && REG_P (op)
2767 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2768 || (strict == 0 && GET_CODE (op) == SCRATCH)
2769 || (REG_P (op)
2770 && reg_fits_class_p (op, cl, offset, mode)))
2771 win = 1;
2773 #ifdef EXTRA_CONSTRAINT_STR
2774 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2775 win = 1;
2777 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2778 /* Every memory operand can be reloaded to fit. */
2779 && ((strict < 0 && MEM_P (op))
2780 /* Before reload, accept what reload can turn
2781 into mem. */
2782 || (strict < 0 && CONSTANT_P (op))
2783 /* During reload, accept a pseudo */
2784 || (reload_in_progress && REG_P (op)
2785 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2786 win = 1;
2787 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2788 /* Every address operand can be reloaded to fit. */
2789 && strict < 0)
2790 win = 1;
2791 /* Cater to architectures like IA-64 that define extra memory
2792 constraints without using define_memory_constraint. */
2793 else if (reload_in_progress
2794 && REG_P (op)
2795 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2796 && reg_renumber[REGNO (op)] < 0
2797 && reg_equiv_mem (REGNO (op)) != 0
2798 && EXTRA_CONSTRAINT_STR
2799 (reg_equiv_mem (REGNO (op)), c, p))
2800 win = 1;
2801 #endif
2802 break;
2805 while (p += len, c);
2807 constraints[opno] = p;
2808 /* If this operand did not win somehow,
2809 this alternative loses. */
2810 if (! win)
2811 lose = 1;
2813 /* This alternative won; the operands are ok.
2814 Change whichever operands this alternative says to change. */
2815 if (! lose)
2817 int opno, eopno;
2819 /* See if any earlyclobber operand conflicts with some other
2820 operand. */
2822 if (strict > 0 && seen_earlyclobber_at >= 0)
2823 for (eopno = seen_earlyclobber_at;
2824 eopno < recog_data.n_operands;
2825 eopno++)
2826 /* Ignore earlyclobber operands now in memory,
2827 because we would often report failure when we have
2828 two memory operands, one of which was formerly a REG. */
2829 if (earlyclobber[eopno]
2830 && REG_P (recog_data.operand[eopno]))
2831 for (opno = 0; opno < recog_data.n_operands; opno++)
2832 if ((MEM_P (recog_data.operand[opno])
2833 || recog_data.operand_type[opno] != OP_OUT)
2834 && opno != eopno
2835 /* Ignore things like match_operator operands. */
2836 && *recog_data.constraints[opno] != 0
2837 && ! (matching_operands[opno] == eopno
2838 && operands_match_p (recog_data.operand[opno],
2839 recog_data.operand[eopno]))
2840 && ! safe_from_earlyclobber (recog_data.operand[opno],
2841 recog_data.operand[eopno]))
2842 lose = 1;
2844 if (! lose)
2846 while (--funny_match_index >= 0)
2848 recog_data.operand[funny_match[funny_match_index].other]
2849 = recog_data.operand[funny_match[funny_match_index].this_op];
2852 #ifdef AUTO_INC_DEC
2853 /* For operands without < or > constraints reject side-effects. */
2854 if (recog_data.is_asm)
2856 for (opno = 0; opno < recog_data.n_operands; opno++)
2857 if (MEM_P (recog_data.operand[opno]))
2858 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2860 case PRE_INC:
2861 case POST_INC:
2862 case PRE_DEC:
2863 case POST_DEC:
2864 case PRE_MODIFY:
2865 case POST_MODIFY:
2866 if (strchr (recog_data.constraints[opno], '<') == NULL
2867 && strchr (recog_data.constraints[opno], '>')
2868 == NULL)
2869 return 0;
2870 break;
2871 default:
2872 break;
2875 #endif
2876 return 1;
2880 which_alternative++;
2882 while (which_alternative < recog_data.n_alternatives);
2884 which_alternative = -1;
2885 /* If we are about to reject this, but we are not to test strictly,
2886 try a very loose test. Only return failure if it fails also. */
2887 if (strict == 0)
2888 return constrain_operands (-1);
2889 else
2890 return 0;
2893 /* Return true iff OPERAND (assumed to be a REG rtx)
2894 is a hard reg in class CL when its regno is offset by OFFSET
2895 and changed to mode MODE.
2896 If OPERAND occupies multiple hard regs, all of them must be in CL. */
2898 bool
2899 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2900 enum machine_mode mode)
2902 unsigned int regno = REGNO (operand);
2904 if (cl == NO_REGS)
2905 return false;
2907 /* Regno must not be a pseudo register. Offset may be negative. */
2908 return (HARD_REGISTER_NUM_P (regno)
2909 && HARD_REGISTER_NUM_P (regno + offset)
2910 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2911 regno + offset));
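/* For example (illustrative): if OPERAND is a DImode value occupying two
   hard registers on the target, both REGNO+OFFSET and REGNO+OFFSET+1 must
   be members of CL, since in_hard_reg_set_p checks every hard register
   covered by MODE.  */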
2914 /* Split a single instruction. Helper function for split_all_insns and
2915 split_all_insns_noflow. Return the last insn in the sequence if successful,
2916 or NULL if unsuccessful. */
2918 static rtx
2919 split_insn (rtx insn)
2921 /* Split insns here to get max fine-grain parallelism. */
2922 rtx first = PREV_INSN (insn);
2923 rtx last = try_split (PATTERN (insn), insn, 1);
2924 rtx insn_set, last_set, note;
2926 if (last == insn)
2927 return NULL_RTX;
2929 /* If the original instruction was a single set that was known to be
2930 equivalent to a constant, see if we can say the same about the last
2931 instruction in the split sequence. The two instructions must set
2932 the same destination. */
2933 insn_set = single_set (insn);
2934 if (insn_set)
2936 last_set = single_set (last);
2937 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2939 note = find_reg_equal_equiv_note (insn);
2940 if (note && CONSTANT_P (XEXP (note, 0)))
2941 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2942 else if (CONSTANT_P (SET_SRC (insn_set)))
2943 set_unique_reg_note (last, REG_EQUAL,
2944 copy_rtx (SET_SRC (insn_set)));
2948 /* try_split returns the NOTE that INSN became. */
2949 SET_INSN_DELETED (insn);
2951 /* ??? Coddle to md files that generate subregs in post-reload
2952 splitters instead of computing the proper hard register. */
2953 if (reload_completed && first != last)
2955 first = NEXT_INSN (first);
2956 for (;;)
2958 if (INSN_P (first))
2959 cleanup_subreg_operands (first);
2960 if (first == last)
2961 break;
2962 first = NEXT_INSN (first);
2966 return last;
2969 /* Split all insns in the function. */
2971 void
2972 split_all_insns (void)
2974 sbitmap blocks;
2975 bool changed;
2976 basic_block bb;
2978 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2979 bitmap_clear (blocks);
2980 changed = false;
2982 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2984 rtx insn, next;
2985 bool finish = false;
2987 rtl_profile_for_bb (bb);
2988 for (insn = BB_HEAD (bb); !finish ; insn = next)
2990 /* Can't use `next_real_insn' because that might go across
2991 CODE_LABELS and short-out basic blocks. */
2992 next = NEXT_INSN (insn);
2993 finish = (insn == BB_END (bb));
2994 if (INSN_P (insn))
2996 rtx set = single_set (insn);
2998 /* Don't split no-op move insns. These should silently
2999 disappear later in final. Splitting such insns would
3000 break the code that handles LIBCALL blocks. */
3001 if (set && set_noop_p (set))
3003 /* Nops get in the way while scheduling, so delete them
3004 now if register allocation has already been done. It
3005 is too risky to try to do this before register
3006 allocation, and there are unlikely to be very many
3007 nops then anyway. */
3008 if (reload_completed)
3009 delete_insn_and_edges (insn);
3011 else
3013 if (split_insn (insn))
3015 bitmap_set_bit (blocks, bb->index);
3016 changed = true;
3023 default_rtl_profile ();
3024 if (changed)
3025 find_many_sub_basic_blocks (blocks);
3027 #ifdef ENABLE_CHECKING
3028 verify_flow_info ();
3029 #endif
3031 sbitmap_free (blocks);
3034 /* Same as split_all_insns, but do not expect CFG to be available.
3035 Used by machine dependent reorg passes. */
3037 unsigned int
3038 split_all_insns_noflow (void)
3040 rtx next, insn;
3042 for (insn = get_insns (); insn; insn = next)
3044 next = NEXT_INSN (insn);
3045 if (INSN_P (insn))
3047 /* Don't split no-op move insns. These should silently
3048 disappear later in final. Splitting such insns would
3049 break the code that handles LIBCALL blocks. */
3050 rtx set = single_set (insn);
3051 if (set && set_noop_p (set))
3053 /* Nops get in the way while scheduling, so delete them
3054 now if register allocation has already been done. It
3055 is too risky to try to do this before register
3056 allocation, and there are unlikely to be very many
3057 nops then anyway.
3059 ??? Should we use delete_insn when the CFG isn't valid? */
3060 if (reload_completed)
3061 delete_insn_and_edges (insn);
3063 else
3064 split_insn (insn);
3067 return 0;
3070 #ifdef HAVE_peephole2
3071 struct peep2_insn_data
3073 rtx insn;
3074 regset live_before;
3077 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3078 static int peep2_current;
3080 static bool peep2_do_rebuild_jump_labels;
3081 static bool peep2_do_cleanup_cfg;
3083 /* The number of instructions available to match a peep2. */
3084 int peep2_current_count;
3086 /* A non-insn marker indicating the last insn of the block.
3087 The live_before regset for this element is correct, indicating
3088 DF_LIVE_OUT for the block. */
3089 #define PEEP2_EOB pc_rtx
3091 /* Wrap N to fit into the peep2_insn_data buffer. */
3093 static int
3094 peep2_buf_position (int n)
3096 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3097 n -= MAX_INSNS_PER_PEEP2 + 1;
3098 return n;
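/* For example: with the usual MAX_INSNS_PER_PEEP2 of 5 (see recog.h) the
   buffer has six slots numbered 0..5, and peep2_buf_position (7) wraps to 1.
   Callers never advance an index by more than one full buffer length, so a
   single subtraction suffices.  */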
3101 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3102 does not exist. Used by the recognizer to find the next insn to match
3103 in a multi-insn pattern. */
3105 rtx
3106 peep2_next_insn (int n)
3108 gcc_assert (n <= peep2_current_count);
3110 n = peep2_buf_position (peep2_current + n);
3112 return peep2_insn_data[n].insn;
3115 /* Return true if REGNO is dead before the Nth non-note insn
3116 after `current'. */
3118 int
3119 peep2_regno_dead_p (int ofs, int regno)
3121 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3123 ofs = peep2_buf_position (peep2_current + ofs);
3125 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3127 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3130 /* Similarly for a REG. */
3132 int
3133 peep2_reg_dead_p (int ofs, rtx reg)
3135 int regno, n;
3137 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3139 ofs = peep2_buf_position (peep2_current + ofs);
3141 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3143 regno = REGNO (reg);
3144 n = hard_regno_nregs[regno][GET_MODE (reg)];
3145 while (--n >= 0)
3146 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3147 return 0;
3148 return 1;
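/* These predicates are typically used in define_peephole2 conditions in the
   machine description, e.g. (illustrative)

       "peep2_reg_dead_p (2, operands[0])"

   to require that operands[0] is dead after a two-insn match.  */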
3151 /* Regno offset to be used in the register search. */
3152 static int search_ofs;
3154 /* Try to find a hard register of mode MODE, matching the register class in
3155 CLASS_STR, which is available from the beginning of the insn at peephole
3156 buffer position FROM and remains available until the end of the insn at
3157 buffer position TO.
3159 Registers that already have bits set in REG_SET will not be considered.
3161 If an appropriate register is available, it will be returned and the
3162 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3163 returned. */
3165 rtx
3166 peep2_find_free_register (int from, int to, const char *class_str,
3167 enum machine_mode mode, HARD_REG_SET *reg_set)
3169 enum reg_class cl;
3170 HARD_REG_SET live;
3171 df_ref *def_rec;
3172 int i;
3174 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3175 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3177 from = peep2_buf_position (peep2_current + from);
3178 to = peep2_buf_position (peep2_current + to);
3180 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3181 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3183 while (from != to)
3185 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3187 /* Don't use registers set or clobbered by the insn. */
3188 for (def_rec = DF_INSN_DEFS (peep2_insn_data[from].insn);
3189 *def_rec; def_rec++)
3190 SET_HARD_REG_BIT (live, DF_REF_REGNO (*def_rec));
3192 from = peep2_buf_position (from + 1);
3195 cl = (class_str[0] == 'r' ? GENERAL_REGS
3196 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3198 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3200 int raw_regno, regno, success, j;
3202 /* Distribute the free registers as much as possible. */
3203 raw_regno = search_ofs + i;
3204 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3205 raw_regno -= FIRST_PSEUDO_REGISTER;
3206 #ifdef REG_ALLOC_ORDER
3207 regno = reg_alloc_order[raw_regno];
3208 #else
3209 regno = raw_regno;
3210 #endif
3212 /* Can it support the mode we need? */
3213 if (! HARD_REGNO_MODE_OK (regno, mode))
3214 continue;
3216 success = 1;
3217 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3219 /* Don't allocate fixed registers. */
3220 if (fixed_regs[regno + j])
3222 success = 0;
3223 break;
3225 /* Don't allocate global registers. */
3226 if (global_regs[regno + j])
3228 success = 0;
3229 break;
3231 /* Make sure the register is of the right class. */
3232 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3234 success = 0;
3235 break;
3237 /* And that we don't create an extra save/restore. */
3238 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3240 success = 0;
3241 break;
3244 if (! targetm.hard_regno_scratch_ok (regno + j))
3246 success = 0;
3247 break;
3250 /* And we don't clobber traceback for noreturn functions. */
3251 if ((regno + j == FRAME_POINTER_REGNUM
3252 || regno + j == HARD_FRAME_POINTER_REGNUM)
3253 && (! reload_completed || frame_pointer_needed))
3255 success = 0;
3256 break;
3259 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3260 || TEST_HARD_REG_BIT (live, regno + j))
3262 success = 0;
3263 break;
3267 if (success)
3269 add_to_hard_reg_set (reg_set, mode, regno);
3271 /* Start the next search with the next register. */
3272 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3273 raw_regno = 0;
3274 search_ofs = raw_regno;
3276 return gen_rtx_REG (mode, regno);
3280 search_ofs = 0;
3281 return NULL_RTX;
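/* Generated peephole2 code uses this routine to implement (match_scratch ...)
   operands.  A sketch of such a call (illustrative; _regs_allocated is a
   hypothetical HARD_REG_SET tracking scratches already handed out):

       operands[2] = peep2_find_free_register (0, 1, "r", SImode,
                                               &_regs_allocated);

   NULL_RTX results if no GENERAL_REGS register is free from insn 0 through
   insn 1 of the match.  */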
3284 /* Forget all currently tracked instructions; remember only the current
3285 LIVE regset. */
3287 static void
3288 peep2_reinit_state (regset live)
3290 int i;
3292 /* Indicate that all slots except the last hold invalid data. */
3293 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3294 peep2_insn_data[i].insn = NULL_RTX;
3295 peep2_current_count = 0;
3297 /* Indicate that the last slot contains live_after data. */
3298 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3299 peep2_current = MAX_INSNS_PER_PEEP2;
3301 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3304 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3305 starting at INSN. Perform the replacement, removing the old insns and
3306 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3307 if the replacement is rejected. */
3309 static rtx
3310 peep2_attempt (basic_block bb, rtx insn, int match_len, rtx attempt)
3312 int i;
3313 rtx last, eh_note, as_note, before_try, x;
3314 rtx old_insn, new_insn;
3315 bool was_call = false;
3317 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3318 match more than one insn, or to be split into more than one insn. */
3319 old_insn = peep2_insn_data[peep2_current].insn;
3320 if (RTX_FRAME_RELATED_P (old_insn))
3322 bool any_note = false;
3323 rtx note;
3325 if (match_len != 0)
3326 return NULL;
3328 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3329 may be in the stream for the purpose of register allocation. */
3330 if (active_insn_p (attempt))
3331 new_insn = attempt;
3332 else
3333 new_insn = next_active_insn (attempt);
3334 if (next_active_insn (new_insn))
3335 return NULL;
3337 /* We have a 1-1 replacement. Copy over any frame-related info. */
3338 RTX_FRAME_RELATED_P (new_insn) = 1;
3340 /* Allow the backend to fill in a note during the split. */
3341 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3342 switch (REG_NOTE_KIND (note))
3344 case REG_FRAME_RELATED_EXPR:
3345 case REG_CFA_DEF_CFA:
3346 case REG_CFA_ADJUST_CFA:
3347 case REG_CFA_OFFSET:
3348 case REG_CFA_REGISTER:
3349 case REG_CFA_EXPRESSION:
3350 case REG_CFA_RESTORE:
3351 case REG_CFA_SET_VDRAP:
3352 any_note = true;
3353 break;
3354 default:
3355 break;
3358 /* If the backend didn't supply a note, copy one over. */
3359 if (!any_note)
3360 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3361 switch (REG_NOTE_KIND (note))
3363 case REG_FRAME_RELATED_EXPR:
3364 case REG_CFA_DEF_CFA:
3365 case REG_CFA_ADJUST_CFA:
3366 case REG_CFA_OFFSET:
3367 case REG_CFA_REGISTER:
3368 case REG_CFA_EXPRESSION:
3369 case REG_CFA_RESTORE:
3370 case REG_CFA_SET_VDRAP:
3371 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3372 any_note = true;
3373 break;
3374 default:
3375 break;
3378 /* If there still isn't a note, make sure the unwind info sees the
3379 same expression as before the split. */
3380 if (!any_note)
3382 rtx old_set, new_set;
3384 /* The old insn had better have been simple, or annotated. */
3385 old_set = single_set (old_insn);
3386 gcc_assert (old_set != NULL);
3388 new_set = single_set (new_insn);
3389 if (!new_set || !rtx_equal_p (new_set, old_set))
3390 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3393 /* Copy prologue/epilogue status. This is required in order to keep
3394 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3395 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3398 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3399 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3400 cfg-related call notes. */
3401 for (i = 0; i <= match_len; ++i)
3403 int j;
3404 rtx note;
3406 j = peep2_buf_position (peep2_current + i);
3407 old_insn = peep2_insn_data[j].insn;
3408 if (!CALL_P (old_insn))
3409 continue;
3410 was_call = true;
3412 new_insn = attempt;
3413 while (new_insn != NULL_RTX)
3415 if (CALL_P (new_insn))
3416 break;
3417 new_insn = NEXT_INSN (new_insn);
3420 gcc_assert (new_insn != NULL_RTX);
3422 CALL_INSN_FUNCTION_USAGE (new_insn)
3423 = CALL_INSN_FUNCTION_USAGE (old_insn);
3425 for (note = REG_NOTES (old_insn);
3426 note;
3427 note = XEXP (note, 1))
3428 switch (REG_NOTE_KIND (note))
3430 case REG_NORETURN:
3431 case REG_SETJMP:
3432 case REG_TM:
3433 add_reg_note (new_insn, REG_NOTE_KIND (note),
3434 XEXP (note, 0));
3435 break;
3436 default:
3437 /* Discard all other reg notes. */
3438 break;
3441 /* Croak if there is another call in the sequence. */
3442 while (++i <= match_len)
3444 j = peep2_buf_position (peep2_current + i);
3445 old_insn = peep2_insn_data[j].insn;
3446 gcc_assert (!CALL_P (old_insn));
3448 break;
3451 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3452 move those notes over to the new sequence. */
3453 as_note = NULL;
3454 for (i = match_len; i >= 0; --i)
3456 int j = peep2_buf_position (peep2_current + i);
3457 old_insn = peep2_insn_data[j].insn;
3459 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3460 if (as_note)
3461 break;
3464 i = peep2_buf_position (peep2_current + match_len);
3465 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3467 /* Replace the old sequence with the new. */
3468 last = emit_insn_after_setloc (attempt,
3469 peep2_insn_data[i].insn,
3470 INSN_LOCATION (peep2_insn_data[i].insn));
3471 before_try = PREV_INSN (insn);
3472 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3474 /* Re-insert the EH_REGION notes. */
3475 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3477 edge eh_edge;
3478 edge_iterator ei;
3480 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3481 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3482 break;
3484 if (eh_note)
3485 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3487 if (eh_edge)
3488 for (x = last; x != before_try; x = PREV_INSN (x))
3489 if (x != BB_END (bb)
3490 && (can_throw_internal (x)
3491 || can_nonlocal_goto (x)))
3493 edge nfte, nehe;
3494 int flags;
3496 nfte = split_block (bb, x);
3497 flags = (eh_edge->flags
3498 & (EDGE_EH | EDGE_ABNORMAL));
3499 if (CALL_P (x))
3500 flags |= EDGE_ABNORMAL_CALL;
3501 nehe = make_edge (nfte->src, eh_edge->dest,
3502 flags);
3504 nehe->probability = eh_edge->probability;
3505 nfte->probability
3506 = REG_BR_PROB_BASE - nehe->probability;
3508 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3509 bb = nfte->src;
3510 eh_edge = nehe;
3513 /* Splitting may have turned a possibly trapping insn into a
3514 non-trapping one. Zap the now-dummy outgoing edges. */
3515 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3518 /* Re-insert the ARGS_SIZE notes. */
3519 if (as_note)
3520 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3522 /* If we generated a jump instruction, it won't have
3523 JUMP_LABEL set. Recompute after we're done. */
3524 for (x = last; x != before_try; x = PREV_INSN (x))
3525 if (JUMP_P (x))
3527 peep2_do_rebuild_jump_labels = true;
3528 break;
3531 return last;
3534 /* After performing a replacement in basic block BB, fix up the life
3535 information in our buffer. LAST is the last of the insns that we
3536 emitted as a replacement. PREV is the insn before the start of
3537 the replacement. MATCH_LEN is the number of instructions that were
3538 matched, and which now need to be replaced in the buffer. */
3540 static void
3541 peep2_update_life (basic_block bb, int match_len, rtx last, rtx prev)
3543 int i = peep2_buf_position (peep2_current + match_len + 1);
3544 rtx x;
3545 regset_head live;
3547 INIT_REG_SET (&live);
3548 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3550 gcc_assert (peep2_current_count >= match_len + 1);
3551 peep2_current_count -= match_len + 1;
3553 x = last;
3556 if (INSN_P (x))
3558 df_insn_rescan (x);
3559 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3561 peep2_current_count++;
3562 if (--i < 0)
3563 i = MAX_INSNS_PER_PEEP2;
3564 peep2_insn_data[i].insn = x;
3565 df_simulate_one_insn_backwards (bb, x, &live);
3566 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3569 x = PREV_INSN (x);
3571 while (x != prev);
3572 CLEAR_REG_SET (&live);
3574 peep2_current = i;
3577 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3578 Return true if we added it, false otherwise. The caller will try to match
3579 peepholes against the buffer if we return false; otherwise it will try to
3580 add more instructions to the buffer. */
3582 static bool
3583 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3585 int pos;
3587 /* Once we have filled the maximum number of insns the buffer can hold,
3588 allow the caller to match the insns against peepholes. We wait until
3589 the buffer is full in case the target has similar peepholes of different
3590 length; we always want to match the longest if possible. */
3591 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3592 return false;
3594 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3595 any other pattern, lest it change the semantics of the frame info. */
3596 if (RTX_FRAME_RELATED_P (insn))
3598 /* Let the buffer drain first. */
3599 if (peep2_current_count > 0)
3600 return false;
3601 /* Now the insn will be the only thing in the buffer. */
3604 pos = peep2_buf_position (peep2_current + peep2_current_count);
3605 peep2_insn_data[pos].insn = insn;
3606 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3607 peep2_current_count++;
3609 df_simulate_one_insn_forwards (bb, insn, live);
3610 return true;
3613 /* Perform the peephole2 optimization pass. */
3615 static void
3616 peephole2_optimize (void)
3618 rtx insn;
3619 bitmap live;
3620 int i;
3621 basic_block bb;
3623 peep2_do_cleanup_cfg = false;
3624 peep2_do_rebuild_jump_labels = false;
3626 df_set_flags (DF_LR_RUN_DCE);
3627 df_note_add_problem ();
3628 df_analyze ();
3630 /* Initialize the regsets we're going to use. */
3631 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3632 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3633 search_ofs = 0;
3634 live = BITMAP_ALLOC (&reg_obstack);
3636 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3638 bool past_end = false;
3639 int pos;
3641 rtl_profile_for_bb (bb);
3643 /* Start up propagation. */
3644 bitmap_copy (live, DF_LR_IN (bb));
3645 df_simulate_initialize_forwards (bb, live);
3646 peep2_reinit_state (live);
3648 insn = BB_HEAD (bb);
3649 for (;;)
3651 rtx attempt, head;
3652 int match_len;
3654 if (!past_end && !NONDEBUG_INSN_P (insn))
3656 next_insn:
3657 insn = NEXT_INSN (insn);
3658 if (insn == NEXT_INSN (BB_END (bb)))
3659 past_end = true;
3660 continue;
3662 if (!past_end && peep2_fill_buffer (bb, insn, live))
3663 goto next_insn;
3665 /* If we did not fill an empty buffer, it signals the end of the
3666 block. */
3667 if (peep2_current_count == 0)
3668 break;
3670 /* The buffer filled to the current maximum, so try to match. */
3672 pos = peep2_buf_position (peep2_current + peep2_current_count);
3673 peep2_insn_data[pos].insn = PEEP2_EOB;
3674 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3676 /* Match the peephole. */
3677 head = peep2_insn_data[peep2_current].insn;
3678 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3679 if (attempt != NULL)
3681 rtx last = peep2_attempt (bb, head, match_len, attempt);
3682 if (last)
3684 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3685 continue;
3689 /* No match: advance the buffer by one insn. */
3690 peep2_current = peep2_buf_position (peep2_current + 1);
3691 peep2_current_count--;
3695 default_rtl_profile ();
3696 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3697 BITMAP_FREE (peep2_insn_data[i].live_before);
3698 BITMAP_FREE (live);
3699 if (peep2_do_rebuild_jump_labels)
3700 rebuild_jump_labels (get_insns ());
3702 #endif /* HAVE_peephole2 */
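/* For reference, a generic example of the construct the pass above
   implements (illustrative only, not taken from any real target's .md file):
   the two matched insns are replaced by the single insn in the new template
   when the pattern and condition match.

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
             (match_operand:SI 1 "register_operand" ""))
        (set (match_dup 0)
             (plus:SI (match_dup 0)
                      (match_operand:SI 2 "immediate_operand" "")))]
       ""
       [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))]
       "")  */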
3704 /* Common predicates for use with define_bypass. */
3706 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3707 data, not the address operand(s) of the store. IN_INSN and OUT_INSN
3708 must each be either a single_set or a PARALLEL with SETs inside. */
3710 bool
3711 store_data_bypass_p (rtx out_insn, rtx in_insn)
3713 rtx out_set, in_set;
3714 rtx out_pat, in_pat;
3715 rtx out_exp, in_exp;
3716 int i, j;
3718 in_set = single_set (in_insn);
3719 if (in_set)
3721 if (!MEM_P (SET_DEST (in_set)))
3722 return false;
3724 out_set = single_set (out_insn);
3725 if (out_set)
3727 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3728 return false;
3730 else
3732 out_pat = PATTERN (out_insn);
3734 if (GET_CODE (out_pat) != PARALLEL)
3735 return false;
3737 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3739 out_exp = XVECEXP (out_pat, 0, i);
3741 if (GET_CODE (out_exp) == CLOBBER)
3742 continue;
3744 gcc_assert (GET_CODE (out_exp) == SET);
3746 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3747 return false;
3751 else
3753 in_pat = PATTERN (in_insn);
3754 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3756 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3758 in_exp = XVECEXP (in_pat, 0, i);
3760 if (GET_CODE (in_exp) == CLOBBER)
3761 continue;
3763 gcc_assert (GET_CODE (in_exp) == SET);
3765 if (!MEM_P (SET_DEST (in_exp)))
3766 return false;
3768 out_set = single_set (out_insn);
3769 if (out_set)
3771 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3772 return false;
3774 else
3776 out_pat = PATTERN (out_insn);
3777 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3779 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3781 out_exp = XVECEXP (out_pat, 0, j);
3783 if (GET_CODE (out_exp) == CLOBBER)
3784 continue;
3786 gcc_assert (GET_CODE (out_exp) == SET);
3788 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3789 return false;
3795 return true;
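/* A typical pipeline-description use of the predicate above (illustrative;
   the reservation names are hypothetical):

     (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   i.e. an ALU result reaches a dependent store in one cycle, but only when
   it is consumed as the stored data rather than as part of the store's
   address.  */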
3798 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3799 condition, and not in the THEN or ELSE branch. OUT_INSN may be either a single
3800 or a multiple set; for the result to be meaningful, IN_INSN should be a
3801 single_set, but for convenience of insn categorization it may be any JUMP or CALL insn. */
3803 bool
3804 if_test_bypass_p (rtx out_insn, rtx in_insn)
3806 rtx out_set, in_set;
3808 in_set = single_set (in_insn);
3809 if (! in_set)
3811 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3812 return false;
3815 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3816 return false;
3817 in_set = SET_SRC (in_set);
3819 out_set = single_set (out_insn);
3820 if (out_set)
3822 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3823 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3824 return false;
3826 else
3828 rtx out_pat;
3829 int i;
3831 out_pat = PATTERN (out_insn);
3832 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3834 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3836 rtx exp = XVECEXP (out_pat, 0, i);
3838 if (GET_CODE (exp) == CLOBBER)
3839 continue;
3841 gcc_assert (GET_CODE (exp) == SET);
3843 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3844 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3845 return false;
3849 return true;
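/* Likewise for the predicate above (illustrative, hypothetical names):

     (define_bypass 1 "my_compare" "my_cmove" "if_test_bypass_p")

   grants the shorter latency only when the producer feeds the IF_THEN_ELSE
   condition of the conditional move, not the value selected by either
   arm.  */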
3852 static bool
3853 gate_handle_peephole2 (void)
3855 return (optimize > 0 && flag_peephole2);
3858 static unsigned int
3859 rest_of_handle_peephole2 (void)
3861 #ifdef HAVE_peephole2
3862 peephole2_optimize ();
3863 #endif
3864 return 0;
3867 namespace {
3869 const pass_data pass_data_peephole2 =
3871 RTL_PASS, /* type */
3872 "peephole2", /* name */
3873 OPTGROUP_NONE, /* optinfo_flags */
3874 true, /* has_gate */
3875 true, /* has_execute */
3876 TV_PEEPHOLE2, /* tv_id */
3877 0, /* properties_required */
3878 0, /* properties_provided */
3879 0, /* properties_destroyed */
3880 0, /* todo_flags_start */
3881 ( TODO_df_finish | TODO_verify_rtl_sharing | 0 ), /* todo_flags_finish */
3884 class pass_peephole2 : public rtl_opt_pass
3886 public:
3887 pass_peephole2 (gcc::context *ctxt)
3888 : rtl_opt_pass (pass_data_peephole2, ctxt)
3891 /* opt_pass methods: */
3892 /* The epiphany backend creates a second instance of this pass, so we need
3893 a clone method. */
3894 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3895 bool gate () { return gate_handle_peephole2 (); }
3896 unsigned int execute () { return rest_of_handle_peephole2 (); }
3898 }; // class pass_peephole2
3900 } // anon namespace
3902 rtl_opt_pass *
3903 make_pass_peephole2 (gcc::context *ctxt)
3905 return new pass_peephole2 (ctxt);
3908 static unsigned int
3909 rest_of_handle_split_all_insns (void)
3911 split_all_insns ();
3912 return 0;
3915 namespace {
3917 const pass_data pass_data_split_all_insns =
3919 RTL_PASS, /* type */
3920 "split1", /* name */
3921 OPTGROUP_NONE, /* optinfo_flags */
3922 false, /* has_gate */
3923 true, /* has_execute */
3924 TV_NONE, /* tv_id */
3925 0, /* properties_required */
3926 0, /* properties_provided */
3927 0, /* properties_destroyed */
3928 0, /* todo_flags_start */
3929 0, /* todo_flags_finish */
3932 class pass_split_all_insns : public rtl_opt_pass
3934 public:
3935 pass_split_all_insns (gcc::context *ctxt)
3936 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3939 /* opt_pass methods: */
3940 /* The epiphany backend creates a second instance of this pass, so
3941 we need a clone method. */
3942 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3943 unsigned int execute () { return rest_of_handle_split_all_insns (); }
3945 }; // class pass_split_all_insns
3947 } // anon namespace
3949 rtl_opt_pass *
3950 make_pass_split_all_insns (gcc::context *ctxt)
3952 return new pass_split_all_insns (ctxt);
3955 static unsigned int
3956 rest_of_handle_split_after_reload (void)
3958 /* If optimizing, then go ahead and split insns now. */
3959 #ifndef STACK_REGS
3960 if (optimize > 0)
3961 #endif
3962 split_all_insns ();
3963 return 0;
3966 namespace {
3968 const pass_data pass_data_split_after_reload =
3970 RTL_PASS, /* type */
3971 "split2", /* name */
3972 OPTGROUP_NONE, /* optinfo_flags */
3973 false, /* has_gate */
3974 true, /* has_execute */
3975 TV_NONE, /* tv_id */
3976 0, /* properties_required */
3977 0, /* properties_provided */
3978 0, /* properties_destroyed */
3979 0, /* todo_flags_start */
3980 0, /* todo_flags_finish */
3983 class pass_split_after_reload : public rtl_opt_pass
3985 public:
3986 pass_split_after_reload (gcc::context *ctxt)
3987 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3990 /* opt_pass methods: */
3991 unsigned int execute () { return rest_of_handle_split_after_reload (); }
3993 }; // class pass_split_after_reload
3995 } // anon namespace
3997 rtl_opt_pass *
3998 make_pass_split_after_reload (gcc::context *ctxt)
4000 return new pass_split_after_reload (ctxt);
4003 static bool
4004 gate_handle_split_before_regstack (void)
4006 #if HAVE_ATTR_length && defined (STACK_REGS)
4007 /* If flow2 creates new instructions which need splitting,
4008 and scheduling after reload is not done, they might not be
4009 split until final, which does not allow splitting
4010 if HAVE_ATTR_length is defined. */
4011 # ifdef INSN_SCHEDULING
4012 return (optimize && !flag_schedule_insns_after_reload);
4013 # else
4014 return (optimize);
4015 # endif
4016 #else
4017 return 0;
4018 #endif
4021 static unsigned int
4022 rest_of_handle_split_before_regstack (void)
4024 split_all_insns ();
4025 return 0;
4028 namespace {
4030 const pass_data pass_data_split_before_regstack =
4032 RTL_PASS, /* type */
4033 "split3", /* name */
4034 OPTGROUP_NONE, /* optinfo_flags */
4035 true, /* has_gate */
4036 true, /* has_execute */
4037 TV_NONE, /* tv_id */
4038 0, /* properties_required */
4039 0, /* properties_provided */
4040 0, /* properties_destroyed */
4041 0, /* todo_flags_start */
4042 0, /* todo_flags_finish */
4045 class pass_split_before_regstack : public rtl_opt_pass
4047 public:
4048 pass_split_before_regstack (gcc::context *ctxt)
4049 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4052 /* opt_pass methods: */
4053 bool gate () { return gate_handle_split_before_regstack (); }
4054 unsigned int execute () {
4055 return rest_of_handle_split_before_regstack ();
4058 }; // class pass_split_before_regstack
4060 } // anon namespace
4062 rtl_opt_pass *
4063 make_pass_split_before_regstack (gcc::context *ctxt)
4065 return new pass_split_before_regstack (ctxt);
4068 static bool
4069 gate_handle_split_before_sched2 (void)
4071 #ifdef INSN_SCHEDULING
4072 return optimize > 0 && flag_schedule_insns_after_reload;
4073 #else
4074 return 0;
4075 #endif
4078 static unsigned int
4079 rest_of_handle_split_before_sched2 (void)
4081 #ifdef INSN_SCHEDULING
4082 split_all_insns ();
4083 #endif
4084 return 0;
4087 namespace {
4089 const pass_data pass_data_split_before_sched2 =
4091 RTL_PASS, /* type */
4092 "split4", /* name */
4093 OPTGROUP_NONE, /* optinfo_flags */
4094 true, /* has_gate */
4095 true, /* has_execute */
4096 TV_NONE, /* tv_id */
4097 0, /* properties_required */
4098 0, /* properties_provided */
4099 0, /* properties_destroyed */
4100 0, /* todo_flags_start */
4101 TODO_verify_flow, /* todo_flags_finish */
4104 class pass_split_before_sched2 : public rtl_opt_pass
4106 public:
4107 pass_split_before_sched2 (gcc::context *ctxt)
4108 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4111 /* opt_pass methods: */
4112 bool gate () { return gate_handle_split_before_sched2 (); }
4113 unsigned int execute () { return rest_of_handle_split_before_sched2 (); }
4115 }; // class pass_split_before_sched2
4117 } // anon namespace
4119 rtl_opt_pass *
4120 make_pass_split_before_sched2 (gcc::context *ctxt)
4122 return new pass_split_before_sched2 (ctxt);
4125 /* The placement of the splitting that we do for shorten_branches
4126 depends on whether regstack is used by the target or not. */
4127 static bool
4128 gate_do_final_split (void)
4130 #if HAVE_ATTR_length && !defined (STACK_REGS)
4131 return 1;
4132 #else
4133 return 0;
4134 #endif
4137 namespace {
4139 const pass_data pass_data_split_for_shorten_branches =
4141 RTL_PASS, /* type */
4142 "split5", /* name */
4143 OPTGROUP_NONE, /* optinfo_flags */
4144 true, /* has_gate */
4145 true, /* has_execute */
4146 TV_NONE, /* tv_id */
4147 0, /* properties_required */
4148 0, /* properties_provided */
4149 0, /* properties_destroyed */
4150 0, /* todo_flags_start */
4151 TODO_verify_rtl_sharing, /* todo_flags_finish */
4154 class pass_split_for_shorten_branches : public rtl_opt_pass
4156 public:
4157 pass_split_for_shorten_branches (gcc::context *ctxt)
4158 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4161 /* opt_pass methods: */
4162 bool gate () { return gate_do_final_split (); }
4163 unsigned int execute () { return split_all_insns_noflow (); }
4165 }; // class pass_split_for_shorten_branches
4167 } // anon namespace
4169 rtl_opt_pass *
4170 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4172 return new pass_split_for_shorten_branches (ctxt);