/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
                                      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}


/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
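
/* Usage sketch (illustrative, not part of this file): two dependent
   operand edits queued as one group and validated together.  NEW0 and
   NEW1 stand for replacement rtxes the caller has built; X is a
   subexpression of INSN's pattern.

     validate_change (insn, &XEXP (x, 0), new0, 1);   // queued, in_group
     validate_change (insn, &XEXP (x, 1), new1, 1);   // queued, in_group
     if (!apply_change_group ())
       ;  // INSN no longer matched; both edits were backed out for us

   With IN_GROUP == 0, each call re-recognizes INSN immediately and
   undoes itself on failure.  */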
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}


/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;


  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (as_a <rtx_insn *> (last_object));
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
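
/* Usage sketch (illustrative, not part of this file): the
   checkpoint/rollback discipline that verify_changes, confirm_change_group
   and cancel_changes support, assuming no changes were pending beforehand.

     int base = num_validated_changes ();          // here: 0
     validate_change (insn, loc, new_rtx, 1);      // queue edits...
     if (verify_changes (base))
       confirm_change_group ();   // keep everything, rescan df info
     else
       cancel_changes (base);     // restore *loc and INSN_CODE

   apply_change_group above is exactly this pattern with BASE == 0.  */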
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
                          machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
        new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
                                            op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
                                             XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
                                                 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }

  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          machine_mode wanted_mode = VOIDmode;
          machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
            {
              wanted_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
            {
              wanted_mode = insn_data[targetm.code_for_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
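
/* Usage sketch (illustrative, not part of this file): a pass that has
   proved SRC_REG and CONST_VAL (hypothetical names) interchangeable at
   INSN can do the whole substitute-and-revalidate dance in one call:

     if (validate_replace_rtx (src_reg, const_val, insn))
       ;  // INSN now uses const_val and still matches a pattern
     else
       ;  // INSN was left untouched; the changes were cancelled  */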
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx_insn *insn;               /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx_insn *insn)
{
  rtx_insn *next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
         generate move insn with invalid addresses which is made valid
         and efficiently calculated by LRA through further numerous
         transformations.  */
      if (lra_in_progress
          || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
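
/* Note (illustrative, not part of this file): predicates such as
   general_operand are normally referenced from machine descriptions,
   e.g. (match_operand:SI 1 "general_operand" ""), but expander code
   also calls them directly to decide whether a fixup is needed:

     if (!general_operand (x, SImode))
       x = force_reg (SImode, x);   // copy X into a fresh pseudo

   force_reg is the usual fixup for an operand rejected this way.  */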
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
        return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress
                  || (REGNO (op) < FIRST_PSEUDO_REGISTER
                      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
        return 0;

      if (prec == bitsize)
        return 1;
      else
        {
          /* Multiword partial int.  */
          HOST_WIDE_INT x
            = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
          return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
        }
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
          && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
          || INTVAL (XEXP (XEXP (op, 1), 1))
             != ((STACK_GROWS_DOWNWARD ? -1 : 1) * (int) rounded_size))
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
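
/* Illustrative RTL shapes (not from this file) accepted by the two
   predicates above, on a target whose stack grows downward and whose
   STACK_PUSH_CODE is PRE_DEC (so STACK_POP_CODE defaults to POST_INC,
   per the definitions at the top of this file):

     push:  (mem:SI (pre_dec:SI (reg sp)))    matched by push_operand
     pop:   (mem:SI (post_inc:SI (reg sp)))   matched by pop_operand

   Anything whose inner address register is not the stack pointer is
   rejected.  */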
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;                /* Past last SET */
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
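
/* Usage sketch (illustrative, not part of this file): the usual pairing
   of asm_noperands and decode_asm_operands when inspecting an asm BODY,
   mirroring what check_asm_operands does above.

     int n = asm_noperands (body);
     if (n > 0)
       {
         rtx *ops = XALLOCAVEC (rtx, n);
         const char **cons = XALLOCAVEC (const char *, n);
         const char *templ
           = decode_asm_operands (body, ops, NULL, cons, NULL, NULL);
         // ops[] now holds outputs first, then inputs, then labels;
         // cons[i] is the constraint string for operand i.
       }
*/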
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
                         unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
        p += 1;
        /* A letter followed by a digit indicates an operand number.  */
        if (ISALPHA (p[0]) && ISDIGIT (p[1]))
          p += 1;
        if (ISDIGIT (*p))
          {
            char *endptr;
            unsigned long opnum = strtoul (p, &endptr, 10);
            if (endptr != p && opnum < noperands)
              used[opnum] = true;
            p = endptr;
          }
        else
          p += 1;
        break;

      default:
        p++;
        break;
      }
}
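
/* Worked example (illustrative, not part of this file): for the template
   "sw\t$0, %0; ld\t%z1, 4(%2)" with NOPERANDS == 4, the loop above sets
   used[] to { true, true, true, false }: %0 and %2 are plain references,
   %z1 is a letter-modified reference to operand 1, and operand 3 is
   never mentioned, so the asm counts as not referencing it.  */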
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

          /* The rest of the compiler assumes that reloading the address
             of a MEM into a register will make it fit an 'o' constraint.
             That is, if it sees a MEM operand for an 'o' constraint,
             it assumes that (mem (base-reg)) will fit.

             That assumption fails on targets that don't have offsettable
             addresses at all.  We therefore need to treat 'o' asm
             constraints as a special case and only accept operands that
             are already offsettable, thus proving that at least one
             offsettable address exists.  */
        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

#ifdef AUTO_INC_DEC
        case '<':
        case '>':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */
          incdec_ok = true;
#endif
        default:
          cn = lookup_constraint (constraint);
          switch (get_constraint_type (cn))
            {
            case CT_REGISTER:
              if (!result
                  && reg_class_for_constraint (cn) != NO_REGS
                  && GET_MODE (op) != BLKmode
                  && register_operand (op, VOIDmode))
                result = 1;
              break;

            case CT_CONST_INT:
              if (!result
                  && CONST_INT_P (op)
                  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
                result = 1;
              break;

            case CT_MEMORY:
              /* Every memory operand can be reloaded to fit.  */
              result = result || memory_operand (op, VOIDmode);
              break;

            case CT_ADDRESS:
              /* Every address operand can be reloaded to fit.  */
              result = result || address_operand (op, VOIDmode);
              break;

            case CT_FIXED_FORM:
              result = result || constraint_satisfied_p (op, cn);
              break;
            }
          break;
        }

      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }
#endif

  return result;
}
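
/* Usage sketch (illustrative, not part of this file): checking one
   operand of a user asm against its constraint string before reload,
   much as check_asm_operands does above.

     if (asm_operand_ok (op, "rm", NULL) > 0)
       ;  // OP satisfies (or can be reloaded to satisfy) "r" or "m"

   A zero result means no alternative can match; a negative result means
   a matching-digit constraint could not be resolved because the
   CONSTRAINTS array was not supplied.  */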
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}

/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
                                               MEM_ADDR_SPACE (op)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
             : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
                        plus_constant (address_mode, XEXP (y, 1),
                                       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
           && GET_CODE (y) == ZERO_EXTEND
           && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
                             plus_constant (pointer_mode, XEXP (y, 0),
                                            mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
2029 /* Return 1 if ADDR is an address-expression whose effect depends
2030 on the mode of the memory reference it is used in.
2032 ADDRSPACE is the address space associated with the address.
2034 Autoincrement addressing is a typical example of mode-dependence
2035 because the amount of the increment depends on the mode. */
2037 bool
2038 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2040 /* Auto-increment addressing with anything other than post_modify
2041 or pre_modify always introduces a mode dependency. Catch such
2042 cases now instead of deferring to the target. */
2043 if (GET_CODE (addr) == PRE_INC
2044 || GET_CODE (addr) == POST_INC
2045 || GET_CODE (addr) == PRE_DEC
2046 || GET_CODE (addr) == POST_DEC)
2047 return true;
2049 return targetm.mode_dependent_address_p (addr, addrspace);
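/* For example, (post_inc (reg X)) is mode-dependent even before asking the
   target: a QImode access through it advances the register by 1, while an
   SImode access advances it by 4, so the same address expression has
   different effects in different modes.  */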
2052 /* Return true if boolean attribute ATTR is supported. */
2054 static bool
2055 have_bool_attr (bool_attr attr)
2057 switch (attr)
2059 case BA_ENABLED:
2060 return HAVE_ATTR_enabled;
2061 case BA_PREFERRED_FOR_SIZE:
2062 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2063 case BA_PREFERRED_FOR_SPEED:
2064 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2066 gcc_unreachable ();
2069 /* Return the value of ATTR for instruction INSN. */
2071 static bool
2072 get_bool_attr (rtx_insn *insn, bool_attr attr)
2074 switch (attr)
2076 case BA_ENABLED:
2077 return get_attr_enabled (insn);
2078 case BA_PREFERRED_FOR_SIZE:
2079 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2080 case BA_PREFERRED_FOR_SPEED:
2081 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2083 gcc_unreachable ();
2086 /* Like get_bool_attr_mask, but don't use the cache. */
2088 static alternative_mask
2089 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2091 /* Temporarily install enough information for get_attr_<foo> to assume
2092 that the insn operands are already cached. As above, the attribute
2093 mustn't depend on the values of operands, so we don't provide their
2094 real values here. */
2095 rtx_insn *old_insn = recog_data.insn;
2096 int old_alternative = which_alternative;
2098 recog_data.insn = insn;
2099 alternative_mask mask = ALL_ALTERNATIVES;
2100 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2101 for (int i = 0; i < n_alternatives; i++)
2103 which_alternative = i;
2104 if (!get_bool_attr (insn, attr))
2105 mask &= ~ALTERNATIVE_BIT (i);
2108 recog_data.insn = old_insn;
2109 which_alternative = old_alternative;
2110 return mask;
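/* As a sketch (the constraints are invented for this comment): for an insn
   with three alternatives "r,m,r" whose boolean attribute rejects
   alternative 1 on the current target, the loop above clears bit 1 and the
   returned mask is 0b101.  */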
2113 /* Return the mask of operand alternatives that are allowed for INSN
2114 by boolean attribute ATTR. This mask depends only on INSN and on
2115 the current target; it does not depend on things like the values of
2116 operands. */
2118 static alternative_mask
2119 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2121 /* Quick exit for asms and for targets that don't use these attributes. */
2122 int code = INSN_CODE (insn);
2123 if (code < 0 || !have_bool_attr (attr))
2124 return ALL_ALTERNATIVES;
2126 /* Calling get_attr_<foo> can be expensive, so cache the mask
2127 for speed. */
2128 if (!this_target_recog->x_bool_attr_masks[code][attr])
2129 this_target_recog->x_bool_attr_masks[code][attr]
2130 = get_bool_attr_mask_uncached (insn, attr);
2131 return this_target_recog->x_bool_attr_masks[code][attr];
2134 /* Return the set of alternatives of INSN that are allowed by the current
2135 target. */
2137 alternative_mask
2138 get_enabled_alternatives (rtx_insn *insn)
2140 return get_bool_attr_mask (insn, BA_ENABLED);
2143 /* Return the set of alternatives of INSN that are allowed by the current
2144 target and are preferred for the current size/speed optimization
2145 choice. */
2147 alternative_mask
2148 get_preferred_alternatives (rtx_insn *insn)
2150 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2151 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2152 else
2153 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2156 /* Return the set of alternatives of INSN that are allowed by the current
2157 target and are preferred for the size/speed optimization choice
2158 associated with BB. Passing a separate BB is useful if INSN has not
2159 been emitted yet or if we are considering moving it to a different
2160 block. */
2162 alternative_mask
2163 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2165 if (optimize_bb_for_speed_p (bb))
2166 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2167 else
2168 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2171 /* Assert that the cached boolean attributes for INSN are still accurate.
2172 The backend is required to define these attributes in a way that only
2173 depends on the current target (rather than operands, compiler phase,
2174 etc.). */
2176 bool
2177 check_bool_attrs (rtx_insn *insn)
2179 int code = INSN_CODE (insn);
2180 if (code >= 0)
2181 for (int i = 0; i <= BA_LAST; ++i)
2183 enum bool_attr attr = (enum bool_attr) i;
2184 if (this_target_recog->x_bool_attr_masks[code][attr])
2185 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2186 == get_bool_attr_mask_uncached (insn, attr));
2188 return true;
2191 /* Like extract_insn, but save the extracted insn and don't extract it
2192 again when called again for the same insn, expecting that recog_data
2193 still contains the valid information. This is used primarily by the
2194 gen_attr infrastructure, which often extracts the same insn repeatedly. */
2195 void
2196 extract_insn_cached (rtx_insn *insn)
2198 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2199 return;
2200 extract_insn (insn);
2201 recog_data.insn = insn;
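/* A hedged sketch of the intended calling pattern: the attribute getters
   generated by genattrtab call
       extract_insn_cached (insn);
   many times for one insn; only the first call actually extracts the
   operands, and later calls return immediately while recog_data.insn
   still matches.  */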
2204 /* Do uncached extract_insn, constrain_operands and complain about failures.
2205 This should be used when extracting a pre-existing constrained instruction
2206 if the caller wants to know which alternative was chosen. */
2207 void
2208 extract_constrain_insn (rtx_insn *insn)
2210 extract_insn (insn);
2211 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2212 fatal_insn_not_found (insn);
2215 /* Do cached extract_insn, constrain_operands and complain about failures.
2216 Used by insn_attrtab. */
2217 void
2218 extract_constrain_insn_cached (rtx_insn *insn)
2220 extract_insn_cached (insn);
2221 if (which_alternative == -1
2222 && !constrain_operands (reload_completed,
2223 get_enabled_alternatives (insn)))
2224 fatal_insn_not_found (insn);
2227 /* Do cached constrain_operands on INSN. */
2228 int
2229 constrain_operands_cached (rtx_insn *insn, int strict)
2231 if (which_alternative == -1)
2232 return constrain_operands (strict, get_enabled_alternatives (insn));
2233 else
2234 return 1;
2237 /* Analyze INSN and fill in recog_data. */
2239 void
2240 extract_insn (rtx_insn *insn)
2242 int i;
2243 int icode;
2244 int noperands;
2245 rtx body = PATTERN (insn);
2247 recog_data.n_operands = 0;
2248 recog_data.n_alternatives = 0;
2249 recog_data.n_dups = 0;
2250 recog_data.is_asm = false;
2252 switch (GET_CODE (body))
2254 case USE:
2255 case CLOBBER:
2256 case ASM_INPUT:
2257 case ADDR_VEC:
2258 case ADDR_DIFF_VEC:
2259 case VAR_LOCATION:
2260 return;
2262 case SET:
2263 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2264 goto asm_insn;
2265 else
2266 goto normal_insn;
2267 case PARALLEL:
2268 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2269 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2270 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2271 goto asm_insn;
2272 else
2273 goto normal_insn;
2274 case ASM_OPERANDS:
2275 asm_insn:
2276 recog_data.n_operands = noperands = asm_noperands (body);
2277 if (noperands >= 0)
2279 /* This insn is an `asm' with operands. */
2281 /* expand_asm_operands makes sure there aren't too many operands. */
2282 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2284 /* Now get the operand values and constraints out of the insn. */
2285 decode_asm_operands (body, recog_data.operand,
2286 recog_data.operand_loc,
2287 recog_data.constraints,
2288 recog_data.operand_mode, NULL);
2289 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2290 if (noperands > 0)
2292 const char *p = recog_data.constraints[0];
2293 recog_data.n_alternatives = 1;
2294 while (*p)
2295 recog_data.n_alternatives += (*p++ == ',');
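/* E.g. an invented asm constraint string "=r,m" contains one comma and
   therefore describes two alternatives.  */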
2297 recog_data.is_asm = true;
2298 break;
2300 fatal_insn_not_found (insn);
2302 default:
2303 normal_insn:
2304 /* Ordinary insn: recognize it, get the operands via insn_extract
2305 and get the constraints. */
2307 icode = recog_memoized (insn);
2308 if (icode < 0)
2309 fatal_insn_not_found (insn);
2311 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2312 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2313 recog_data.n_dups = insn_data[icode].n_dups;
2315 insn_extract (insn);
2317 for (i = 0; i < noperands; i++)
2319 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2320 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2321 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2322 /* VOIDmode match_operands get their mode from the real operand. */
2323 if (recog_data.operand_mode[i] == VOIDmode)
2324 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2327 for (i = 0; i < noperands; i++)
2328 recog_data.operand_type[i]
2329 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2330 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2331 : OP_IN);
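/* For example, a "=r" constraint marks a pure output (OP_OUT), "+r" an
   operand that is both read and written (OP_INOUT), and a plain "r" an
   input (OP_IN).  */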
2333 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2335 recog_data.insn = NULL;
2336 which_alternative = -1;
2339 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2340 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2341 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2342 has N_OPERANDS entries. */
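/* A small invented example: with two operands whose constraints are "=r,m"
   and "r,0", alternative 1 records that operand 1 must match operand 0 and,
   symmetrically, marks operand 0 as matched by operand 1.  */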
2344 void
2345 preprocess_constraints (int n_operands, int n_alternatives,
2346 const char **constraints,
2347 operand_alternative *op_alt_base)
2349 for (int i = 0; i < n_operands; i++)
2351 int j;
2352 struct operand_alternative *op_alt;
2353 const char *p = constraints[i];
2355 op_alt = op_alt_base;
2357 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2359 op_alt[i].cl = NO_REGS;
2360 op_alt[i].constraint = p;
2361 op_alt[i].matches = -1;
2362 op_alt[i].matched = -1;
2364 if (*p == '\0' || *p == ',')
2366 op_alt[i].anything_ok = 1;
2367 continue;
2370 for (;;)
2372 char c = *p;
2373 if (c == '#')
2375 c = *++p;
2376 while (c != ',' && c != '\0');
2377 if (c == ',' || c == '\0')
2379 p++;
2380 break;
2383 switch (c)
2385 case '?':
2386 op_alt[i].reject += 6;
2387 break;
2388 case '!':
2389 op_alt[i].reject += 600;
2390 break;
2391 case '&':
2392 op_alt[i].earlyclobber = 1;
2393 break;
2395 case '0': case '1': case '2': case '3': case '4':
2396 case '5': case '6': case '7': case '8': case '9':
2398 char *end;
2399 op_alt[i].matches = strtoul (p, &end, 10);
2400 op_alt[op_alt[i].matches].matched = i;
2401 p = end;
2403 continue;
2405 case 'X':
2406 op_alt[i].anything_ok = 1;
2407 break;
2409 case 'g':
2410 op_alt[i].cl =
2411 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2412 break;
2414 default:
2415 enum constraint_num cn = lookup_constraint (p);
2416 enum reg_class cl;
2417 switch (get_constraint_type (cn))
2419 case CT_REGISTER:
2420 cl = reg_class_for_constraint (cn);
2421 if (cl != NO_REGS)
2422 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2423 break;
2425 case CT_CONST_INT:
2426 break;
2428 case CT_MEMORY:
2429 op_alt[i].memory_ok = 1;
2430 break;
2432 case CT_ADDRESS:
2433 op_alt[i].is_address = 1;
2434 op_alt[i].cl
2435 = (reg_class_subunion
2436 [(int) op_alt[i].cl]
2437 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2438 ADDRESS, SCRATCH)]);
2439 break;
2441 case CT_FIXED_FORM:
2442 break;
2444 break;
2446 p += CONSTRAINT_LEN (c, p);
2452 /* Return an array of operand_alternative structures for
2453 instruction ICODE. */
2455 const operand_alternative *
2456 preprocess_insn_constraints (int icode)
2458 gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
2459 if (this_target_recog->x_op_alt[icode])
2460 return this_target_recog->x_op_alt[icode];
2462 int n_operands = insn_data[icode].n_operands;
2463 if (n_operands == 0)
2464 return 0;
2465 /* Always provide at least one alternative so that which_op_alt ()
2466 works correctly. If the instruction has 0 alternatives (i.e. all
2467 constraint strings are empty) then each operand in this alternative
2468 will have anything_ok set. */
2469 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2470 int n_entries = n_operands * n_alternatives;
2472 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2473 const char **constraints = XALLOCAVEC (const char *, n_operands);
2475 for (int i = 0; i < n_operands; ++i)
2476 constraints[i] = insn_data[icode].operand[i].constraint;
2477 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);
2479 this_target_recog->x_op_alt[icode] = op_alt;
2480 return op_alt;
2483 /* After calling extract_insn, you can use this function to extract some
2484 information from the constraint strings into a more usable form.
2485 The collected data is stored in recog_op_alt. */
2487 void
2488 preprocess_constraints (rtx_insn *insn)
2490 int icode = INSN_CODE (insn);
2491 if (icode >= 0)
2492 recog_op_alt = preprocess_insn_constraints (icode);
2493 else
2495 int n_operands = recog_data.n_operands;
2496 int n_alternatives = recog_data.n_alternatives;
2497 int n_entries = n_operands * n_alternatives;
2498 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2499 preprocess_constraints (n_operands, n_alternatives,
2500 recog_data.constraints, asm_op_alt);
2501 recog_op_alt = asm_op_alt;
2505 /* Check the operands of an insn against the insn's operand constraints
2506 and return 1 if they match any of the alternatives in ALTERNATIVES.
2508 The information about the insn's operands, constraints, operand modes
2509 etc. is obtained from the global variables set up by extract_insn.
2511 WHICH_ALTERNATIVE is set to a number which indicates which
2512 alternative of constraints was matched: 0 for the first alternative,
2513 1 for the next, etc.
2515 In addition, when two operands are required to match
2516 and it happens that the output operand is (reg) while the
2517 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2518 make the output operand look like the input.
2519 This is because the output operand is the one the template will print.
2521 This is used in final, just before printing the assembler code and by
2522 the routines that determine an insn's attribute.
2524 If STRICT is positive, it means that we have been
2525 called after reload has been completed. In that case, we must
2526 do all checks strictly. If it is zero, it means that we have been called
2527 before reload has completed. In that case, we first try to see if we can
2528 find an alternative that matches strictly. If not, we try again, this
2529 time assuming that reload will fix up the insn. This provides a "best
2530 guess" for the alternative and is used to compute attributes of insns prior
2531 to reload. A negative value of STRICT is used for this internal call. */
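/* An invented example: if operand 0 has constraints "=r,m" and operand 1
   has "r,r", alternative 0 requires both operands in registers; when
   operand 0 is a MEM that alternative fails, and alternative 1 (mem, reg)
   is tried instead, leaving which_alternative == 1 on success.  */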
2533 struct funny_match
2534 {
2535 int this_op, other;
2536 };
2538 int
2539 constrain_operands (int strict, alternative_mask alternatives)
2541 const char *constraints[MAX_RECOG_OPERANDS];
2542 int matching_operands[MAX_RECOG_OPERANDS];
2543 int earlyclobber[MAX_RECOG_OPERANDS];
2544 int c;
2546 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2547 int funny_match_index;
2549 which_alternative = 0;
2550 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2551 return 1;
2553 for (c = 0; c < recog_data.n_operands; c++)
2555 constraints[c] = recog_data.constraints[c];
2556 matching_operands[c] = -1;
2561 int seen_earlyclobber_at = -1;
2562 int opno;
2563 int lose = 0;
2564 funny_match_index = 0;
2566 if (!TEST_BIT (alternatives, which_alternative))
2568 int i;
2570 for (i = 0; i < recog_data.n_operands; i++)
2571 constraints[i] = skip_alternative (constraints[i]);
2573 which_alternative++;
2574 continue;
2577 for (opno = 0; opno < recog_data.n_operands; opno++)
2579 rtx op = recog_data.operand[opno];
2580 machine_mode mode = GET_MODE (op);
2581 const char *p = constraints[opno];
2582 int offset = 0;
2583 int win = 0;
2584 int val;
2585 int len;
2587 earlyclobber[opno] = 0;
2589 /* A unary operator may be accepted by the predicate, but it
2590 is irrelevant for matching constraints. */
2591 if (UNARY_P (op))
2592 op = XEXP (op, 0);
2594 if (GET_CODE (op) == SUBREG)
2596 if (REG_P (SUBREG_REG (op))
2597 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2598 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2599 GET_MODE (SUBREG_REG (op)),
2600 SUBREG_BYTE (op),
2601 GET_MODE (op));
2602 op = SUBREG_REG (op);
2605 /* An empty constraint or empty alternative
2606 allows anything which matched the pattern. */
2607 if (*p == 0 || *p == ',')
2608 win = 1;
2611 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2613 case '\0':
2614 len = 0;
2615 break;
2616 case ',':
2617 c = '\0';
2618 break;
2620 case '#':
2621 /* Ignore rest of this alternative as far as
2622 constraint checking is concerned. */
2624 p++;
2625 while (*p && *p != ',');
2626 len = 0;
2627 break;
2629 case '&':
2630 earlyclobber[opno] = 1;
2631 if (seen_earlyclobber_at < 0)
2632 seen_earlyclobber_at = opno;
2633 break;
2635 case '0': case '1': case '2': case '3': case '4':
2636 case '5': case '6': case '7': case '8': case '9':
2638 /* This operand must be the same as a previous one.
2639 This kind of constraint is used for instructions such
2640 as add when they take only two operands.
2642 Note that the lower-numbered operand is passed first.
2644 If we are not testing strictly, assume that this
2645 constraint will be satisfied. */
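/* For instance, a two-address add may give operand 1 the constraint "0",
   requiring it to be the same rtx as operand 0, as in
   (set (reg X) (plus (reg X) (reg Y))).  */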
2647 char *end;
2648 int match;
2650 match = strtoul (p, &end, 10);
2651 p = end;
2653 if (strict < 0)
2654 val = 1;
2655 else
2657 rtx op1 = recog_data.operand[match];
2658 rtx op2 = recog_data.operand[opno];
2660 /* A unary operator may be accepted by the predicate,
2661 but it is irrelevant for matching constraints. */
2662 if (UNARY_P (op1))
2663 op1 = XEXP (op1, 0);
2664 if (UNARY_P (op2))
2665 op2 = XEXP (op2, 0);
2667 val = operands_match_p (op1, op2);
2670 matching_operands[opno] = match;
2671 matching_operands[match] = opno;
2673 if (val != 0)
2674 win = 1;
2676 /* If output is *x and input is *--x, arrange later
2677 to change the output to *--x as well, since the
2678 output op is the one that will be printed. */
2679 if (val == 2 && strict > 0)
2681 funny_match[funny_match_index].this_op = opno;
2682 funny_match[funny_match_index++].other = match;
2685 len = 0;
2686 break;
2688 case 'p':
2689 /* p is used for address_operands. When we are called by
2690 gen_reload, no one will have checked that the address is
2691 strictly valid, i.e., that all pseudos requiring hard regs
2692 have gotten them. */
2693 if (strict <= 0
2694 || (strict_memory_address_p (recog_data.operand_mode[opno],
2695 op)))
2696 win = 1;
2697 break;
2699 /* No need to check general_operand again;
2700 it was done in insn-recog.c. Well, except that reload
2701 doesn't check the validity of its replacements, but
2702 that should only matter when there's a bug. */
2703 case 'g':
2704 /* Anything goes unless it is a REG and really has a hard reg
2705 but the hard reg is not in the class GENERAL_REGS. */
2706 if (REG_P (op))
2708 if (strict < 0
2709 || GENERAL_REGS == ALL_REGS
2710 || (reload_in_progress
2711 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2712 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2713 win = 1;
2715 else if (strict < 0 || general_operand (op, mode))
2716 win = 1;
2717 break;
2719 default:
2721 enum constraint_num cn = lookup_constraint (p);
2722 enum reg_class cl = reg_class_for_constraint (cn);
2723 if (cl != NO_REGS)
2725 if (strict < 0
2726 || (strict == 0
2727 && REG_P (op)
2728 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2729 || (strict == 0 && GET_CODE (op) == SCRATCH)
2730 || (REG_P (op)
2731 && reg_fits_class_p (op, cl, offset, mode)))
2732 win = 1;
2735 else if (constraint_satisfied_p (op, cn))
2736 win = 1;
2738 else if (insn_extra_memory_constraint (cn)
2739 /* Every memory operand can be reloaded to fit. */
2740 && ((strict < 0 && MEM_P (op))
2741 /* Before reload, accept what reload can turn
2742 into a mem. */
2743 || (strict < 0 && CONSTANT_P (op))
2744 /* Before reload, accept a pseudo,
2745 since LRA can turn it into a mem. */
2746 || (strict < 0 && targetm.lra_p () && REG_P (op)
2747 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2748 /* During reload, accept a pseudo. */
2749 || (reload_in_progress && REG_P (op)
2750 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2751 win = 1;
2752 else if (insn_extra_address_constraint (cn)
2753 /* Every address operand can be reloaded to fit. */
2754 && strict < 0)
2755 win = 1;
2756 /* Cater to architectures like IA-64 that define extra memory
2757 constraints without using define_memory_constraint. */
2758 else if (reload_in_progress
2759 && REG_P (op)
2760 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2761 && reg_renumber[REGNO (op)] < 0
2762 && reg_equiv_mem (REGNO (op)) != 0
2763 && constraint_satisfied_p
2764 (reg_equiv_mem (REGNO (op)), cn))
2765 win = 1;
2766 break;
2769 while (p += len, c);
2771 constraints[opno] = p;
2772 /* If this operand did not win somehow,
2773 this alternative loses. */
2774 if (! win)
2775 lose = 1;
2777 /* This alternative won; the operands are ok.
2778 Change whichever operands this alternative says to change. */
2779 if (! lose)
2781 int opno, eopno;
2783 /* See if any earlyclobber operand conflicts with some other
2784 operand. */
2786 if (strict > 0 && seen_earlyclobber_at >= 0)
2787 for (eopno = seen_earlyclobber_at;
2788 eopno < recog_data.n_operands;
2789 eopno++)
2790 /* Ignore earlyclobber operands now in memory,
2791 because we would often report failure when we have
2792 two memory operands, one of which was formerly a REG. */
2793 if (earlyclobber[eopno]
2794 && REG_P (recog_data.operand[eopno]))
2795 for (opno = 0; opno < recog_data.n_operands; opno++)
2796 if ((MEM_P (recog_data.operand[opno])
2797 || recog_data.operand_type[opno] != OP_OUT)
2798 && opno != eopno
2799 /* Ignore things like match_operator operands. */
2800 && *recog_data.constraints[opno] != 0
2801 && ! (matching_operands[opno] == eopno
2802 && operands_match_p (recog_data.operand[opno],
2803 recog_data.operand[eopno]))
2804 && ! safe_from_earlyclobber (recog_data.operand[opno],
2805 recog_data.operand[eopno]))
2806 lose = 1;
2808 if (! lose)
2810 while (--funny_match_index >= 0)
2812 recog_data.operand[funny_match[funny_match_index].other]
2813 = recog_data.operand[funny_match[funny_match_index].this_op];
2816 #ifdef AUTO_INC_DEC
2817 /* For operands without < or > constraints reject side-effects. */
2818 if (recog_data.is_asm)
2820 for (opno = 0; opno < recog_data.n_operands; opno++)
2821 if (MEM_P (recog_data.operand[opno]))
2822 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2824 case PRE_INC:
2825 case POST_INC:
2826 case PRE_DEC:
2827 case POST_DEC:
2828 case PRE_MODIFY:
2829 case POST_MODIFY:
2830 if (strchr (recog_data.constraints[opno], '<') == NULL
2831 && strchr (recog_data.constraints[opno], '>')
2832 == NULL)
2833 return 0;
2834 break;
2835 default:
2836 break;
2839 #endif
2840 return 1;
2844 which_alternative++;
2846 while (which_alternative < recog_data.n_alternatives);
2848 which_alternative = -1;
2849 /* If we are about to reject this, but we are not to test strictly,
2850 try a very loose test. Only return failure if it fails also. */
2851 if (strict == 0)
2852 return constrain_operands (-1, alternatives);
2853 else
2854 return 0;
2857 /* Return true iff OPERAND (assumed to be a REG rtx)
2858 is a hard reg in class CLASS when its regno is offset by OFFSET
2859 and changed to mode MODE.
2860 If REG occupies multiple hard regs, all of them must be in CLASS. */
2862 bool
2863 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2864 machine_mode mode)
2866 unsigned int regno = REGNO (operand);
2868 if (cl == NO_REGS)
2869 return false;
2871 /* Regno must not be a pseudo register. Offset may be negative. */
2872 return (HARD_REGISTER_NUM_P (regno)
2873 && HARD_REGISTER_NUM_P (regno + offset)
2874 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2875 regno + offset));
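/* Illustrative only (the numbers are invented, and register widths vary by
   target): on a target where DImode occupies two hard regs, a DImode
   operand in hard reg 3 queried with OFFSET 1 requires both regs 4 and 5
   to be members of CL.  */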
2878 /* Split single instruction. Helper function for split_all_insns and
2879 split_all_insns_noflow. Return last insn in the sequence if successful,
2880 or NULL if unsuccessful. */
2882 static rtx_insn *
2883 split_insn (rtx_insn *insn)
2885 /* Split insns here to get max fine-grain parallelism. */
2886 rtx_insn *first = PREV_INSN (insn);
2887 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2888 rtx insn_set, last_set, note;
2890 if (last == insn)
2891 return NULL;
2893 /* If the original instruction was a single set that was known to be
2894 equivalent to a constant, see if we can say the same about the last
2895 instruction in the split sequence. The two instructions must set
2896 the same destination. */
2897 insn_set = single_set (insn);
2898 if (insn_set)
2900 last_set = single_set (last);
2901 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2903 note = find_reg_equal_equiv_note (insn);
2904 if (note && CONSTANT_P (XEXP (note, 0)))
2905 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2906 else if (CONSTANT_P (SET_SRC (insn_set)))
2907 set_unique_reg_note (last, REG_EQUAL,
2908 copy_rtx (SET_SRC (insn_set)));
2912 /* try_split returns the NOTE that INSN became. */
2913 SET_INSN_DELETED (insn);
2915 /* ??? Coddle to md files that generate subregs in post-reload
2916 splitters instead of computing the proper hard register. */
2917 if (reload_completed && first != last)
2919 first = NEXT_INSN (first);
2920 for (;;)
2922 if (INSN_P (first))
2923 cleanup_subreg_operands (first);
2924 if (first == last)
2925 break;
2926 first = NEXT_INSN (first);
2930 return last;
2933 /* Split all insns in the function. */
2935 void
2936 split_all_insns (void)
2938 sbitmap blocks;
2939 bool changed;
2940 basic_block bb;
2942 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2943 bitmap_clear (blocks);
2944 changed = false;
2946 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2948 rtx_insn *insn, *next;
2949 bool finish = false;
2951 rtl_profile_for_bb (bb);
2952 for (insn = BB_HEAD (bb); !finish ; insn = next)
2954 /* Can't use `next_real_insn' because that might go across
2955 CODE_LABELS and short-out basic blocks. */
2956 next = NEXT_INSN (insn);
2957 finish = (insn == BB_END (bb));
2958 if (INSN_P (insn))
2960 rtx set = single_set (insn);
2962 /* Don't split no-op move insns. These should silently
2963 disappear later in final. Splitting such insns would
2964 break the code that handles LIBCALL blocks. */
2965 if (set && set_noop_p (set))
2967 /* Nops get in the way while scheduling, so delete them
2968 now if register allocation has already been done. It
2969 is too risky to try to do this before register
2970 allocation, and there are unlikely to be very many
2971 nops then anyway. */
2972 if (reload_completed)
2973 delete_insn_and_edges (insn);
2975 else
2977 if (split_insn (insn))
2979 bitmap_set_bit (blocks, bb->index);
2980 changed = true;
2987 default_rtl_profile ();
2988 if (changed)
2989 find_many_sub_basic_blocks (blocks);
2991 #ifdef ENABLE_CHECKING
2992 verify_flow_info ();
2993 #endif
2995 sbitmap_free (blocks);
2998 /* Same as split_all_insns, but do not expect CFG to be available.
2999 Used by machine dependent reorg passes. */
3001 unsigned int
3002 split_all_insns_noflow (void)
3004 rtx_insn *next, *insn;
3006 for (insn = get_insns (); insn; insn = next)
3008 next = NEXT_INSN (insn);
3009 if (INSN_P (insn))
3011 /* Don't split no-op move insns. These should silently
3012 disappear later in final. Splitting such insns would
3013 break the code that handles LIBCALL blocks. */
3014 rtx set = single_set (insn);
3015 if (set && set_noop_p (set))
3017 /* Nops get in the way while scheduling, so delete them
3018 now if register allocation has already been done. It
3019 is too risky to try to do this before register
3020 allocation, and there are unlikely to be very many
3021 nops then anyway.
3023 ??? Should we use delete_insn when the CFG isn't valid? */
3024 if (reload_completed)
3025 delete_insn_and_edges (insn);
3027 else
3028 split_insn (insn);
3031 return 0;
3034 #ifdef HAVE_peephole2
3035 struct peep2_insn_data
3036 {
3037 rtx_insn *insn;
3038 regset live_before;
3039 };
3041 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3042 static int peep2_current;
3044 static bool peep2_do_rebuild_jump_labels;
3045 static bool peep2_do_cleanup_cfg;
3047 /* The number of instructions available to match a peep2. */
3048 int peep2_current_count;
3050 /* A marker indicating the last insn of the block. The live_before regset
3051 for this element is correct, indicating DF_LIVE_OUT for the block. */
3052 #define PEEP2_EOB invalid_insn_rtx
3054 /* Wrap N to fit into the peep2_insn_data buffer. */
3056 static int
3057 peep2_buf_position (int n)
3059 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3060 n -= MAX_INSNS_PER_PEEP2 + 1;
3061 return n;
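/* Pure arithmetic on the buffer size: if MAX_INSNS_PER_PEEP2 were 5, the
   buffer would have 6 slots and an index of 7 would wrap to 1.  */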
3064 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3065 does not exist. Used by the recognizer to find the next insn to match
3066 in a multi-insn pattern. */
3068 rtx_insn *
3069 peep2_next_insn (int n)
3071 gcc_assert (n <= peep2_current_count);
3073 n = peep2_buf_position (peep2_current + n);
3075 return peep2_insn_data[n].insn;
3078 /* Return true if REGNO is dead before the Nth non-note insn
3079 after `current'. */
3081 int
3082 peep2_regno_dead_p (int ofs, int regno)
3084 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3086 ofs = peep2_buf_position (peep2_current + ofs);
3088 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3090 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3093 /* Similarly for a REG. */
3095 int
3096 peep2_reg_dead_p (int ofs, rtx reg)
3098 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3100 ofs = peep2_buf_position (peep2_current + ofs);
3102 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3104 unsigned int end_regno = END_REGNO (reg);
3105 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3106 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3107 return 0;
3108 return 1;
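/* A target's define_peephole2 condition might use this as, e.g.,
       peep2_reg_dead_p (2, operands[0])
   to require that operands[0] is dead before the third insn of the matched
   window (the offset and operand index here are invented).  */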
3111 /* Regno offset to be used in the register search. */
3112 static int search_ofs;
3114 /* Try to find a hard register of mode MODE, matching the register class in
3115 CLASS_STR, which is not live at buffer position FROM of the current
3116 peephole window and is not set or clobbered by any insn from position
3117 FROM up to (but not including) position TO.
3119 Registers that already have bits set in REG_SET will not be considered.
3121 If an appropriate register is available, it will be returned and the
3122 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3123 returned. */
3125 rtx
3126 peep2_find_free_register (int from, int to, const char *class_str,
3127 machine_mode mode, HARD_REG_SET *reg_set)
3129 enum reg_class cl;
3130 HARD_REG_SET live;
3131 df_ref def;
3132 int i;
3134 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3135 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3137 from = peep2_buf_position (peep2_current + from);
3138 to = peep2_buf_position (peep2_current + to);
3140 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3141 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3143 while (from != to)
3145 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3147 /* Don't use registers set or clobbered by the insn. */
3148 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3149 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3151 from = peep2_buf_position (from + 1);
3154 cl = reg_class_for_constraint (lookup_constraint (class_str));
3156 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3158 int raw_regno, regno, success, j;
3160 /* Distribute the free registers as much as possible. */
3161 raw_regno = search_ofs + i;
3162 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3163 raw_regno -= FIRST_PSEUDO_REGISTER;
3164 #ifdef REG_ALLOC_ORDER
3165 regno = reg_alloc_order[raw_regno];
3166 #else
3167 regno = raw_regno;
3168 #endif
3170 /* Can it support the mode we need? */
3171 if (! HARD_REGNO_MODE_OK (regno, mode))
3172 continue;
3174 success = 1;
3175 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3177 /* Don't allocate fixed registers. */
3178 if (fixed_regs[regno + j])
3180 success = 0;
3181 break;
3183 /* Don't allocate global registers. */
3184 if (global_regs[regno + j])
3186 success = 0;
3187 break;
3189 /* Make sure the register is of the right class. */
3190 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3192 success = 0;
3193 break;
3195 /* And that we don't create an extra save/restore. */
3196 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3198 success = 0;
3199 break;
3202 if (! targetm.hard_regno_scratch_ok (regno + j))
3204 success = 0;
3205 break;
3208 /* And we don't clobber traceback for noreturn functions. */
3209 if ((regno + j == FRAME_POINTER_REGNUM
3210 || regno + j == HARD_FRAME_POINTER_REGNUM)
3211 && (! reload_completed || frame_pointer_needed))
3213 success = 0;
3214 break;
3217 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3218 || TEST_HARD_REG_BIT (live, regno + j))
3220 success = 0;
3221 break;
3225 if (success)
3227 add_to_hard_reg_set (reg_set, mode, regno);
3229 /* Start the next search with the next register. */
3230 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3231 raw_regno = 0;
3232 search_ofs = raw_regno;
3234 return gen_rtx_REG (mode, regno);
3238 search_ofs = 0;
3239 return NULL_RTX;
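/* A hedged usage sketch (the names are invented): code expanded from a
   define_peephole2 match_scratch does roughly
       rtx tmp = peep2_find_free_register (0, 2, "r", SImode, &live_set);
   where live_set accumulates the hard regs already handed out, so two
   searches never return overlapping registers.  */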
3242 /* Forget all currently tracked instructions; remember only the current
3243 LIVE regset. */
3245 static void
3246 peep2_reinit_state (regset live)
3248 int i;
3250 /* Indicate that all slots except the last hold invalid data. */
3251 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3252 peep2_insn_data[i].insn = NULL;
3253 peep2_current_count = 0;
3255 /* Indicate that the last slot contains live_after data. */
3256 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3257 peep2_current = MAX_INSNS_PER_PEEP2;
3259 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3262 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3263 starting at INSN. Perform the replacement, removing the old insns and
3264 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3265 if the replacement is rejected. */
3267 static rtx_insn *
3268 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3270 int i;
3271 rtx_insn *last, *before_try, *x;
3272 rtx eh_note, as_note;
3273 rtx_insn *old_insn;
3274 rtx_insn *new_insn;
3275 bool was_call = false;
3277 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3278 match more than one insn, or to be split into more than one insn. */
3279 old_insn = peep2_insn_data[peep2_current].insn;
3280 if (RTX_FRAME_RELATED_P (old_insn))
3282 bool any_note = false;
3283 rtx note;
3285 if (match_len != 0)
3286 return NULL;
3288 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3289 may be in the stream for the purpose of register allocation. */
3290 if (active_insn_p (attempt))
3291 new_insn = attempt;
3292 else
3293 new_insn = next_active_insn (attempt);
3294 if (next_active_insn (new_insn))
3295 return NULL;
3297 /* We have a 1-1 replacement. Copy over any frame-related info. */
3298 RTX_FRAME_RELATED_P (new_insn) = 1;
3300 /* Allow the backend to fill in a note during the split. */
3301 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3302 switch (REG_NOTE_KIND (note))
3304 case REG_FRAME_RELATED_EXPR:
3305 case REG_CFA_DEF_CFA:
3306 case REG_CFA_ADJUST_CFA:
3307 case REG_CFA_OFFSET:
3308 case REG_CFA_REGISTER:
3309 case REG_CFA_EXPRESSION:
3310 case REG_CFA_RESTORE:
3311 case REG_CFA_SET_VDRAP:
3312 any_note = true;
3313 break;
3314 default:
3315 break;
3318 /* If the backend didn't supply a note, copy one over. */
3319 if (!any_note)
3320 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3321 switch (REG_NOTE_KIND (note))
3323 case REG_FRAME_RELATED_EXPR:
3324 case REG_CFA_DEF_CFA:
3325 case REG_CFA_ADJUST_CFA:
3326 case REG_CFA_OFFSET:
3327 case REG_CFA_REGISTER:
3328 case REG_CFA_EXPRESSION:
3329 case REG_CFA_RESTORE:
3330 case REG_CFA_SET_VDRAP:
3331 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3332 any_note = true;
3333 break;
3334 default:
3335 break;
3338 /* If there still isn't a note, make sure the unwind info sees the
3339 same expression as before the split. */
3340 if (!any_note)
3342 rtx old_set, new_set;
3344 /* The old insn had better have been simple, or annotated. */
3345 old_set = single_set (old_insn);
3346 gcc_assert (old_set != NULL);
3348 new_set = single_set (new_insn);
3349 if (!new_set || !rtx_equal_p (new_set, old_set))
3350 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3353 /* Copy prologue/epilogue status. This is required in order to keep
3354 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3355 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3358 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3359 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3360 cfg-related call notes. */
3361 for (i = 0; i <= match_len; ++i)
3363 int j;
3364 rtx note;
3366 j = peep2_buf_position (peep2_current + i);
3367 old_insn = peep2_insn_data[j].insn;
3368 if (!CALL_P (old_insn))
3369 continue;
3370 was_call = true;
3372 new_insn = attempt;
3373 while (new_insn != NULL_RTX)
3375 if (CALL_P (new_insn))
3376 break;
3377 new_insn = NEXT_INSN (new_insn);
3380 gcc_assert (new_insn != NULL_RTX);
3382 CALL_INSN_FUNCTION_USAGE (new_insn)
3383 = CALL_INSN_FUNCTION_USAGE (old_insn);
3384 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3386 for (note = REG_NOTES (old_insn);
3387 note;
3388 note = XEXP (note, 1))
3389 switch (REG_NOTE_KIND (note))
3391 case REG_NORETURN:
3392 case REG_SETJMP:
3393 case REG_TM:
3394 add_reg_note (new_insn, REG_NOTE_KIND (note),
3395 XEXP (note, 0));
3396 break;
3397 default:
3398 /* Discard all other reg notes. */
3399 break;
3402 /* Croak if there is another call in the sequence. */
3403 while (++i <= match_len)
3405 j = peep2_buf_position (peep2_current + i);
3406 old_insn = peep2_insn_data[j].insn;
3407 gcc_assert (!CALL_P (old_insn));
3409 break;
3412 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3413 move those notes over to the new sequence. */
3414 as_note = NULL;
3415 for (i = match_len; i >= 0; --i)
3417 int j = peep2_buf_position (peep2_current + i);
3418 old_insn = peep2_insn_data[j].insn;
3420 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3421 if (as_note)
3422 break;
3425 i = peep2_buf_position (peep2_current + match_len);
3426 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3428 /* Replace the old sequence with the new. */
3429 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3430 last = emit_insn_after_setloc (attempt,
3431 peep2_insn_data[i].insn,
3432 INSN_LOCATION (peepinsn));
3433 before_try = PREV_INSN (insn);
3434 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3436 /* Re-insert the EH_REGION notes. */
3437 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3439 edge eh_edge;
3440 edge_iterator ei;
3442 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3443 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3444 break;
3446 if (eh_note)
3447 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3449 if (eh_edge)
3450 for (x = last; x != before_try; x = PREV_INSN (x))
3451 if (x != BB_END (bb)
3452 && (can_throw_internal (x)
3453 || can_nonlocal_goto (x)))
3455 edge nfte, nehe;
3456 int flags;
3458 nfte = split_block (bb, x);
3459 flags = (eh_edge->flags
3460 & (EDGE_EH | EDGE_ABNORMAL));
3461 if (CALL_P (x))
3462 flags |= EDGE_ABNORMAL_CALL;
3463 nehe = make_edge (nfte->src, eh_edge->dest,
3464 flags);
3466 nehe->probability = eh_edge->probability;
3467 nfte->probability
3468 = REG_BR_PROB_BASE - nehe->probability;
3470 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3471 bb = nfte->src;
3472 eh_edge = nehe;
3475 /* The replacement may have turned a possibly trapping insn into a
3476 non-trapping one. Zap any now-dummy outgoing edges. */
3477 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3480 /* Re-insert the ARGS_SIZE notes. */
3481 if (as_note)
3482 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3484 /* If we generated a jump instruction, it won't have
3485 JUMP_LABEL set. Recompute after we're done. */
3486 for (x = last; x != before_try; x = PREV_INSN (x))
3487 if (JUMP_P (x))
3489 peep2_do_rebuild_jump_labels = true;
3490 break;
3493 return last;
3496 /* After performing a replacement in basic block BB, fix up the life
3497 information in our buffer. LAST is the last of the insns that we
3498 emitted as a replacement. PREV is the insn before the start of
3499 the replacement. MATCH_LEN is the number of instructions that were
3500 matched, and which now need to be replaced in the buffer. */
3502 static void
3503 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3504 rtx_insn *prev)
3506 int i = peep2_buf_position (peep2_current + match_len + 1);
3507 rtx_insn *x;
3508 regset_head live;
3510 INIT_REG_SET (&live);
3511 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3513 gcc_assert (peep2_current_count >= match_len + 1);
3514 peep2_current_count -= match_len + 1;
3516 x = last;
3519 if (INSN_P (x))
3521 df_insn_rescan (x);
3522 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3524 peep2_current_count++;
3525 if (--i < 0)
3526 i = MAX_INSNS_PER_PEEP2;
3527 peep2_insn_data[i].insn = x;
3528 df_simulate_one_insn_backwards (bb, x, &live);
3529 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3532 x = PREV_INSN (x);
3534 while (x != prev);
3535 CLEAR_REG_SET (&live);
3537 peep2_current = i;
3540 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3541 Return true if we added it, false otherwise. The caller will try to match
3542 peepholes against the buffer if we return false; otherwise it will try to
3543 add more instructions to the buffer. */
3545 static bool
3546 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3548 int pos;
3550 /* Once we have filled the maximum number of insns the buffer can hold,
3551 allow the caller to match the insns against peepholes. We wait until
3552 the buffer is full in case the target has similar peepholes of different
3553 length; we always want to match the longest if possible. */
3554 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3555 return false;
3557 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3558 any other pattern, lest it change the semantics of the frame info. */
3559 if (RTX_FRAME_RELATED_P (insn))
3561 /* Let the buffer drain first. */
3562 if (peep2_current_count > 0)
3563 return false;
3564 /* Now the insn will be the only thing in the buffer. */
3567 pos = peep2_buf_position (peep2_current + peep2_current_count);
3568 peep2_insn_data[pos].insn = insn;
3569 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3570 peep2_current_count++;
3572 df_simulate_one_insn_forwards (bb, insn, live);
3573 return true;
3576 /* Perform the peephole2 optimization pass. */
3578 static void
3579 peephole2_optimize (void)
3581 rtx_insn *insn;
3582 bitmap live;
3583 int i;
3584 basic_block bb;
3586 peep2_do_cleanup_cfg = false;
3587 peep2_do_rebuild_jump_labels = false;
3589 df_set_flags (DF_LR_RUN_DCE);
3590 df_note_add_problem ();
3591 df_analyze ();
3593 /* Initialize the regsets we're going to use. */
3594 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3595 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3596 search_ofs = 0;
3597 live = BITMAP_ALLOC (&reg_obstack);
3599 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3601 bool past_end = false;
3602 int pos;
3604 rtl_profile_for_bb (bb);
3606 /* Start up propagation. */
3607 bitmap_copy (live, DF_LR_IN (bb));
3608 df_simulate_initialize_forwards (bb, live);
3609 peep2_reinit_state (live);
3611 insn = BB_HEAD (bb);
3612 for (;;)
3614 rtx_insn *attempt, *head;
3615 int match_len;
3617 if (!past_end && !NONDEBUG_INSN_P (insn))
3619 next_insn:
3620 insn = NEXT_INSN (insn);
3621 if (insn == NEXT_INSN (BB_END (bb)))
3622 past_end = true;
3623 continue;
3625 if (!past_end && peep2_fill_buffer (bb, insn, live))
3626 goto next_insn;
3628 /* If we did not fill an empty buffer, it signals the end of the
3629 block. */
3630 if (peep2_current_count == 0)
3631 break;
3633 /* The buffer filled to the current maximum, so try to match. */
3635 pos = peep2_buf_position (peep2_current + peep2_current_count);
3636 peep2_insn_data[pos].insn = PEEP2_EOB;
3637 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3639 /* Match the peephole. */
3640 head = peep2_insn_data[peep2_current].insn;
3641 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3642 if (attempt != NULL)
3644 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3645 if (last)
3647 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3648 continue;
3652 /* No match: advance the buffer by one insn. */
3653 peep2_current = peep2_buf_position (peep2_current + 1);
3654 peep2_current_count--;
3658 default_rtl_profile ();
3659 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3660 BITMAP_FREE (peep2_insn_data[i].live_before);
3661 BITMAP_FREE (live);
3662 if (peep2_do_rebuild_jump_labels)
3663 rebuild_jump_labels (get_insns ());
3664 if (peep2_do_cleanup_cfg)
3665 cleanup_cfg (CLEANUP_CFG_CHANGED);
3667 #endif /* HAVE_peephole2 */
3669 /* Common predicates for use with define_bypass. */
3671 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3672 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3673 must be either a single_set or a PARALLEL with SETs inside. */
3675 int
3676 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3678 rtx out_set, in_set;
3679 rtx out_pat, in_pat;
3680 rtx out_exp, in_exp;
3681 int i, j;
3683 in_set = single_set (in_insn);
3684 if (in_set)
3686 if (!MEM_P (SET_DEST (in_set)))
3687 return false;
3689 out_set = single_set (out_insn);
3690 if (out_set)
3692 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3693 return false;
3695 else
3697 out_pat = PATTERN (out_insn);
3699 if (GET_CODE (out_pat) != PARALLEL)
3700 return false;
3702 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3704 out_exp = XVECEXP (out_pat, 0, i);
3706 if (GET_CODE (out_exp) == CLOBBER)
3707 continue;
3709 gcc_assert (GET_CODE (out_exp) == SET);
3711 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3712 return false;
3716 else
3718 in_pat = PATTERN (in_insn);
3719 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3721 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3723 in_exp = XVECEXP (in_pat, 0, i);
3725 if (GET_CODE (in_exp) == CLOBBER)
3726 continue;
3728 gcc_assert (GET_CODE (in_exp) == SET);
3730 if (!MEM_P (SET_DEST (in_exp)))
3731 return false;
3733 out_set = single_set (out_insn);
3734 if (out_set)
3736 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3737 return false;
3739 else
3741 out_pat = PATTERN (out_insn);
3742 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3744 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3746 out_exp = XVECEXP (out_pat, 0, j);
3748 if (GET_CODE (out_exp) == CLOBBER)
3749 continue;
3751 gcc_assert (GET_CODE (out_exp) == SET);
3753 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3754 return false;
3760 return true;
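/* Invented RTL sketch of the distinction this function draws:
       out_insn: (set (reg 1) ...)
       in_insn:  (set (mem (reg 2)) (reg 1))  -> true (data dependency)
       in_insn:  (set (mem (reg 1)) (reg 3))  -> false (address dependency)
   Only a dependency through the stored value, never through the address,
   lets the bypass apply.  */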
3763 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3764 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3765 or multiple set; IN_INSN must be a single_set for the result to be exact,
3766 but for convenience of insn categorization it may be any JUMP or CALL insn. */
3768 int
3769 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3771 rtx out_set, in_set;
3773 in_set = single_set (in_insn);
3774 if (! in_set)
3776 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3777 return false;
3780 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3781 return false;
3782 in_set = SET_SRC (in_set);
3784 out_set = single_set (out_insn);
3785 if (out_set)
3787 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3788 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3789 return false;
3791 else
3793 rtx out_pat;
3794 int i;
3796 out_pat = PATTERN (out_insn);
3797 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3799 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3801 rtx exp = XVECEXP (out_pat, 0, i);
3803 if (GET_CODE (exp) == CLOBBER)
3804 continue;
3806 gcc_assert (GET_CODE (exp) == SET);
3808 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3809 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3810 return false;
3814 return true;
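/* Invented RTL sketch: with
       out_insn: (set (reg 1) ...)
       in_insn:  (set (pc) (if_then_else (ne (reg 1) (const_int 0))
                                         (label_ref L) (pc)))
   the dependency is only on the condition, so the result is true; had
   (reg 1) appeared in the THEN or ELSE arm instead, it would be false.  */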
3817 static unsigned int
3818 rest_of_handle_peephole2 (void)
3820 #ifdef HAVE_peephole2
3821 peephole2_optimize ();
3822 #endif
3823 return 0;
3826 namespace {
3828 const pass_data pass_data_peephole2 =
3830 RTL_PASS, /* type */
3831 "peephole2", /* name */
3832 OPTGROUP_NONE, /* optinfo_flags */
3833 TV_PEEPHOLE2, /* tv_id */
3834 0, /* properties_required */
3835 0, /* properties_provided */
3836 0, /* properties_destroyed */
3837 0, /* todo_flags_start */
3838 TODO_df_finish, /* todo_flags_finish */
3841 class pass_peephole2 : public rtl_opt_pass
3843 public:
3844 pass_peephole2 (gcc::context *ctxt)
3845 : rtl_opt_pass (pass_data_peephole2, ctxt)
3848 /* opt_pass methods: */
3849 /* The epiphany backend creates a second instance of this pass, so we need
3850 a clone method. */
3851 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3852 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3853 virtual unsigned int execute (function *)
3855 return rest_of_handle_peephole2 ();
3858 }; // class pass_peephole2
3860 } // anon namespace
3862 rtl_opt_pass *
3863 make_pass_peephole2 (gcc::context *ctxt)
3865 return new pass_peephole2 (ctxt);
3868 namespace {
3870 const pass_data pass_data_split_all_insns =
3872 RTL_PASS, /* type */
3873 "split1", /* name */
3874 OPTGROUP_NONE, /* optinfo_flags */
3875 TV_NONE, /* tv_id */
3876 0, /* properties_required */
3877 0, /* properties_provided */
3878 0, /* properties_destroyed */
3879 0, /* todo_flags_start */
3880 0, /* todo_flags_finish */
3883 class pass_split_all_insns : public rtl_opt_pass
3885 public:
3886 pass_split_all_insns (gcc::context *ctxt)
3887 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3890 /* opt_pass methods: */
3891 /* The epiphany backend creates a second instance of this pass, so
3892 we need a clone method. */
3893 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3894 virtual unsigned int execute (function *)
3896 split_all_insns ();
3897 return 0;
3900 }; // class pass_split_all_insns
3902 } // anon namespace
3904 rtl_opt_pass *
3905 make_pass_split_all_insns (gcc::context *ctxt)
3907 return new pass_split_all_insns (ctxt);
3910 static unsigned int
3911 rest_of_handle_split_after_reload (void)
3913 /* If optimizing, then go ahead and split insns now. */
3914 #ifndef STACK_REGS
3915 if (optimize > 0)
3916 #endif
3917 split_all_insns ();
3918 return 0;
3921 namespace {
3923 const pass_data pass_data_split_after_reload =
3925 RTL_PASS, /* type */
3926 "split2", /* name */
3927 OPTGROUP_NONE, /* optinfo_flags */
3928 TV_NONE, /* tv_id */
3929 0, /* properties_required */
3930 0, /* properties_provided */
3931 0, /* properties_destroyed */
3932 0, /* todo_flags_start */
3933 0, /* todo_flags_finish */
3936 class pass_split_after_reload : public rtl_opt_pass
3938 public:
3939 pass_split_after_reload (gcc::context *ctxt)
3940 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3943 /* opt_pass methods: */
3944 virtual unsigned int execute (function *)
3946 return rest_of_handle_split_after_reload ();
3949 }; // class pass_split_after_reload
3951 } // anon namespace
3953 rtl_opt_pass *
3954 make_pass_split_after_reload (gcc::context *ctxt)
3956 return new pass_split_after_reload (ctxt);
3959 namespace {
3961 const pass_data pass_data_split_before_regstack =
3963 RTL_PASS, /* type */
3964 "split3", /* name */
3965 OPTGROUP_NONE, /* optinfo_flags */
3966 TV_NONE, /* tv_id */
3967 0, /* properties_required */
3968 0, /* properties_provided */
3969 0, /* properties_destroyed */
3970 0, /* todo_flags_start */
3971 0, /* todo_flags_finish */
3974 class pass_split_before_regstack : public rtl_opt_pass
3976 public:
3977 pass_split_before_regstack (gcc::context *ctxt)
3978 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3981 /* opt_pass methods: */
3982 virtual bool gate (function *);
3983 virtual unsigned int execute (function *)
3985 split_all_insns ();
3986 return 0;
3989 }; // class pass_split_before_regstack
3991 bool
3992 pass_split_before_regstack::gate (function *)
3994 #if HAVE_ATTR_length && defined (STACK_REGS)
3995 /* If flow2 creates new instructions which need splitting, and
3996 scheduling after reload is not done, they might not be split
3997 until final, which does not allow splitting when
3998 HAVE_ATTR_length is defined. */
3999 # ifdef INSN_SCHEDULING
4000 return (optimize && !flag_schedule_insns_after_reload);
4001 # else
4002 return (optimize);
4003 # endif
4004 #else
4005 return 0;
4006 #endif
4009 } // anon namespace
4011 rtl_opt_pass *
4012 make_pass_split_before_regstack (gcc::context *ctxt)
4014 return new pass_split_before_regstack (ctxt);
4017 static unsigned int
4018 rest_of_handle_split_before_sched2 (void)
4020 #ifdef INSN_SCHEDULING
4021 split_all_insns ();
4022 #endif
4023 return 0;
4026 namespace {
4028 const pass_data pass_data_split_before_sched2 =
4030 RTL_PASS, /* type */
4031 "split4", /* name */
4032 OPTGROUP_NONE, /* optinfo_flags */
4033 TV_NONE, /* tv_id */
4034 0, /* properties_required */
4035 0, /* properties_provided */
4036 0, /* properties_destroyed */
4037 0, /* todo_flags_start */
4038 0, /* todo_flags_finish */
4041 class pass_split_before_sched2 : public rtl_opt_pass
4043 public:
4044 pass_split_before_sched2 (gcc::context *ctxt)
4045 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4048 /* opt_pass methods: */
4049 virtual bool gate (function *)
4051 #ifdef INSN_SCHEDULING
4052 return optimize > 0 && flag_schedule_insns_after_reload;
4053 #else
4054 return false;
4055 #endif
4058 virtual unsigned int execute (function *)
4060 return rest_of_handle_split_before_sched2 ();
4063 }; // class pass_split_before_sched2
4065 } // anon namespace
4067 rtl_opt_pass *
4068 make_pass_split_before_sched2 (gcc::context *ctxt)
4070 return new pass_split_before_sched2 (ctxt);
4073 namespace {
4075 const pass_data pass_data_split_for_shorten_branches =
4077 RTL_PASS, /* type */
4078 "split5", /* name */
4079 OPTGROUP_NONE, /* optinfo_flags */
4080 TV_NONE, /* tv_id */
4081 0, /* properties_required */
4082 0, /* properties_provided */
4083 0, /* properties_destroyed */
4084 0, /* todo_flags_start */
4085 0, /* todo_flags_finish */
4088 class pass_split_for_shorten_branches : public rtl_opt_pass
4090 public:
4091 pass_split_for_shorten_branches (gcc::context *ctxt)
4092 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4095 /* opt_pass methods: */
4096 virtual bool gate (function *)
4098 /* The placement of the splitting that we do for shorten_branches
4099 depends on whether regstack is used by the target or not. */
4100 #if HAVE_ATTR_length && !defined (STACK_REGS)
4101 return true;
4102 #else
4103 return false;
4104 #endif
4107 virtual unsigned int execute (function *)
4109 return split_all_insns_noflow ();
4112 }; // class pass_split_for_shorten_branches
4114 } // anon namespace
4116 rtl_opt_pass *
4117 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4119 return new pass_split_for_shorten_branches (ctxt);
4122 /* (Re)initialize the target information after a change in target. */
4124 void
4125 recog_init ()
4127 /* The information is zero-initialized, so we don't need to do anything
4128 first time round. */
4129 if (!this_target_recog->x_initialized)
4131 this_target_recog->x_initialized = true;
4132 return;
4134 memset (this_target_recog->x_bool_attr_masks, 0,
4135 sizeof (this_target_recog->x_bool_attr_masks));
4136 for (int i = 0; i < LAST_INSN_CODE; ++i)
4137 if (this_target_recog->x_op_alt[i])
4139 free (this_target_recog->x_op_alt[i]);
4140 this_target_recog->x_op_alt[i] = 0;