/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
129 /* Return true if labels in asm operands BODY are LABEL_REFs. */
131 static bool
132 asm_labels_ok (rtx body)
134 rtx asmop;
135 int i;
137 asmop = extract_asm_operands (body);
138 if (asmop == NULL_RTX)
139 return true;
141 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
142 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
143 return false;
145 return true;
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 without the UNSHARE argument;
   UNSHARE defaults to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument;
   UNSHARE defaults to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
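
/* Usage sketch (illustrative, not part of the original file; SET,
   NEW_SRC and NEW_DEST are hypothetical caller locals).  A single
   tentative edit, validated immediately:

     if (validate_change (insn, &SET_SRC (set), new_src, 0))
       ... the insn was re-recognized with the new source ...

   Several dependent edits queued as a group; on failure every queued
   change is rolled back:

     validate_change (insn, &SET_SRC (set), new_src, 1);
     validate_change (insn, &SET_DEST (set), new_dest, 1);
     if (! apply_change_group ())
       ... both locations are back to their old contents ...  */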
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
		      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
		  REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
	 are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
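
/* Sketch of the two-phase protocol (illustrative, not part of the
   original file): a pass can queue several changes, test them with
   verify_changes, and then either commit everything or retract just
   its own changes:

     int start = num_validated_changes ();
     validate_change (insn, loc, new_rtx, 1);
     if (verify_changes (start))
       confirm_change_group ();
     else
       cancel_changes (start);  */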
/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
			  machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  machine_mode wanted_mode = VOIDmode;
	  machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
	    {
	      wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
	    {
	      wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
				      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
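
/* Illustrative sketch (not part of the original file): propagating a
   register copy FROM -> TO through INSN, including its REG_EQUAL and
   REG_EQUIV notes, keeping the insn only if it still matches:

     validate_replace_rtx_group (from, to, insn);
     if (! apply_change_group ())
       ... INSN is unchanged; the replacement did not survive ...  */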
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx_insn *insn;		/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx_insn *insn)
{
  rtx_insn *next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 reference to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insn with invalid addresses which is made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
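
/* Usage sketch (illustrative, not part of the original file): an
   expander can use this predicate to decide whether an operand must
   first be copied into a register, using force_reg from explow.c:

     if (! general_operand (operands[1], SImode))
       operands[1] = force_reg (SImode, operands[1]);  */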
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
	return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && (lra_in_progress
		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op)
					    : mode, op));
}

/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */
int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return 0;

      if (prec == bitsize)
	return 1;
      else
	{
	  /* Multiword partial int.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */
int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
	  && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */
int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
	  || INTVAL (XEXP (XEXP (op, 1), 1))
	     != ((STACK_GROWS_DOWNWARD ? -1 : 1) * (int) rounded_size))
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
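
/* For example (illustrative), on a STACK_GROWS_DOWNWARD target without
   PUSH_ROUNDING, a word-sized push is the memory reference

     (mem:SI (pre_dec:SI (reg:SI sp)))

   which this predicate accepts, since the default STACK_PUSH_CODE on
   such targets is PRE_DEC.  */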
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
1564 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1565 copy its operands (both input and output) into the vector OPERANDS,
1566 the locations of the operands within the insn into the vector OPERAND_LOCS,
1567 and the constraints for the operands into CONSTRAINTS.
1568 Write the modes of the operands into MODES.
1569 Return the assembler-template.
1571 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1572 we don't store that info. */
1574 const char *
1575 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1576 const char **constraints, machine_mode *modes,
1577 location_t *loc)
1579 int nbase = 0, n, i;
1580 rtx asmop;
1582 switch (GET_CODE (body))
1584 case ASM_OPERANDS:
1585 /* Zero output asm: BODY is (asm_operands ...). */
1586 asmop = body;
1587 break;
1589 case SET:
1590 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1591 asmop = SET_SRC (body);
1593 /* The output is in the SET.
1594 Its constraint is in the ASM_OPERANDS itself. */
1595 if (operands)
1596 operands[0] = SET_DEST (body);
1597 if (operand_locs)
1598 operand_locs[0] = &SET_DEST (body);
1599 if (constraints)
1600 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1601 if (modes)
1602 modes[0] = GET_MODE (SET_DEST (body));
1603 nbase = 1;
1604 break;
1606 case PARALLEL:
1608 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1610 asmop = XVECEXP (body, 0, 0);
1611 if (GET_CODE (asmop) == SET)
1613 asmop = SET_SRC (asmop);
1615 /* At least one output, plus some CLOBBERs. The outputs are in
1616 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1617 for (i = 0; i < nparallel; i++)
1619 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1620 break; /* Past last SET */
1621 if (operands)
1622 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1623 if (operand_locs)
1624 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1625 if (constraints)
1626 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1627 if (modes)
1628 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1630 nbase = i;
1632 break;
1635 default:
1636 gcc_unreachable ();
1639 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1640 for (i = 0; i < n; i++)
1642 if (operand_locs)
1643 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1644 if (operands)
1645 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1646 if (constraints)
1647 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1648 if (modes)
1649 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1651 nbase += n;
1653 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1654 for (i = 0; i < n; i++)
1656 if (operand_locs)
1657 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1658 if (operands)
1659 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1660 if (constraints)
1661 constraints[nbase + i] = "";
1662 if (modes)
1663 modes[nbase + i] = Pmode;
1666 if (loc)
1667 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1669 return ASM_OPERANDS_TEMPLATE (asmop);
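
/* Usage sketch (illustrative, not part of the original file): callers
   normally size the arrays with asm_noperands first, as
   check_asm_operands above does:

     int n = asm_noperands (body);
     if (n > 0)
       {
	 rtx *ops = XALLOCAVEC (rtx, n);
	 const char **cons = XALLOCAVEC (const char *, n);
	 decode_asm_operands (body, ops, NULL, cons, NULL, NULL);
       }  */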
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
			 unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
	p += 1;
	/* A letter followed by a digit indicates an operand number.  */
	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
	  p += 1;
	if (ISDIGIT (*p))
	  {
	    char *endptr;
	    unsigned long opnum = strtoul (p, &endptr, 10);
	    if (endptr != p && opnum < noperands)
	      used[opnum] = true;
	    p = endptr;
	  }
	else
	  p += 1;
	break;

      default:
	p++;
	break;
      }
}
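
/* Usage sketch (illustrative, not part of the original file):

     bool used[MAX_RECOG_OPERANDS];
     get_referenced_operands (ASM_OPERANDS_TEMPLATE (asmop), used,
			      noperands);

   after which used[i] says whether "%i" (possibly with a modifier
   letter, e.g. "%w3") occurs in the template.  */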
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	  /* The rest of the compiler assumes that reloading the address
	     of a MEM into a register will make it fit an 'o' constraint.
	     That is, if it sees a MEM operand for an 'o' constraint,
	     it assumes that (mem (base-reg)) will fit.

	     That assumption fails on targets that don't have offsettable
	     addresses at all.  We therefore need to treat 'o' asm
	     constraints as a special case and only accept operands that
	     are already offsettable, thus proving that at least one
	     offsettable address exists.  */
	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

#ifdef AUTO_INC_DEC
	case '<':
	case '>':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */
	  incdec_ok = true;
#endif
	default:
	  cn = lookup_constraint (constraint);
	  switch (get_constraint_type (cn))
	    {
	    case CT_REGISTER:
	      if (!result
		  && reg_class_for_constraint (cn) != NO_REGS
		  && GET_MODE (op) != BLKmode
		  && register_operand (op, VOIDmode))
		result = 1;
	      break;

	    case CT_CONST_INT:
	      if (!result
		  && CONST_INT_P (op)
		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
		result = 1;
	      break;

	    case CT_MEMORY:
	      /* Every memory operand can be reloaded to fit.  */
	      result = result || memory_operand (op, VOIDmode);
	      break;

	    case CT_ADDRESS:
	      /* Every address operand can be reloaded to fit.  */
	      result = result || address_operand (op, VOIDmode);
	      break;

	    case CT_FIXED_FORM:
	      result = result || constraint_satisfied_p (op, cn);
	      break;
	    }
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
	return 0;
      default:
	break;
      }
#endif

  return result;
}
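
/* Usage sketch (illustrative): this is how check_asm_operands above
   validates each operand before reload, passing the whole constraint
   array so that matching constraints such as "0" can be looked up:

     if (! asm_operand_ok (operands[i], constraints[i], constraints))
       return 0;  */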
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
	     : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
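
/* Worked example (illustrative): for an SImode reference, mode_sz is 4,
   so for Y == (plus (reg) (const_int 20)) the code above checks whether
   (plus (reg) (const_int 23)) is still a valid QImode address; if it
   is, every byte offset 0..3 within the object is assumed to be
   addressable as well.  */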
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}
2061 /* Return true if boolean attribute ATTR is supported. */
2063 static bool
2064 have_bool_attr (bool_attr attr)
2066 switch (attr)
2068 case BA_ENABLED:
2069 return HAVE_ATTR_enabled;
2070 case BA_PREFERRED_FOR_SIZE:
2071 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2072 case BA_PREFERRED_FOR_SPEED:
2073 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2075 gcc_unreachable ();
2078 /* Return the value of ATTR for instruction INSN. */
2080 static bool
2081 get_bool_attr (rtx_insn *insn, bool_attr attr)
2083 switch (attr)
2085 case BA_ENABLED:
2086 return get_attr_enabled (insn);
2087 case BA_PREFERRED_FOR_SIZE:
2088 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2089 case BA_PREFERRED_FOR_SPEED:
2090 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2092 gcc_unreachable ();
2095 /* Like get_bool_attr_mask, but don't use the cache. */
2097 static alternative_mask
2098 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2100 /* Temporarily install enough information for get_attr_<foo> to assume
2101 that the insn operands are already cached. As above, the attribute
2102 mustn't depend on the values of operands, so we don't provide their
2103 real values here. */
2104 rtx_insn *old_insn = recog_data.insn;
2105 int old_alternative = which_alternative;
2107 recog_data.insn = insn;
2108 alternative_mask mask = ALL_ALTERNATIVES;
2109 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2110 for (int i = 0; i < n_alternatives; i++)
2112 which_alternative = i;
2113 if (!get_bool_attr (insn, attr))
2114 mask &= ~ALTERNATIVE_BIT (i);
2117 recog_data.insn = old_insn;
2118 which_alternative = old_alternative;
2119 return mask;
2122 /* Return the mask of operand alternatives that are allowed for INSN
2123 by boolean attribute ATTR. This mask depends only on INSN and on
2124 the current target; it does not depend on things like the values of
2125 operands. */
2127 static alternative_mask
2128 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2130 /* Quick exit for asms and for targets that don't use these attributes. */
2131 int code = INSN_CODE (insn);
2132 if (code < 0 || !have_bool_attr (attr))
2133 return ALL_ALTERNATIVES;
2135 /* Calling get_attr_<foo> can be expensive, so cache the mask
2136 for speed. */
2137 if (!this_target_recog->x_bool_attr_masks[code][attr])
2138 this_target_recog->x_bool_attr_masks[code][attr]
2139 = get_bool_attr_mask_uncached (insn, attr);
2140 return this_target_recog->x_bool_attr_masks[code][attr];
2143 /* Return the set of alternatives of INSN that are allowed by the current
2144 target. */
2146 alternative_mask
2147 get_enabled_alternatives (rtx_insn *insn)
2149 return get_bool_attr_mask (insn, BA_ENABLED);
2152 /* Return the set of alternatives of INSN that are allowed by the current
2153 target and are preferred for the current size/speed optimization
2154 choice. */
2156 alternative_mask
2157 get_preferred_alternatives (rtx_insn *insn)
2159 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2160 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2161 else
2162 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2165 /* Return the set of alternatives of INSN that are allowed by the current
2166 target and are preferred for the size/speed optimization choice
2167 associated with BB. Passing a separate BB is useful if INSN has not
2168 been emitted yet or if we are considering moving it to a different
2169 block. */
2171 alternative_mask
2172 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2174 if (optimize_bb_for_speed_p (bb))
2175 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2176 else
2177 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
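/* Usage sketch (editorial illustration): a caller typically intersects
   the returned mask with the alternatives it is considering, e.g.:

     alternative_mask preferred = get_preferred_alternatives (insn);
     for (int alt = 0; alt < recog_data.n_alternatives; alt++)
       if (TEST_BIT (preferred, alt))
         ...consider alternative ALT...

   assuming extract_insn has already filled in recog_data for INSN.  */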
2180 /* Assert that the cached boolean attributes for INSN are still accurate.
2181 The backend is required to define these attributes in a way that only
2182 depends on the current target (rather than operands, compiler phase,
2183 etc.). */
2185 bool
2186 check_bool_attrs (rtx_insn *insn)
2188 int code = INSN_CODE (insn);
2189 if (code >= 0)
2190 for (int i = 0; i <= BA_LAST; ++i)
2192 enum bool_attr attr = (enum bool_attr) i;
2193 if (this_target_recog->x_bool_attr_masks[code][attr])
2194 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2195 == get_bool_attr_mask_uncached (insn, attr));
2197 return true;
2200 /* Like extract_insn, but save the insn extracted and don't extract it
2201 again when called again for the same insn, expecting that recog_data
2202 still contains valid information. This is used primarily by the
2203 gen_attr infrastructure, which often extracts the same insn repeatedly. */
2204 void
2205 extract_insn_cached (rtx_insn *insn)
2207 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2208 return;
2209 extract_insn (insn);
2210 recog_data.insn = insn;
2213 /* Do uncached extract_insn, constrain_operands and complain about failures.
2214 This should be used when extracting a pre-existing constrained instruction
2215 if the caller wants to know which alternative was chosen. */
2216 void
2217 extract_constrain_insn (rtx_insn *insn)
2219 extract_insn (insn);
2220 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2221 fatal_insn_not_found (insn);
2224 /* Do cached extract_insn, constrain_operands and complain about failures.
2225 Used by insn_attrtab. */
2226 void
2227 extract_constrain_insn_cached (rtx_insn *insn)
2229 extract_insn_cached (insn);
2230 if (which_alternative == -1
2231 && !constrain_operands (reload_completed,
2232 get_enabled_alternatives (insn)))
2233 fatal_insn_not_found (insn);
2236 /* Do cached constrain_operands on INSN and complain about failures. */
2237 int
2238 constrain_operands_cached (rtx_insn *insn, int strict)
2240 if (which_alternative == -1)
2241 return constrain_operands (strict, get_enabled_alternatives (insn));
2242 else
2243 return 1;
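/* Usage sketch (editorial illustration): attribute code that needs both
   the operands and the chosen alternative of an already-recognized insn
   typically does:

     extract_constrain_insn_cached (insn);
     rtx dest = recog_data.operand[0];
     int alt = which_alternative;

   with repeated calls for the same insn answered from the cache.  */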
2246 /* Analyze INSN and fill in recog_data. */
2248 void
2249 extract_insn (rtx_insn *insn)
2251 int i;
2252 int icode;
2253 int noperands;
2254 rtx body = PATTERN (insn);
2256 recog_data.n_operands = 0;
2257 recog_data.n_alternatives = 0;
2258 recog_data.n_dups = 0;
2259 recog_data.is_asm = false;
2261 switch (GET_CODE (body))
2263 case USE:
2264 case CLOBBER:
2265 case ASM_INPUT:
2266 case ADDR_VEC:
2267 case ADDR_DIFF_VEC:
2268 case VAR_LOCATION:
2269 return;
2271 case SET:
2272 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2273 goto asm_insn;
2274 else
2275 goto normal_insn;
2276 case PARALLEL:
2277 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2278 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2279 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2280 goto asm_insn;
2281 else
2282 goto normal_insn;
2283 case ASM_OPERANDS:
2284 asm_insn:
2285 recog_data.n_operands = noperands = asm_noperands (body);
2286 if (noperands >= 0)
2288 /* This insn is an `asm' with operands. */
2290 /* expand_asm_operands makes sure there aren't too many operands. */
2291 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2293 /* Now get the operand values and constraints out of the insn. */
2294 decode_asm_operands (body, recog_data.operand,
2295 recog_data.operand_loc,
2296 recog_data.constraints,
2297 recog_data.operand_mode, NULL);
2298 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2299 if (noperands > 0)
2301 const char *p = recog_data.constraints[0];
2302 recog_data.n_alternatives = 1;
2303 while (*p)
2304 recog_data.n_alternatives += (*p++ == ',');
2306 recog_data.is_asm = true;
2307 break;
2309 fatal_insn_not_found (insn);
2311 default:
2312 normal_insn:
2313 /* Ordinary insn: recognize it, get the operands via insn_extract
2314 and get the constraints. */
2316 icode = recog_memoized (insn);
2317 if (icode < 0)
2318 fatal_insn_not_found (insn);
2320 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2321 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2322 recog_data.n_dups = insn_data[icode].n_dups;
2324 insn_extract (insn);
2326 for (i = 0; i < noperands; i++)
2328 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2329 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2330 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2331 /* A VOIDmode match_operand gets its mode from its real operand. */
2332 if (recog_data.operand_mode[i] == VOIDmode)
2333 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2336 for (i = 0; i < noperands; i++)
2337 recog_data.operand_type[i]
2338 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2339 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2340 : OP_IN);
2342 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2344 recog_data.insn = NULL;
2345 which_alternative = -1;
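/* Usage sketch (editorial illustration; mark_written is a hypothetical
   helper): once extract_insn has run, the operands and their roles can
   be walked directly:

     extract_insn (insn);
     for (int i = 0; i < recog_data.n_operands; i++)
       if (recog_data.operand_type[i] != OP_IN)
         mark_written (recog_data.operand[i]);

   OP_OUT and OP_INOUT operands are the ones written.  */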
2348 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2349 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2350 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2351 has N_OPERANDS entries. */
2353 void
2354 preprocess_constraints (int n_operands, int n_alternatives,
2355 const char **constraints,
2356 operand_alternative *op_alt_base)
2358 for (int i = 0; i < n_operands; i++)
2360 int j;
2361 struct operand_alternative *op_alt;
2362 const char *p = constraints[i];
2364 op_alt = op_alt_base;
2366 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2368 op_alt[i].cl = NO_REGS;
2369 op_alt[i].constraint = p;
2370 op_alt[i].matches = -1;
2371 op_alt[i].matched = -1;
2373 if (*p == '\0' || *p == ',')
2375 op_alt[i].anything_ok = 1;
2376 continue;
2379 for (;;)
2381 char c = *p;
2382 if (c == '#')
2384 c = *++p;
2385 while (c != ',' && c != '\0');
2386 if (c == ',' || c == '\0')
2388 p++;
2389 break;
2392 switch (c)
2394 case '?':
2395 op_alt[i].reject += 6;
2396 break;
2397 case '!':
2398 op_alt[i].reject += 600;
2399 break;
2400 case '&':
2401 op_alt[i].earlyclobber = 1;
2402 break;
2404 case '0': case '1': case '2': case '3': case '4':
2405 case '5': case '6': case '7': case '8': case '9':
2407 char *end;
2408 op_alt[i].matches = strtoul (p, &end, 10);
2409 op_alt[op_alt[i].matches].matched = i;
2410 p = end;
2412 continue;
2414 case 'X':
2415 op_alt[i].anything_ok = 1;
2416 break;
2418 case 'g':
2419 op_alt[i].cl =
2420 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2421 break;
2423 default:
2424 enum constraint_num cn = lookup_constraint (p);
2425 enum reg_class cl;
2426 switch (get_constraint_type (cn))
2428 case CT_REGISTER:
2429 cl = reg_class_for_constraint (cn);
2430 if (cl != NO_REGS)
2431 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2432 break;
2434 case CT_CONST_INT:
2435 break;
2437 case CT_MEMORY:
2438 op_alt[i].memory_ok = 1;
2439 break;
2441 case CT_ADDRESS:
2442 op_alt[i].is_address = 1;
2443 op_alt[i].cl
2444 = (reg_class_subunion
2445 [(int) op_alt[i].cl]
2446 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2447 ADDRESS, SCRATCH)]);
2448 break;
2450 case CT_FIXED_FORM:
2451 break;
2453 break;
2455 p += CONSTRAINT_LEN (c, p);
2461 /* Return an array of operand_alternative structures for
2462 instruction ICODE. */
2464 const operand_alternative *
2465 preprocess_insn_constraints (int icode)
2467 gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
2468 if (this_target_recog->x_op_alt[icode])
2469 return this_target_recog->x_op_alt[icode];
2471 int n_operands = insn_data[icode].n_operands;
2472 if (n_operands == 0)
2473 return 0;
2474 /* Always provide at least one alternative so that which_op_alt ()
2475 works correctly. If the instruction has 0 alternatives (i.e. all
2476 constraint strings are empty) then each operand in this alternative
2477 will have anything_ok set. */
2478 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2479 int n_entries = n_operands * n_alternatives;
2481 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2482 const char **constraints = XALLOCAVEC (const char *, n_operands);
2484 for (int i = 0; i < n_operands; ++i)
2485 constraints[i] = insn_data[icode].operand[i].constraint;
2486 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);
2488 this_target_recog->x_op_alt[icode] = op_alt;
2489 return op_alt;
2492 /* After calling extract_insn, you can use this function to extract some
2493 information from the constraint strings into a more usable form.
2494 The collected data is stored in recog_op_alt. */
2496 void
2497 preprocess_constraints (rtx_insn *insn)
2499 int icode = INSN_CODE (insn);
2500 if (icode >= 0)
2501 recog_op_alt = preprocess_insn_constraints (icode);
2502 else
2504 int n_operands = recog_data.n_operands;
2505 int n_alternatives = recog_data.n_alternatives;
2506 int n_entries = n_operands * n_alternatives;
2507 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2508 preprocess_constraints (n_operands, n_alternatives,
2509 recog_data.constraints, asm_op_alt);
2510 recog_op_alt = asm_op_alt;
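/* Usage sketch (editorial illustration): after the call above, the
   constraint summary for operand OP in alternative ALT lives at

     const operand_alternative *oa
       = &recog_op_alt[alt * recog_data.n_operands + op];
     if (oa->memory_ok || oa->anything_ok)
       ...the operand may live in memory in this alternative...

   matching the layout filled in by preprocess_constraints above.  */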
2514 /* Check the operands of an insn against the insn's operand constraints
2515 and return 1 if they match any of the alternatives in ALTERNATIVES.
2517 The information about the insn's operands, constraints, operand modes
2518 etc. is obtained from the global variables set up by extract_insn.
2520 WHICH_ALTERNATIVE is set to a number which indicates which
2521 alternative of constraints was matched: 0 for the first alternative,
2522 1 for the next, etc.
2524 In addition, when two operands are required to match
2525 and it happens that the output operand is (reg) while the
2526 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2527 make the output operand look like the input.
2528 This is because the output operand is the one the template will print.
2530 This is used in final, just before printing the assembler code and by
2531 the routines that determine an insn's attribute.
2533 If STRICT is positive, it means that we have been
2534 called after reload has been completed. In that case, we must
2535 do all checks strictly. If it is zero, it means that we have been called
2536 before reload has completed. In that case, we first try to see if we can
2537 find an alternative that matches strictly. If not, we try again, this
2538 time assuming that reload will fix up the insn. This provides a "best
2539 guess" for the alternative and is used to compute attributes of insns prior
2540 to reload. A negative value of STRICT is used for this internal call. */
2542 struct funny_match
2544 int this_op, other;
2545 };
2547 int
2548 constrain_operands (int strict, alternative_mask alternatives)
2550 const char *constraints[MAX_RECOG_OPERANDS];
2551 int matching_operands[MAX_RECOG_OPERANDS];
2552 int earlyclobber[MAX_RECOG_OPERANDS];
2553 int c;
2555 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2556 int funny_match_index;
2558 which_alternative = 0;
2559 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2560 return 1;
2562 for (c = 0; c < recog_data.n_operands; c++)
2564 constraints[c] = recog_data.constraints[c];
2565 matching_operands[c] = -1;
2570 int seen_earlyclobber_at = -1;
2571 int opno;
2572 int lose = 0;
2573 funny_match_index = 0;
2575 if (!TEST_BIT (alternatives, which_alternative))
2577 int i;
2579 for (i = 0; i < recog_data.n_operands; i++)
2580 constraints[i] = skip_alternative (constraints[i]);
2582 which_alternative++;
2583 continue;
2586 for (opno = 0; opno < recog_data.n_operands; opno++)
2588 rtx op = recog_data.operand[opno];
2589 machine_mode mode = GET_MODE (op);
2590 const char *p = constraints[opno];
2591 int offset = 0;
2592 int win = 0;
2593 int val;
2594 int len;
2596 earlyclobber[opno] = 0;
2598 /* A unary operator may be accepted by the predicate, but it
2599 is irrelevant for matching constraints. */
2600 if (UNARY_P (op))
2601 op = XEXP (op, 0);
2603 if (GET_CODE (op) == SUBREG)
2605 if (REG_P (SUBREG_REG (op))
2606 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2607 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2608 GET_MODE (SUBREG_REG (op)),
2609 SUBREG_BYTE (op),
2610 GET_MODE (op));
2611 op = SUBREG_REG (op);
2614 /* An empty constraint or empty alternative
2615 allows anything which matched the pattern. */
2616 if (*p == 0 || *p == ',')
2617 win = 1;
2620 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2622 case '\0':
2623 len = 0;
2624 break;
2625 case ',':
2626 c = '\0';
2627 break;
2629 case '#':
2630 /* Ignore rest of this alternative as far as
2631 constraint checking is concerned. */
2633 p++;
2634 while (*p && *p != ',');
2635 len = 0;
2636 break;
2638 case '&':
2639 earlyclobber[opno] = 1;
2640 if (seen_earlyclobber_at < 0)
2641 seen_earlyclobber_at = opno;
2642 break;
2644 case '0': case '1': case '2': case '3': case '4':
2645 case '5': case '6': case '7': case '8': case '9':
2647 /* This operand must be the same as a previous one.
2648 This kind of constraint is used for instructions such
2649 as add when they take only two operands.
2651 Note that the lower-numbered operand is passed first.
2653 If we are not testing strictly, assume that this
2654 constraint will be satisfied. */
2656 char *end;
2657 int match;
2659 match = strtoul (p, &end, 10);
2660 p = end;
2662 if (strict < 0)
2663 val = 1;
2664 else
2666 rtx op1 = recog_data.operand[match];
2667 rtx op2 = recog_data.operand[opno];
2669 /* A unary operator may be accepted by the predicate,
2670 but it is irrelevant for matching constraints. */
2671 if (UNARY_P (op1))
2672 op1 = XEXP (op1, 0);
2673 if (UNARY_P (op2))
2674 op2 = XEXP (op2, 0);
2676 val = operands_match_p (op1, op2);
2679 matching_operands[opno] = match;
2680 matching_operands[match] = opno;
2682 if (val != 0)
2683 win = 1;
2685 /* If output is *x and input is *--x, arrange later
2686 to change the output to *--x as well, since the
2687 output op is the one that will be printed. */
2688 if (val == 2 && strict > 0)
2690 funny_match[funny_match_index].this_op = opno;
2691 funny_match[funny_match_index++].other = match;
2694 len = 0;
2695 break;
2697 case 'p':
2698 /* p is used for address_operands. When we are called by
2699 gen_reload, no one will have checked that the address is
2700 strictly valid, i.e., that all pseudos requiring hard regs
2701 have gotten them. */
2702 if (strict <= 0
2703 || (strict_memory_address_p (recog_data.operand_mode[opno],
2704 op)))
2705 win = 1;
2706 break;
2708 /* No need to check general_operand again;
2709 it was done in insn-recog.c. Well, except that reload
2710 doesn't check the validity of its replacements, but
2711 that should only matter when there's a bug. */
2712 case 'g':
2713 /* Anything goes unless it is a REG and really has a hard reg
2714 but the hard reg is not in the class GENERAL_REGS. */
2715 if (REG_P (op))
2717 if (strict < 0
2718 || GENERAL_REGS == ALL_REGS
2719 || (reload_in_progress
2720 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2721 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2722 win = 1;
2724 else if (strict < 0 || general_operand (op, mode))
2725 win = 1;
2726 break;
2728 default:
2730 enum constraint_num cn = lookup_constraint (p);
2731 enum reg_class cl = reg_class_for_constraint (cn);
2732 if (cl != NO_REGS)
2734 if (strict < 0
2735 || (strict == 0
2736 && REG_P (op)
2737 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2738 || (strict == 0 && GET_CODE (op) == SCRATCH)
2739 || (REG_P (op)
2740 && reg_fits_class_p (op, cl, offset, mode)))
2741 win = 1;
2744 else if (constraint_satisfied_p (op, cn))
2745 win = 1;
2747 else if (insn_extra_memory_constraint (cn)
2748 /* Every memory operand can be reloaded to fit. */
2749 && ((strict < 0 && MEM_P (op))
2750 /* Before reload, accept what reload can turn
2751 into a mem. */
2752 || (strict < 0 && CONSTANT_P (op))
2753 /* Before reload, accept a pseudo,
2754 since LRA can turn it into a mem. */
2755 || (strict < 0 && targetm.lra_p () && REG_P (op)
2756 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2757 /* During reload, accept a pseudo. */
2758 || (reload_in_progress && REG_P (op)
2759 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2760 win = 1;
2761 else if (insn_extra_address_constraint (cn)
2762 /* Every address operand can be reloaded to fit. */
2763 && strict < 0)
2764 win = 1;
2765 /* Cater to architectures like IA-64 that define extra memory
2766 constraints without using define_memory_constraint. */
2767 else if (reload_in_progress
2768 && REG_P (op)
2769 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2770 && reg_renumber[REGNO (op)] < 0
2771 && reg_equiv_mem (REGNO (op)) != 0
2772 && constraint_satisfied_p
2773 (reg_equiv_mem (REGNO (op)), cn))
2774 win = 1;
2775 break;
2778 while (p += len, c);
2780 constraints[opno] = p;
2781 /* If this operand did not win somehow,
2782 this alternative loses. */
2783 if (! win)
2784 lose = 1;
2786 /* This alternative won; the operands are ok.
2787 Change whichever operands this alternative says to change. */
2788 if (! lose)
2790 int opno, eopno;
2792 /* See if any earlyclobber operand conflicts with some other
2793 operand. */
2795 if (strict > 0 && seen_earlyclobber_at >= 0)
2796 for (eopno = seen_earlyclobber_at;
2797 eopno < recog_data.n_operands;
2798 eopno++)
2799 /* Ignore earlyclobber operands now in memory,
2800 because we would often report failure when we have
2801 two memory operands, one of which was formerly a REG. */
2802 if (earlyclobber[eopno]
2803 && REG_P (recog_data.operand[eopno]))
2804 for (opno = 0; opno < recog_data.n_operands; opno++)
2805 if ((MEM_P (recog_data.operand[opno])
2806 || recog_data.operand_type[opno] != OP_OUT)
2807 && opno != eopno
2808 /* Ignore things like match_operator operands. */
2809 && *recog_data.constraints[opno] != 0
2810 && ! (matching_operands[opno] == eopno
2811 && operands_match_p (recog_data.operand[opno],
2812 recog_data.operand[eopno]))
2813 && ! safe_from_earlyclobber (recog_data.operand[opno],
2814 recog_data.operand[eopno]))
2815 lose = 1;
2817 if (! lose)
2819 while (--funny_match_index >= 0)
2821 recog_data.operand[funny_match[funny_match_index].other]
2822 = recog_data.operand[funny_match[funny_match_index].this_op];
2825 #ifdef AUTO_INC_DEC
2826 /* For operands without < or > constraints reject side-effects. */
2827 if (recog_data.is_asm)
2829 for (opno = 0; opno < recog_data.n_operands; opno++)
2830 if (MEM_P (recog_data.operand[opno]))
2831 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2833 case PRE_INC:
2834 case POST_INC:
2835 case PRE_DEC:
2836 case POST_DEC:
2837 case PRE_MODIFY:
2838 case POST_MODIFY:
2839 if (strchr (recog_data.constraints[opno], '<') == NULL
2840 && strchr (recog_data.constraints[opno], '>')
2841 == NULL)
2842 return 0;
2843 break;
2844 default:
2845 break;
2848 #endif
2849 return 1;
2853 which_alternative++;
2855 while (which_alternative < recog_data.n_alternatives);
2857 which_alternative = -1;
2858 /* If we are about to reject this, but we are not to test strictly,
2859 try a very loose test. Only return failure if it fails also. */
2860 if (strict == 0)
2861 return constrain_operands (-1, alternatives);
2862 else
2863 return 0;
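/* Usage sketch (editorial illustration): callers normally pair this with
   extract_insn, which is exactly what extract_constrain_insn above
   packages up:

     extract_insn (insn);
     if (!constrain_operands (reload_completed,
                              get_enabled_alternatives (insn)))
       fatal_insn_not_found (insn);

   after which which_alternative records the matched alternative.  */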
2866 /* Return true iff OPERAND (assumed to be a REG rtx)
2867 is a hard reg in class CLASS when its regno is offset by OFFSET
2868 and changed to mode MODE.
2869 If REG occupies multiple hard regs, all of them must be in CLASS. */
2871 bool
2872 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2873 machine_mode mode)
2875 unsigned int regno = REGNO (operand);
2877 if (cl == NO_REGS)
2878 return false;
2880 /* Regno must not be a pseudo register. Offset may be negative. */
2881 return (HARD_REGISTER_NUM_P (regno)
2882 && HARD_REGISTER_NUM_P (regno + offset)
2883 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2884 regno + offset));
2887 /* Split a single instruction INSN. Helper function for split_all_insns
2888 and split_all_insns_noflow. Return the last insn in the sequence if
2889 successful, or NULL if unsuccessful. */
2891 static rtx_insn *
2892 split_insn (rtx_insn *insn)
2894 /* Split insns here to get max fine-grain parallelism. */
2895 rtx_insn *first = PREV_INSN (insn);
2896 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2897 rtx insn_set, last_set, note;
2899 if (last == insn)
2900 return NULL;
2902 /* If the original instruction was a single set that was known to be
2903 equivalent to a constant, see if we can say the same about the last
2904 instruction in the split sequence. The two instructions must set
2905 the same destination. */
2906 insn_set = single_set (insn);
2907 if (insn_set)
2909 last_set = single_set (last);
2910 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2912 note = find_reg_equal_equiv_note (insn);
2913 if (note && CONSTANT_P (XEXP (note, 0)))
2914 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2915 else if (CONSTANT_P (SET_SRC (insn_set)))
2916 set_unique_reg_note (last, REG_EQUAL,
2917 copy_rtx (SET_SRC (insn_set)));
2921 /* try_split returns the NOTE that INSN became. */
2922 SET_INSN_DELETED (insn);
2924 /* ??? Coddle to md files that generate subregs in post-reload
2925 splitters instead of computing the proper hard register. */
2926 if (reload_completed && first != last)
2928 first = NEXT_INSN (first);
2929 for (;;)
2931 if (INSN_P (first))
2932 cleanup_subreg_operands (first);
2933 if (first == last)
2934 break;
2935 first = NEXT_INSN (first);
2939 return last;
2942 /* Split all insns in the function. */
2944 void
2945 split_all_insns (void)
2947 sbitmap blocks;
2948 bool changed;
2949 basic_block bb;
2951 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2952 bitmap_clear (blocks);
2953 changed = false;
2955 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2957 rtx_insn *insn, *next;
2958 bool finish = false;
2960 rtl_profile_for_bb (bb);
2961 for (insn = BB_HEAD (bb); !finish ; insn = next)
2963 /* Can't use `next_real_insn' because that might go across
2964 CODE_LABELS and short-out basic blocks. */
2965 next = NEXT_INSN (insn);
2966 finish = (insn == BB_END (bb));
2967 if (INSN_P (insn))
2969 rtx set = single_set (insn);
2971 /* Don't split no-op move insns. These should silently
2972 disappear later in final. Splitting such insns would
2973 break the code that handles LIBCALL blocks. */
2974 if (set && set_noop_p (set))
2976 /* Nops get in the way while scheduling, so delete them
2977 now if register allocation has already been done. It
2978 is too risky to try to do this before register
2979 allocation, and there are unlikely to be very many
2980 nops then anyway. */
2981 if (reload_completed)
2982 delete_insn_and_edges (insn);
2984 else
2986 if (split_insn (insn))
2988 bitmap_set_bit (blocks, bb->index);
2989 changed = true;
2996 default_rtl_profile ();
2997 if (changed)
2998 find_many_sub_basic_blocks (blocks);
3000 #ifdef ENABLE_CHECKING
3001 verify_flow_info ();
3002 #endif
3004 sbitmap_free (blocks);
3007 /* Same as split_all_insns, but do not expect CFG to be available.
3008 Used by machine dependent reorg passes. */
3010 unsigned int
3011 split_all_insns_noflow (void)
3013 rtx_insn *next, *insn;
3015 for (insn = get_insns (); insn; insn = next)
3017 next = NEXT_INSN (insn);
3018 if (INSN_P (insn))
3020 /* Don't split no-op move insns. These should silently
3021 disappear later in final. Splitting such insns would
3022 break the code that handles LIBCALL blocks. */
3023 rtx set = single_set (insn);
3024 if (set && set_noop_p (set))
3026 /* Nops get in the way while scheduling, so delete them
3027 now if register allocation has already been done. It
3028 is too risky to try to do this before register
3029 allocation, and there are unlikely to be very many
3030 nops then anyway.
3032 ??? Should we use delete_insn when the CFG isn't valid? */
3033 if (reload_completed)
3034 delete_insn_and_edges (insn);
3036 else
3037 split_insn (insn);
3040 return 0;
3043 #ifdef HAVE_peephole2
3044 struct peep2_insn_data
3046 rtx_insn *insn;
3047 regset live_before;
3050 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3051 static int peep2_current;
3053 static bool peep2_do_rebuild_jump_labels;
3054 static bool peep2_do_cleanup_cfg;
3056 /* The number of instructions available to match a peep2. */
3057 int peep2_current_count;
3059 /* A marker indicating the last insn of the block. The live_before regset
3060 for this element is correct, indicating DF_LIVE_OUT for the block. */
3061 #define PEEP2_EOB invalid_insn_rtx
3063 /* Wrap N to fit into the peep2_insn_data buffer. */
3065 static int
3066 peep2_buf_position (int n)
3068 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3069 n -= MAX_INSNS_PER_PEEP2 + 1;
3070 return n;
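/* For example, if MAX_INSNS_PER_PEEP2 were 25, the buffer would have 26
   slots and peep2_buf_position (25 + 3) would yield 2.  Callers are
   assumed never to pass an index more than one buffer length past the
   end, so a single subtraction suffices.  */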
3073 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3074 does not exist. Used by the recognizer to find the next insn to match
3075 in a multi-insn pattern. */
3077 rtx_insn *
3078 peep2_next_insn (int n)
3080 gcc_assert (n <= peep2_current_count);
3082 n = peep2_buf_position (peep2_current + n);
3084 return peep2_insn_data[n].insn;
3087 /* Return true if REGNO is dead before the Nth non-note insn
3088 after `current'. */
3090 int
3091 peep2_regno_dead_p (int ofs, int regno)
3093 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3095 ofs = peep2_buf_position (peep2_current + ofs);
3097 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3099 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3102 /* Similarly for a REG. */
3104 int
3105 peep2_reg_dead_p (int ofs, rtx reg)
3107 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3109 ofs = peep2_buf_position (peep2_current + ofs);
3111 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3113 unsigned int end_regno = END_REGNO (reg);
3114 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3115 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3116 return 0;
3117 return 1;
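/* Usage sketch (editorial illustration): peephole2 patterns call these
   from their C condition in an md file, e.g.:

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand" "")
             (match_operand:SI 1 "register_operand" ""))]
       "peep2_reg_dead_p (1, operands[1])"
       [...replacement insns...])

   where offset 1 means "just after the first matched insn".  */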
3120 /* Regno offset to be used in the register search. */
3121 static int search_ofs;
3123 /* Try to find a hard register of mode MODE, matching the register class in
3124 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3125 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3126 in which case the only condition is that the register must be available
3127 before CURRENT_INSN.
3128 Registers that already have bits set in REG_SET will not be considered.
3130 If an appropriate register is available, it will be returned and the
3131 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3132 returned. */
3134 rtx
3135 peep2_find_free_register (int from, int to, const char *class_str,
3136 machine_mode mode, HARD_REG_SET *reg_set)
3138 enum reg_class cl;
3139 HARD_REG_SET live;
3140 df_ref def;
3141 int i;
3143 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3144 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3146 from = peep2_buf_position (peep2_current + from);
3147 to = peep2_buf_position (peep2_current + to);
3149 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3150 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3152 while (from != to)
3154 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3156 /* Don't use registers set or clobbered by the insn. */
3157 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3158 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3160 from = peep2_buf_position (from + 1);
3163 cl = reg_class_for_constraint (lookup_constraint (class_str));
3165 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3167 int raw_regno, regno, success, j;
3169 /* Distribute the free registers as much as possible. */
3170 raw_regno = search_ofs + i;
3171 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3172 raw_regno -= FIRST_PSEUDO_REGISTER;
3173 #ifdef REG_ALLOC_ORDER
3174 regno = reg_alloc_order[raw_regno];
3175 #else
3176 regno = raw_regno;
3177 #endif
3179 /* Can it support the mode we need? */
3180 if (! HARD_REGNO_MODE_OK (regno, mode))
3181 continue;
3183 success = 1;
3184 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3186 /* Don't allocate fixed registers. */
3187 if (fixed_regs[regno + j])
3189 success = 0;
3190 break;
3192 /* Don't allocate global registers. */
3193 if (global_regs[regno + j])
3195 success = 0;
3196 break;
3198 /* Make sure the register is of the right class. */
3199 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3201 success = 0;
3202 break;
3204 /* And that we don't create an extra save/restore. */
3205 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3207 success = 0;
3208 break;
3211 if (! targetm.hard_regno_scratch_ok (regno + j))
3213 success = 0;
3214 break;
3217 /* And we don't clobber traceback for noreturn functions. */
3218 if ((regno + j == FRAME_POINTER_REGNUM
3219 || regno + j == HARD_FRAME_POINTER_REGNUM)
3220 && (! reload_completed || frame_pointer_needed))
3222 success = 0;
3223 break;
3226 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3227 || TEST_HARD_REG_BIT (live, regno + j))
3229 success = 0;
3230 break;
3234 if (success)
3236 add_to_hard_reg_set (reg_set, mode, regno);
3238 /* Start the next search with the next register. */
3239 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3240 raw_regno = 0;
3241 search_ofs = raw_regno;
3243 return gen_rtx_REG (mode, regno);
3247 search_ofs = 0;
3248 return NULL_RTX;
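/* Usage sketch (editorial illustration): a peephole2 that needs a
   scratch register might do, in its preparation statement:

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (scratch == NULL_RTX)
       FAIL;

   asking for a general register free across matched insns 0 and 1; the
   "r" constraint string and SImode are illustrative choices.  */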
3251 /* Forget all currently tracked instructions; remember only the current
3252 LIVE regset. */
3254 static void
3255 peep2_reinit_state (regset live)
3257 int i;
3259 /* Indicate that all slots except the last hold invalid data. */
3260 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3261 peep2_insn_data[i].insn = NULL;
3262 peep2_current_count = 0;
3264 /* Indicate that the last slot contains live_after data. */
3265 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3266 peep2_current = MAX_INSNS_PER_PEEP2;
3268 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3271 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3272 starting at INSN. Perform the replacement, removing the old insns and
3273 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3274 if the replacement is rejected. */
3276 static rtx_insn *
3277 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3279 int i;
3280 rtx_insn *last, *before_try, *x;
3281 rtx eh_note, as_note;
3282 rtx_insn *old_insn;
3283 rtx_insn *new_insn;
3284 bool was_call = false;
3286 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3287 match more than one insn, or to be split into more than one insn. */
3288 old_insn = peep2_insn_data[peep2_current].insn;
3289 if (RTX_FRAME_RELATED_P (old_insn))
3291 bool any_note = false;
3292 rtx note;
3294 if (match_len != 0)
3295 return NULL;
3297 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3298 may be in the stream for the purpose of register allocation. */
3299 if (active_insn_p (attempt))
3300 new_insn = attempt;
3301 else
3302 new_insn = next_active_insn (attempt);
3303 if (next_active_insn (new_insn))
3304 return NULL;
3306 /* We have a 1-1 replacement. Copy over any frame-related info. */
3307 RTX_FRAME_RELATED_P (new_insn) = 1;
3309 /* Allow the backend to fill in a note during the split. */
3310 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3311 switch (REG_NOTE_KIND (note))
3313 case REG_FRAME_RELATED_EXPR:
3314 case REG_CFA_DEF_CFA:
3315 case REG_CFA_ADJUST_CFA:
3316 case REG_CFA_OFFSET:
3317 case REG_CFA_REGISTER:
3318 case REG_CFA_EXPRESSION:
3319 case REG_CFA_RESTORE:
3320 case REG_CFA_SET_VDRAP:
3321 any_note = true;
3322 break;
3323 default:
3324 break;
3327 /* If the backend didn't supply a note, copy one over. */
3328 if (!any_note)
3329 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3330 switch (REG_NOTE_KIND (note))
3332 case REG_FRAME_RELATED_EXPR:
3333 case REG_CFA_DEF_CFA:
3334 case REG_CFA_ADJUST_CFA:
3335 case REG_CFA_OFFSET:
3336 case REG_CFA_REGISTER:
3337 case REG_CFA_EXPRESSION:
3338 case REG_CFA_RESTORE:
3339 case REG_CFA_SET_VDRAP:
3340 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3341 any_note = true;
3342 break;
3343 default:
3344 break;
3347 /* If there still isn't a note, make sure the unwind info sees the
3348 same expression as before the split. */
3349 if (!any_note)
3351 rtx old_set, new_set;
3353 /* The old insn had better have been simple, or annotated. */
3354 old_set = single_set (old_insn);
3355 gcc_assert (old_set != NULL);
3357 new_set = single_set (new_insn);
3358 if (!new_set || !rtx_equal_p (new_set, old_set))
3359 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3362 /* Copy prologue/epilogue status. This is required in order to keep
3363 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3364 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3367 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3368 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3369 cfg-related call notes. */
3370 for (i = 0; i <= match_len; ++i)
3372 int j;
3373 rtx note;
3375 j = peep2_buf_position (peep2_current + i);
3376 old_insn = peep2_insn_data[j].insn;
3377 if (!CALL_P (old_insn))
3378 continue;
3379 was_call = true;
3381 new_insn = attempt;
3382 while (new_insn != NULL_RTX)
3384 if (CALL_P (new_insn))
3385 break;
3386 new_insn = NEXT_INSN (new_insn);
3389 gcc_assert (new_insn != NULL_RTX);
3391 CALL_INSN_FUNCTION_USAGE (new_insn)
3392 = CALL_INSN_FUNCTION_USAGE (old_insn);
3393 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3395 for (note = REG_NOTES (old_insn);
3396 note;
3397 note = XEXP (note, 1))
3398 switch (REG_NOTE_KIND (note))
3400 case REG_NORETURN:
3401 case REG_SETJMP:
3402 case REG_TM:
3403 add_reg_note (new_insn, REG_NOTE_KIND (note),
3404 XEXP (note, 0));
3405 break;
3406 default:
3407 /* Discard all other reg notes. */
3408 break;
3411 /* Croak if there is another call in the sequence. */
3412 while (++i <= match_len)
3414 j = peep2_buf_position (peep2_current + i);
3415 old_insn = peep2_insn_data[j].insn;
3416 gcc_assert (!CALL_P (old_insn));
3418 break;
3421 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3422 move those notes over to the new sequence. */
3423 as_note = NULL;
3424 for (i = match_len; i >= 0; --i)
3426 int j = peep2_buf_position (peep2_current + i);
3427 old_insn = peep2_insn_data[j].insn;
3429 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3430 if (as_note)
3431 break;
3434 i = peep2_buf_position (peep2_current + match_len);
3435 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3437 /* Replace the old sequence with the new. */
3438 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3439 last = emit_insn_after_setloc (attempt,
3440 peep2_insn_data[i].insn,
3441 INSN_LOCATION (peepinsn));
3442 before_try = PREV_INSN (insn);
3443 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3445 /* Re-insert the EH_REGION notes. */
3446 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3448 edge eh_edge;
3449 edge_iterator ei;
3451 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3452 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3453 break;
3455 if (eh_note)
3456 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3458 if (eh_edge)
3459 for (x = last; x != before_try; x = PREV_INSN (x))
3460 if (x != BB_END (bb)
3461 && (can_throw_internal (x)
3462 || can_nonlocal_goto (x)))
3464 edge nfte, nehe;
3465 int flags;
3467 nfte = split_block (bb, x);
3468 flags = (eh_edge->flags
3469 & (EDGE_EH | EDGE_ABNORMAL));
3470 if (CALL_P (x))
3471 flags |= EDGE_ABNORMAL_CALL;
3472 nehe = make_edge (nfte->src, eh_edge->dest,
3473 flags);
3475 nehe->probability = eh_edge->probability;
3476 nfte->probability
3477 = REG_BR_PROB_BASE - nehe->probability;
3479 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3480 bb = nfte->src;
3481 eh_edge = nehe;
3484 /* The replacement may have turned a possibly trapping insn
3485 into a non-trapping one; zap the now-dummy outgoing edges. */
3486 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3489 /* Re-insert the ARGS_SIZE notes. */
3490 if (as_note)
3491 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3493 /* If we generated a jump instruction, it won't have
3494 JUMP_LABEL set. Recompute after we're done. */
3495 for (x = last; x != before_try; x = PREV_INSN (x))
3496 if (JUMP_P (x))
3498 peep2_do_rebuild_jump_labels = true;
3499 break;
3502 return last;
3505 /* After performing a replacement in basic block BB, fix up the life
3506 information in our buffer. LAST is the last of the insns that we
3507 emitted as a replacement. PREV is the insn before the start of
3508 the replacement. MATCH_LEN is the number of instructions that were
3509 matched, and which now need to be replaced in the buffer. */
3511 static void
3512 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3513 rtx_insn *prev)
3515 int i = peep2_buf_position (peep2_current + match_len + 1);
3516 rtx_insn *x;
3517 regset_head live;
3519 INIT_REG_SET (&live);
3520 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3522 gcc_assert (peep2_current_count >= match_len + 1);
3523 peep2_current_count -= match_len + 1;
3525 x = last;
3528 if (INSN_P (x))
3530 df_insn_rescan (x);
3531 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3533 peep2_current_count++;
3534 if (--i < 0)
3535 i = MAX_INSNS_PER_PEEP2;
3536 peep2_insn_data[i].insn = x;
3537 df_simulate_one_insn_backwards (bb, x, &live);
3538 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3541 x = PREV_INSN (x);
3543 while (x != prev);
3544 CLEAR_REG_SET (&live);
3546 peep2_current = i;
3549 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3550 Return true if we added it, false otherwise. The caller will try to match
3551 peepholes against the buffer if we return false; otherwise it will try to
3552 add more instructions to the buffer. */
3554 static bool
3555 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3557 int pos;
3559 /* Once we have filled the maximum number of insns the buffer can hold,
3560 allow the caller to match the insns against peepholes. We wait until
3561 the buffer is full in case the target has similar peepholes of different
3562 length; we always want to match the longest if possible. */
3563 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3564 return false;
3566 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3567 any other pattern, lest it change the semantics of the frame info. */
3568 if (RTX_FRAME_RELATED_P (insn))
3570 /* Let the buffer drain first. */
3571 if (peep2_current_count > 0)
3572 return false;
3573 /* Now the insn will be the only thing in the buffer. */
3576 pos = peep2_buf_position (peep2_current + peep2_current_count);
3577 peep2_insn_data[pos].insn = insn;
3578 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3579 peep2_current_count++;
3581 df_simulate_one_insn_forwards (bb, insn, live);
3582 return true;
3585 /* Perform the peephole2 optimization pass. */
3587 static void
3588 peephole2_optimize (void)
3590 rtx_insn *insn;
3591 bitmap live;
3592 int i;
3593 basic_block bb;
3595 peep2_do_cleanup_cfg = false;
3596 peep2_do_rebuild_jump_labels = false;
3598 df_set_flags (DF_LR_RUN_DCE);
3599 df_note_add_problem ();
3600 df_analyze ();
3602 /* Initialize the regsets we're going to use. */
3603 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3604 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3605 search_ofs = 0;
3606 live = BITMAP_ALLOC (&reg_obstack);
3608 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3610 bool past_end = false;
3611 int pos;
3613 rtl_profile_for_bb (bb);
3615 /* Start up propagation. */
3616 bitmap_copy (live, DF_LR_IN (bb));
3617 df_simulate_initialize_forwards (bb, live);
3618 peep2_reinit_state (live);
3620 insn = BB_HEAD (bb);
3621 for (;;)
3623 rtx_insn *attempt, *head;
3624 int match_len;
3626 if (!past_end && !NONDEBUG_INSN_P (insn))
3628 next_insn:
3629 insn = NEXT_INSN (insn);
3630 if (insn == NEXT_INSN (BB_END (bb)))
3631 past_end = true;
3632 continue;
3634 if (!past_end && peep2_fill_buffer (bb, insn, live))
3635 goto next_insn;
3637 /* If we did not fill an empty buffer, it signals the end of the
3638 block. */
3639 if (peep2_current_count == 0)
3640 break;
3642 /* The buffer filled to the current maximum, so try to match. */
3644 pos = peep2_buf_position (peep2_current + peep2_current_count);
3645 peep2_insn_data[pos].insn = PEEP2_EOB;
3646 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3648 /* Match the peephole. */
3649 head = peep2_insn_data[peep2_current].insn;
3650 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3651 if (attempt != NULL)
3653 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3654 if (last)
3656 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3657 continue;
3661 /* No match: advance the buffer by one insn. */
3662 peep2_current = peep2_buf_position (peep2_current + 1);
3663 peep2_current_count--;
3667 default_rtl_profile ();
3668 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3669 BITMAP_FREE (peep2_insn_data[i].live_before);
3670 BITMAP_FREE (live);
3671 if (peep2_do_rebuild_jump_labels)
3672 rebuild_jump_labels (get_insns ());
3673 if (peep2_do_cleanup_cfg)
3674 cleanup_cfg (CLEANUP_CFG_CHANGED);
3676 #endif /* HAVE_peephole2 */
3678 /* Common predicates for use with define_bypass. */
3680 /* True if the dependency between OUT_INSN and IN_INSN is on the stored
3681 data, not on the address operand(s) of the store. IN_INSN and OUT_INSN
3682 must be either a single_set or a PARALLEL with SETs inside. */
3684 int
3685 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3687 rtx out_set, in_set;
3688 rtx out_pat, in_pat;
3689 rtx out_exp, in_exp;
3690 int i, j;
3692 in_set = single_set (in_insn);
3693 if (in_set)
3695 if (!MEM_P (SET_DEST (in_set)))
3696 return false;
3698 out_set = single_set (out_insn);
3699 if (out_set)
3701 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3702 return false;
3704 else
3706 out_pat = PATTERN (out_insn);
3708 if (GET_CODE (out_pat) != PARALLEL)
3709 return false;
3711 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3713 out_exp = XVECEXP (out_pat, 0, i);
3715 if (GET_CODE (out_exp) == CLOBBER)
3716 continue;
3718 gcc_assert (GET_CODE (out_exp) == SET);
3720 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3721 return false;
3725 else
3727 in_pat = PATTERN (in_insn);
3728 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3730 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3732 in_exp = XVECEXP (in_pat, 0, i);
3734 if (GET_CODE (in_exp) == CLOBBER)
3735 continue;
3737 gcc_assert (GET_CODE (in_exp) == SET);
3739 if (!MEM_P (SET_DEST (in_exp)))
3740 return false;
3742 out_set = single_set (out_insn);
3743 if (out_set)
3745 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3746 return false;
3748 else
3750 out_pat = PATTERN (out_insn);
3751 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3753 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3755 out_exp = XVECEXP (out_pat, 0, j);
3757 if (GET_CODE (out_exp) == CLOBBER)
3758 continue;
3760 gcc_assert (GET_CODE (out_exp) == SET);
3762 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3763 return false;
3769 return true;
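/* Usage sketch (editorial illustration): this predicate and
   if_test_bypass_p below are meant to be named as the guard of a
   define_bypass in an md file, e.g.:

     (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")

   with "alu_insn" and "store_insn" standing in for real reservation
   names; the reduced latency then applies only when the dependency is
   on the stored data rather than on the address.  */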
3772 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3773 condition, and not in the THEN or ELSE branch. OUT_INSN may be either a
3774 single set or multiple sets; IN_INSN should be a single_set for the result
3775 to be exact, but for convenience of insn categorization it may be any JUMP or CALL insn. */
3777 int
3778 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3780 rtx out_set, in_set;
3782 in_set = single_set (in_insn);
3783 if (! in_set)
3785 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3786 return false;
3789 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3790 return false;
3791 in_set = SET_SRC (in_set);
3793 out_set = single_set (out_insn);
3794 if (out_set)
3796 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3797 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3798 return false;
3800 else
3802 rtx out_pat;
3803 int i;
3805 out_pat = PATTERN (out_insn);
3806 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3808 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3810 rtx exp = XVECEXP (out_pat, 0, i);
3812 if (GET_CODE (exp) == CLOBBER)
3813 continue;
3815 gcc_assert (GET_CODE (exp) == SET);
3817 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3818 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3819 return false;
3823 return true;
3826 static unsigned int
3827 rest_of_handle_peephole2 (void)
3829 #ifdef HAVE_peephole2
3830 peephole2_optimize ();
3831 #endif
3832 return 0;
3835 namespace {
3837 const pass_data pass_data_peephole2 =
3839 RTL_PASS, /* type */
3840 "peephole2", /* name */
3841 OPTGROUP_NONE, /* optinfo_flags */
3842 TV_PEEPHOLE2, /* tv_id */
3843 0, /* properties_required */
3844 0, /* properties_provided */
3845 0, /* properties_destroyed */
3846 0, /* todo_flags_start */
3847 TODO_df_finish, /* todo_flags_finish */
3850 class pass_peephole2 : public rtl_opt_pass
3852 public:
3853 pass_peephole2 (gcc::context *ctxt)
3854 : rtl_opt_pass (pass_data_peephole2, ctxt)
3857 /* opt_pass methods: */
3858 /* The epiphany backend creates a second instance of this pass, so we need
3859 a clone method. */
3860 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3861 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3862 virtual unsigned int execute (function *)
3864 return rest_of_handle_peephole2 ();
3867 }; // class pass_peephole2
3869 } // anon namespace
3871 rtl_opt_pass *
3872 make_pass_peephole2 (gcc::context *ctxt)
3874 return new pass_peephole2 (ctxt);
3877 namespace {
3879 const pass_data pass_data_split_all_insns =
3881 RTL_PASS, /* type */
3882 "split1", /* name */
3883 OPTGROUP_NONE, /* optinfo_flags */
3884 TV_NONE, /* tv_id */
3885 0, /* properties_required */
3886 0, /* properties_provided */
3887 0, /* properties_destroyed */
3888 0, /* todo_flags_start */
3889 0, /* todo_flags_finish */
3892 class pass_split_all_insns : public rtl_opt_pass
3894 public:
3895 pass_split_all_insns (gcc::context *ctxt)
3896 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3899 /* opt_pass methods: */
3900 /* The epiphany backend creates a second instance of this pass, so
3901 we need a clone method. */
3902 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3903 virtual unsigned int execute (function *)
3905 split_all_insns ();
3906 return 0;
3909 }; // class pass_split_all_insns
3911 } // anon namespace
3913 rtl_opt_pass *
3914 make_pass_split_all_insns (gcc::context *ctxt)
3916 return new pass_split_all_insns (ctxt);
3919 static unsigned int
3920 rest_of_handle_split_after_reload (void)
3922 /* If optimizing, then go ahead and split insns now. */
3923 #ifndef STACK_REGS
3924 if (optimize > 0)
3925 #endif
3926 split_all_insns ();
3927 return 0;
3930 namespace {
3932 const pass_data pass_data_split_after_reload =
3934 RTL_PASS, /* type */
3935 "split2", /* name */
3936 OPTGROUP_NONE, /* optinfo_flags */
3937 TV_NONE, /* tv_id */
3938 0, /* properties_required */
3939 0, /* properties_provided */
3940 0, /* properties_destroyed */
3941 0, /* todo_flags_start */
3942 0, /* todo_flags_finish */
3945 class pass_split_after_reload : public rtl_opt_pass
3947 public:
3948 pass_split_after_reload (gcc::context *ctxt)
3949 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3952 /* opt_pass methods: */
3953 virtual unsigned int execute (function *)
3955 return rest_of_handle_split_after_reload ();
3958 }; // class pass_split_after_reload
3960 } // anon namespace
3962 rtl_opt_pass *
3963 make_pass_split_after_reload (gcc::context *ctxt)
3965 return new pass_split_after_reload (ctxt);
3968 namespace {
3970 const pass_data pass_data_split_before_regstack =
3972 RTL_PASS, /* type */
3973 "split3", /* name */
3974 OPTGROUP_NONE, /* optinfo_flags */
3975 TV_NONE, /* tv_id */
3976 0, /* properties_required */
3977 0, /* properties_provided */
3978 0, /* properties_destroyed */
3979 0, /* todo_flags_start */
3980 0, /* todo_flags_finish */
3983 class pass_split_before_regstack : public rtl_opt_pass
3985 public:
3986 pass_split_before_regstack (gcc::context *ctxt)
3987 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3990 /* opt_pass methods: */
3991 virtual bool gate (function *);
3992 virtual unsigned int execute (function *)
3994 split_all_insns ();
3995 return 0;
3998 }; // class pass_split_before_regstack
4000 bool
4001 pass_split_before_regstack::gate (function *)
4003 #if HAVE_ATTR_length && defined (STACK_REGS)
4004 /* If flow2 creates new instructions which need splitting
4005 and scheduling after reload has not been done, they might not be
4006 split until final, which does not allow splitting
4007 when HAVE_ATTR_length is defined. */
4008 # ifdef INSN_SCHEDULING
4009 return (optimize && !flag_schedule_insns_after_reload);
4010 # else
4011 return (optimize);
4012 # endif
4013 #else
4014 return 0;
4015 #endif
4018 } // anon namespace
4020 rtl_opt_pass *
4021 make_pass_split_before_regstack (gcc::context *ctxt)
4023 return new pass_split_before_regstack (ctxt);
4026 static unsigned int
4027 rest_of_handle_split_before_sched2 (void)
4029 #ifdef INSN_SCHEDULING
4030 split_all_insns ();
4031 #endif
4032 return 0;
4035 namespace {
4037 const pass_data pass_data_split_before_sched2 =
4039 RTL_PASS, /* type */
4040 "split4", /* name */
4041 OPTGROUP_NONE, /* optinfo_flags */
4042 TV_NONE, /* tv_id */
4043 0, /* properties_required */
4044 0, /* properties_provided */
4045 0, /* properties_destroyed */
4046 0, /* todo_flags_start */
4047 0, /* todo_flags_finish */
4050 class pass_split_before_sched2 : public rtl_opt_pass
4052 public:
4053 pass_split_before_sched2 (gcc::context *ctxt)
4054 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4057 /* opt_pass methods: */
4058 virtual bool gate (function *)
4060 #ifdef INSN_SCHEDULING
4061 return optimize > 0 && flag_schedule_insns_after_reload;
4062 #else
4063 return false;
4064 #endif
4067 virtual unsigned int execute (function *)
4069 return rest_of_handle_split_before_sched2 ();
4072 }; // class pass_split_before_sched2
4074 } // anon namespace
4076 rtl_opt_pass *
4077 make_pass_split_before_sched2 (gcc::context *ctxt)
4079 return new pass_split_before_sched2 (ctxt);
4082 namespace {
4084 const pass_data pass_data_split_for_shorten_branches =
4086 RTL_PASS, /* type */
4087 "split5", /* name */
4088 OPTGROUP_NONE, /* optinfo_flags */
4089 TV_NONE, /* tv_id */
4090 0, /* properties_required */
4091 0, /* properties_provided */
4092 0, /* properties_destroyed */
4093 0, /* todo_flags_start */
4094 0, /* todo_flags_finish */
4097 class pass_split_for_shorten_branches : public rtl_opt_pass
4099 public:
4100 pass_split_for_shorten_branches (gcc::context *ctxt)
4101 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4104 /* opt_pass methods: */
4105 virtual bool gate (function *)
4107 /* The placement of the splitting that we do for shorten_branches
4108 depends on whether regstack is used by the target or not. */
4109 #if HAVE_ATTR_length && !defined (STACK_REGS)
4110 return true;
4111 #else
4112 return false;
4113 #endif
4116 virtual unsigned int execute (function *)
4118 return split_all_insns_noflow ();
4121 }; // class pass_split_for_shorten_branches
4123 } // anon namespace
4125 rtl_opt_pass *
4126 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4128 return new pass_split_for_shorten_branches (ctxt);
4131 /* (Re)initialize the target information after a change in target. */
4133 void
4134 recog_init ()
4136 /* The information is zero-initialized, so we don't need to do anything
4137 the first time round. */
4138 if (!this_target_recog->x_initialized)
4140 this_target_recog->x_initialized = true;
4141 return;
4143 memset (this_target_recog->x_bool_attr_masks, 0,
4144 sizeof (this_target_recog->x_bool_attr_masks));
4145 for (int i = 0; i < LAST_INSN_CODE; ++i)
4146 if (this_target_recog->x_op_alt[i])
4148 free (this_target_recog->x_op_alt[i]);
4149 this_target_recog->x_op_alt[i] = 0;