/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
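
/* A minimal usage sketch (illustrative, not part of the original file):
   passes that must recognize every valid insn, including ones with
   volatile memory operands, first call

       init_recog ();
       ...
       int icode = recog_memoized (insn);

   while rtl-generation code calls init_recog_no_volatile () so that the
   operand predicates below reject volatile memory references.  */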
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 that omits the UNSHARE argument,
   defaulting it to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 that omits the UNSHARE argument,
   defaulting it to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
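
/* A minimal usage sketch (illustrative, not part of the original file):
   queueing two changes as one group and letting apply_change_group
   accept or reject them atomically.  NEW0 and NEW1 stand for
   replacement rtxes the caller has already built.

       validate_change (insn, &XEXP (x, 0), new0, 1);
       validate_change (insn, &XEXP (x, 1), new1, 1);
       if (!apply_change_group ())
	 ... both changes have been rolled back ...  */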
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
		      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
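
/* For illustration (not from the original file): before reload, a bare
   pattern such as

       (set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))

   may only match a define_insn that also clobbers a condition-code
   register.  recog then reports num_clobbers > 0 and the code above
   rewrites the pattern as

       (parallel [(set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))
		  (clobber (reg:CC FLAGS_REG))])

   where FLAGS_REG stands for whatever hard register add_clobbers picks
   on the target.  */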
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
		  REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
	 are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
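
/* A minimal sketch of the lower-level protocol (illustrative, not part
   of the original file): callers wanting finer control than
   apply_change_group can verify a tail of the group and then either
   confirm or cancel it themselves.

       int start = num_validated_changes ();
       ... queue more changes with validate_change (..., 1) ...
       if (verify_changes (start))
	 confirm_change_group ();
       else
	 cancel_changes (start);

   Note that confirm_change_group commits the whole group, so this
   pattern is typically used with START == 0 or in cooperation with a
   caller that owns the earlier changes.  */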
/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif
/* A subroutine of validate_replace_rtx_1 that tries to simplify the
   resulting rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
			  machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }
  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  machine_mode wanted_mode = VOIDmode;
	  machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
	    {
	      wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
	    {
	      wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.
     We must not replace inside an already-replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X)),
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
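
/* A minimal usage sketch (illustrative, not part of the original file):
   propagating a copy by rewriting one insn, where SRC_REG and DEST_REG
   are hypothetical pseudos taken from a preceding (set DEST_REG SRC_REG):

       if (validate_replace_rtx (dest_reg, src_reg, use_insn))
	 ... USE_INSN now uses SRC_REG and still matches its pattern ...

   On failure the insn is left exactly as it was.  */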
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */

int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
				      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */

struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx_insn *insn;		/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
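
/* A minimal usage sketch (illustrative, not part of the original file):
   validate_replace_src_group queues changes without applying them, so a
   caller pairs it with apply_change_group:

       validate_replace_src_group (old_reg, new_reg, insn);
       if (!apply_change_group ())
	 ... INSN was left unchanged ...

   OLD_REG and NEW_REG are hypothetical pseudos; SET_DESTs of INSN are
   deliberately not rewritten.  */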
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx_insn *insn)
{
  rtx_insn *next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in an incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insns with invalid addresses, which are made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
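
/* For illustration (not from the original file): machine descriptions
   use these predicates in match_operand expressions, e.g. a move
   pattern might contain

       (define_insn "*movsi_internal"
	 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,m")
	       (match_operand:SI 1 "general_operand" "rm,r"))]
	 ...)

   and genrecog arranges for general_operand (operands[1], SImode) to be
   called during recognition.  The pattern shown is a generic sketch,
   not one taken from any particular target.  */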
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, machine_mode mode)
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
	return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && (lra_in_progress
		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op)
					    : mode, op));
}
/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */

int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return 0;

      if (prec == bitsize)
	return 1;
      else
	{
	  /* Multiword partial int.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
	  && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
	  || INTVAL (XEXP (XEXP (op, 1), 1))
	     != ((STACK_GROWS_DOWNWARD ? -1 : 1) * (int) rounded_size))
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
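
/* For illustration (not from the original file): on a target where the
   stack grows downward and STACK_PUSH_CODE is PRE_DEC, a push of an
   SImode value looks like

       (set (mem:SI (pre_dec:SI (reg/f:SI sp))) (reg:SI 70))

   and push_operand accepts the (mem:SI (pre_dec:SI ...)) destination.
   The PRE_MODIFY form handled above covers targets whose PUSH_ROUNDING
   pads the pushed size.  */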
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, machine_mode *modes,
		     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
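
/* A minimal usage sketch (illustrative, not part of the original file):
   a caller that wants every operand and constraint of an asm insn
   allocates buffers sized by asm_noperands and lets decode_asm_operands
   fill them, much as check_asm_operands does above:

       int nop = asm_noperands (PATTERN (insn));
       rtx *ops = XALLOCAVEC (rtx, nop);
       const char **cons = XALLOCAVEC (const char *, nop);
       const char *templ
	 = decode_asm_operands (PATTERN (insn), ops, NULL, cons, NULL, NULL);

   after which ops[0..nop-1] holds outputs, then inputs, then labels, in
   that order (the caller must first check nop >= 0).  */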
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
			 unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
	p += 1;
	/* A letter followed by a digit indicates an operand number.  */
	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
	  p += 1;
	if (ISDIGIT (*p))
	  {
	    char *endptr;
	    unsigned long opnum = strtoul (p, &endptr, 10);
	    if (endptr != p && opnum < noperands)
	      used[opnum] = true;
	    p = endptr;
	  }
	else
	  p += 1;
	break;

      default:
	p++;
	break;
      }
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	  /* The rest of the compiler assumes that reloading the address
	     of a MEM into a register will make it fit an 'o' constraint.
	     That is, if it sees a MEM operand for an 'o' constraint,
	     it assumes that (mem (base-reg)) will fit.

	     That assumption fails on targets that don't have offsettable
	     addresses at all.  We therefore need to treat 'o' asm
	     constraints as a special case and only accept operands that
	     are already offsettable, thus proving that at least one
	     offsettable address exists.  */
	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

#ifdef AUTO_INC_DEC
	case '<':
	case '>':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */
	  incdec_ok = true;
#endif
	default:
	  cn = lookup_constraint (constraint);
	  switch (get_constraint_type (cn))
	    {
	    case CT_REGISTER:
	      if (!result
		  && reg_class_for_constraint (cn) != NO_REGS
		  && GET_MODE (op) != BLKmode
		  && register_operand (op, VOIDmode))
		result = 1;
	      break;

	    case CT_CONST_INT:
	      if (!result
		  && CONST_INT_P (op)
		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
		result = 1;
	      break;

	    case CT_MEMORY:
	      /* Every memory operand can be reloaded to fit.  */
	      result = result || memory_operand (op, VOIDmode);
	      break;

	    case CT_ADDRESS:
	      /* Every address operand can be reloaded to fit.  */
	      result = result || address_operand (op, VOIDmode);
	      break;

	    case CT_FIXED_FORM:
	      result = result || constraint_satisfied_p (op, cn);
	      break;
	    }
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
	return 0;
      default:
	break;
      }
#endif

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
	     : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
2039 /* Return 1 if ADDR is an address-expression whose effect depends
2040 on the mode of the memory reference it is used in.
2042 ADDRSPACE is the address space associated with the address.
2044 Autoincrement addressing is a typical example of mode-dependence
2045 because the amount of the increment depends on the mode. */
2047 bool
2048 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2050 /* Auto-increment addressing with anything other than post_modify
2051 or pre_modify always introduces a mode dependency. Catch such
2052 cases now instead of deferring to the target. */
2053 if (GET_CODE (addr) == PRE_INC
2054 || GET_CODE (addr) == POST_INC
2055 || GET_CODE (addr) == PRE_DEC
2056 || GET_CODE (addr) == POST_DEC)
2057 return true;
2059 return targetm.mode_dependent_address_p (addr, addrspace);
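/* Illustrative sketch, not part of the original file: a bare POST_INC
   address is caught by the check above before the target hook is even
   consulted, because the increment amount equals the access size and so
   depends on the mode.  Register number 1 is arbitrary.  */
#if 0
static void
example_post_inc_is_mode_dependent (void)
{
  rtx addr = gen_rtx_POST_INC (Pmode, gen_rtx_REG (Pmode, 1));
  gcc_assert (mode_dependent_address_p (addr, ADDR_SPACE_GENERIC));
}
#endif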
2062 /* Return true if boolean attribute ATTR is supported. */
2064 static bool
2065 have_bool_attr (bool_attr attr)
2067 switch (attr)
2069 case BA_ENABLED:
2070 return HAVE_ATTR_enabled;
2071 case BA_PREFERRED_FOR_SIZE:
2072 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2073 case BA_PREFERRED_FOR_SPEED:
2074 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2076 gcc_unreachable ();
2079 /* Return the value of ATTR for instruction INSN. */
2081 static bool
2082 get_bool_attr (rtx_insn *insn, bool_attr attr)
2084 switch (attr)
2086 case BA_ENABLED:
2087 return get_attr_enabled (insn);
2088 case BA_PREFERRED_FOR_SIZE:
2089 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2090 case BA_PREFERRED_FOR_SPEED:
2091 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2093 gcc_unreachable ();
2096 /* Like get_bool_attr_mask, but don't use the cache. */
2098 static alternative_mask
2099 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2101 /* Temporarily install enough information for get_attr_<foo> to assume
2102 that the insn operands are already cached. As above, the attribute
2103 mustn't depend on the values of operands, so we don't provide their
2104 real values here. */
2105 rtx_insn *old_insn = recog_data.insn;
2106 int old_alternative = which_alternative;
2108 recog_data.insn = insn;
2109 alternative_mask mask = ALL_ALTERNATIVES;
2110 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2111 for (int i = 0; i < n_alternatives; i++)
2113 which_alternative = i;
2114 if (!get_bool_attr (insn, attr))
2115 mask &= ~ALTERNATIVE_BIT (i);
2118 recog_data.insn = old_insn;
2119 which_alternative = old_alternative;
2120 return mask;
2123 /* Return the mask of operand alternatives that are allowed for INSN
2124 by boolean attribute ATTR. This mask depends only on INSN and on
2125 the current target; it does not depend on things like the values of
2126 operands. */
2128 static alternative_mask
2129 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2131 /* Quick exit for asms and for targets that don't use these attributes. */
2132 int code = INSN_CODE (insn);
2133 if (code < 0 || !have_bool_attr (attr))
2134 return ALL_ALTERNATIVES;
2136 /* Calling get_attr_<foo> can be expensive, so cache the mask
2137 for speed. */
2138 if (!this_target_recog->x_bool_attr_masks[code][attr])
2139 this_target_recog->x_bool_attr_masks[code][attr]
2140 = get_bool_attr_mask_uncached (insn, attr);
2141 return this_target_recog->x_bool_attr_masks[code][attr];
2144 /* Return the set of alternatives of INSN that are allowed by the current
2145 target. */
2147 alternative_mask
2148 get_enabled_alternatives (rtx_insn *insn)
2150 return get_bool_attr_mask (insn, BA_ENABLED);
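/* Illustrative sketch, not part of the original file: consuming the mask
   bit by bit, the same way constrain_operands tests it with TEST_BIT.
   Assumes INSN has already been recognized (INSN_CODE >= 0).  */
#if 0
static int
example_count_enabled_alternatives (rtx_insn *insn)
{
  alternative_mask enabled = get_enabled_alternatives (insn);
  int n = insn_data[INSN_CODE (insn)].n_alternatives;
  int count = 0;
  for (int i = 0; i < n; i++)
    if (TEST_BIT (enabled, i))
      count++;
  return count;
}
#endif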
2153 /* Return the set of alternatives of INSN that are allowed by the current
2154 target and are preferred for the current size/speed optimization
2155 choice. */
2157 alternative_mask
2158 get_preferred_alternatives (rtx_insn *insn)
2160 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2161 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2162 else
2163 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2166 /* Return the set of alternatives of INSN that are allowed by the current
2167 target and are preferred for the size/speed optimization choice
2168 associated with BB. Passing a separate BB is useful if INSN has not
2169 been emitted yet or if we are considering moving it to a different
2170 block. */
2172 alternative_mask
2173 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2175 if (optimize_bb_for_speed_p (bb))
2176 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2177 else
2178 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2181 /* Assert that the cached boolean attributes for INSN are still accurate.
2182 The backend is required to define these attributes in a way that only
2183 depends on the current target (rather than operands, compiler phase,
2184 etc.). */
2186 bool
2187 check_bool_attrs (rtx_insn *insn)
2189 int code = INSN_CODE (insn);
2190 if (code >= 0)
2191 for (int i = 0; i <= BA_LAST; ++i)
2193 enum bool_attr attr = (enum bool_attr) i;
2194 if (this_target_recog->x_bool_attr_masks[code][attr])
2195 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2196 == get_bool_attr_mask_uncached (insn, attr));
2198 return true;
2201 /* Like extract_insn, but save the extracted insn and don't extract again
2202 when called again for the same insn, expecting that recog_data still
2203 contains the valid information. This is used primarily by the gen_attr
2204 infrastructure, which often extracts the same insn again and again. */
2205 void
2206 extract_insn_cached (rtx_insn *insn)
2208 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2209 return;
2210 extract_insn (insn);
2211 recog_data.insn = insn;
2214 /* Do uncached extract_insn, constrain_operands and complain about failures.
2215 This should be used when extracting a pre-existing constrained instruction
2216 if the caller wants to know which alternative was chosen. */
2217 void
2218 extract_constrain_insn (rtx_insn *insn)
2220 extract_insn (insn);
2221 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2222 fatal_insn_not_found (insn);
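/* Illustrative sketch, not part of the original file: the usual idiom in
   output and attribute code for discovering which constraint alternative
   an already-recognized insn satisfies.  */
#if 0
static int
example_matched_alternative (rtx_insn *insn)
{
  extract_constrain_insn (insn);
  /* constrain_operands has set the global which_alternative.  */
  return which_alternative;
}
#endif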
2225 /* Do cached extract_insn, constrain_operands and complain about failures.
2226 Used by insn_attrtab. */
2227 void
2228 extract_constrain_insn_cached (rtx_insn *insn)
2230 extract_insn_cached (insn);
2231 if (which_alternative == -1
2232 && !constrain_operands (reload_completed,
2233 get_enabled_alternatives (insn)))
2234 fatal_insn_not_found (insn);
2237 /* Do cached constrain_operands on INSN and complain about failures. */
2238 int
2239 constrain_operands_cached (rtx_insn *insn, int strict)
2241 if (which_alternative == -1)
2242 return constrain_operands (strict, get_enabled_alternatives (insn));
2243 else
2244 return 1;
2247 /* Analyze INSN and fill in recog_data. */
2249 void
2250 extract_insn (rtx_insn *insn)
2252 int i;
2253 int icode;
2254 int noperands;
2255 rtx body = PATTERN (insn);
2257 recog_data.n_operands = 0;
2258 recog_data.n_alternatives = 0;
2259 recog_data.n_dups = 0;
2260 recog_data.is_asm = false;
2262 switch (GET_CODE (body))
2264 case USE:
2265 case CLOBBER:
2266 case ASM_INPUT:
2267 case ADDR_VEC:
2268 case ADDR_DIFF_VEC:
2269 case VAR_LOCATION:
2270 return;
2272 case SET:
2273 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2274 goto asm_insn;
2275 else
2276 goto normal_insn;
2277 case PARALLEL:
2278 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2279 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2280 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2281 goto asm_insn;
2282 else
2283 goto normal_insn;
2284 case ASM_OPERANDS:
2285 asm_insn:
2286 recog_data.n_operands = noperands = asm_noperands (body);
2287 if (noperands >= 0)
2289 /* This insn is an `asm' with operands. */
2291 /* expand_asm_operands makes sure there aren't too many operands. */
2292 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2294 /* Now get the operand values and constraints out of the insn. */
2295 decode_asm_operands (body, recog_data.operand,
2296 recog_data.operand_loc,
2297 recog_data.constraints,
2298 recog_data.operand_mode, NULL);
2299 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2300 if (noperands > 0)
2302 const char *p = recog_data.constraints[0];
2303 recog_data.n_alternatives = 1;
2304 while (*p)
2305 recog_data.n_alternatives += (*p++ == ',');
2307 recog_data.is_asm = true;
2308 break;
2310 fatal_insn_not_found (insn);
2312 default:
2313 normal_insn:
2314 /* Ordinary insn: recognize it, get the operands via insn_extract
2315 and get the constraints. */
2317 icode = recog_memoized (insn);
2318 if (icode < 0)
2319 fatal_insn_not_found (insn);
2321 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2322 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2323 recog_data.n_dups = insn_data[icode].n_dups;
2325 insn_extract (insn);
2327 for (i = 0; i < noperands; i++)
2329 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2330 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2331 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2332 /* A VOIDmode match_operand gets its mode from the real operand. */
2333 if (recog_data.operand_mode[i] == VOIDmode)
2334 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2337 for (i = 0; i < noperands; i++)
2338 recog_data.operand_type[i]
2339 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2340 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2341 : OP_IN);
2343 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2345 recog_data.insn = NULL;
2346 which_alternative = -1;
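/* Illustrative sketch, not part of the original file: walking the
   recog_data fields that extract_insn has just filled in.  */
#if 0
static void
example_dump_extracted_operands (rtx_insn *insn, FILE *f)
{
  extract_insn (insn);
  for (int i = 0; i < recog_data.n_operands; i++)
    fprintf (f, "operand %d: mode %s, constraint \"%s\", %s\n",
             i, GET_MODE_NAME (recog_data.operand_mode[i]),
             recog_data.constraints[i],
             recog_data.operand_type[i] == OP_OUT ? "output"
             : recog_data.operand_type[i] == OP_INOUT ? "in/out"
             : "input");
}
#endif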
2349 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2350 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2351 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2352 has N_OPERANDS entries. */
2354 void
2355 preprocess_constraints (int n_operands, int n_alternatives,
2356 const char **constraints,
2357 operand_alternative *op_alt_base)
2359 for (int i = 0; i < n_operands; i++)
2361 int j;
2362 struct operand_alternative *op_alt;
2363 const char *p = constraints[i];
2365 op_alt = op_alt_base;
2367 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2369 op_alt[i].cl = NO_REGS;
2370 op_alt[i].constraint = p;
2371 op_alt[i].matches = -1;
2372 op_alt[i].matched = -1;
2374 if (*p == '\0' || *p == ',')
2376 op_alt[i].anything_ok = 1;
2377 continue;
2380 for (;;)
2382 char c = *p;
2383 if (c == '#')
2384 do
2385 c = *++p;
2386 while (c != ',' && c != '\0');
2387 if (c == ',' || c == '\0')
2389 p++;
2390 break;
2393 switch (c)
2395 case '?':
2396 op_alt[i].reject += 6;
2397 break;
2398 case '!':
2399 op_alt[i].reject += 600;
2400 break;
2401 case '&':
2402 op_alt[i].earlyclobber = 1;
2403 break;
2405 case '0': case '1': case '2': case '3': case '4':
2406 case '5': case '6': case '7': case '8': case '9':
2408 char *end;
2409 op_alt[i].matches = strtoul (p, &end, 10);
2410 op_alt[op_alt[i].matches].matched = i;
2411 p = end;
2413 continue;
2415 case 'X':
2416 op_alt[i].anything_ok = 1;
2417 break;
2419 case 'g':
2420 op_alt[i].cl =
2421 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2422 break;
2424 default:
2425 enum constraint_num cn = lookup_constraint (p);
2426 enum reg_class cl;
2427 switch (get_constraint_type (cn))
2429 case CT_REGISTER:
2430 cl = reg_class_for_constraint (cn);
2431 if (cl != NO_REGS)
2432 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2433 break;
2435 case CT_CONST_INT:
2436 break;
2438 case CT_MEMORY:
2439 op_alt[i].memory_ok = 1;
2440 break;
2442 case CT_ADDRESS:
2443 op_alt[i].is_address = 1;
2444 op_alt[i].cl
2445 = (reg_class_subunion
2446 [(int) op_alt[i].cl]
2447 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2448 ADDRESS, SCRATCH)]);
2449 break;
2451 case CT_FIXED_FORM:
2452 break;
2454 break;
2456 p += CONSTRAINT_LEN (c, p);
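/* Illustrative sketch, not part of the original file: the flat layout the
   loop above fills in.  Operand OP of alternative ALT lives at index
   ALT * n_operands + OP, which is why the inner loop steps op_alt by
   n_operands to advance one whole alternative at a time.  */
#if 0
static const operand_alternative *
example_op_alt_entry (const operand_alternative *op_alt_base,
                      int n_operands, int alt, int op)
{
  return &op_alt_base[alt * n_operands + op];
}
#endif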
2462 /* Return an array of operand_alternative structures for
2463 instruction ICODE. */
2465 const operand_alternative *
2466 preprocess_insn_constraints (int icode)
2468 gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
2469 if (this_target_recog->x_op_alt[icode])
2470 return this_target_recog->x_op_alt[icode];
2472 int n_operands = insn_data[icode].n_operands;
2473 if (n_operands == 0)
2474 return 0;
2475 /* Always provide at least one alternative so that which_op_alt ()
2476 works correctly. If the instruction has 0 alternatives (i.e. all
2477 constraint strings are empty) then each operand in this alternative
2478 will have anything_ok set. */
2479 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2480 int n_entries = n_operands * n_alternatives;
2482 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2483 const char **constraints = XALLOCAVEC (const char *, n_operands);
2485 for (int i = 0; i < n_operands; ++i)
2486 constraints[i] = insn_data[icode].operand[i].constraint;
2487 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);
2489 this_target_recog->x_op_alt[icode] = op_alt;
2490 return op_alt;
2493 /* After calling extract_insn, you can use this function to extract some
2494 information from the constraint strings into a more usable form.
2495 The collected data is stored in recog_op_alt. */
2497 void
2498 preprocess_constraints (rtx_insn *insn)
2500 int icode = INSN_CODE (insn);
2501 if (icode >= 0)
2502 recog_op_alt = preprocess_insn_constraints (icode);
2503 else
2505 int n_operands = recog_data.n_operands;
2506 int n_alternatives = recog_data.n_alternatives;
2507 int n_entries = n_operands * n_alternatives;
2508 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2509 preprocess_constraints (n_operands, n_alternatives,
2510 recog_data.constraints, asm_op_alt);
2511 recog_op_alt = asm_op_alt;
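/* Illustrative sketch, not part of the original file: querying the
   preprocessed data for the alternative that was matched, using the
   which_op_alt helper from recog.h.  */
#if 0
static bool
example_operand_allows_memory (rtx_insn *insn, int opno)
{
  extract_constrain_insn (insn);
  preprocess_constraints (insn);
  return which_op_alt ()[opno].memory_ok != 0;
}
#endif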
2515 /* Check the operands of an insn against the insn's operand constraints
2516 and return 1 if they match any of the alternatives in ALTERNATIVES.
2518 The information about the insn's operands, constraints, operand modes
2519 etc. is obtained from the global variables set up by extract_insn.
2521 WHICH_ALTERNATIVE is set to a number which indicates which
2522 alternative of constraints was matched: 0 for the first alternative,
2523 1 for the next, etc.
2525 In addition, when two operands are required to match
2526 and it happens that the output operand is (reg) while the
2527 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2528 make the output operand look like the input.
2529 This is because the output operand is the one the template will print.
2531 This is used in final, just before printing the assembler code and by
2532 the routines that determine an insn's attribute.
2534 If STRICT is a positive value, it means that we have been
2535 called after reload has been completed. In that case, we must
2536 do all checks strictly. If it is zero, it means that we have been called
2537 before reload has completed. In that case, we first try to see if we can
2538 find an alternative that matches strictly. If not, we try again, this
2539 time assuming that reload will fix up the insn. This provides a "best
2540 guess" for the alternative and is used to compute attributes of insns prior
2541 to reload. A negative value of STRICT is used for this internal call. */
2543 struct funny_match
2545 int this_op, other;
2546 };
2548 int
2549 constrain_operands (int strict, alternative_mask alternatives)
2551 const char *constraints[MAX_RECOG_OPERANDS];
2552 int matching_operands[MAX_RECOG_OPERANDS];
2553 int earlyclobber[MAX_RECOG_OPERANDS];
2554 int c;
2556 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2557 int funny_match_index;
2559 which_alternative = 0;
2560 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2561 return 1;
2563 for (c = 0; c < recog_data.n_operands; c++)
2565 constraints[c] = recog_data.constraints[c];
2566 matching_operands[c] = -1;
2569 do
2571 int seen_earlyclobber_at = -1;
2572 int opno;
2573 int lose = 0;
2574 funny_match_index = 0;
2576 if (!TEST_BIT (alternatives, which_alternative))
2578 int i;
2580 for (i = 0; i < recog_data.n_operands; i++)
2581 constraints[i] = skip_alternative (constraints[i]);
2583 which_alternative++;
2584 continue;
2587 for (opno = 0; opno < recog_data.n_operands; opno++)
2589 rtx op = recog_data.operand[opno];
2590 machine_mode mode = GET_MODE (op);
2591 const char *p = constraints[opno];
2592 int offset = 0;
2593 int win = 0;
2594 int val;
2595 int len;
2597 earlyclobber[opno] = 0;
2599 /* A unary operator may be accepted by the predicate, but it
2600 is irrelevant for matching constraints. */
2601 if (UNARY_P (op))
2602 op = XEXP (op, 0);
2604 if (GET_CODE (op) == SUBREG)
2606 if (REG_P (SUBREG_REG (op))
2607 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2608 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2609 GET_MODE (SUBREG_REG (op)),
2610 SUBREG_BYTE (op),
2611 GET_MODE (op));
2612 op = SUBREG_REG (op);
2615 /* An empty constraint or empty alternative
2616 allows anything which matched the pattern. */
2617 if (*p == 0 || *p == ',')
2618 win = 1;
2619 else
2620 do
2621 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2623 case '\0':
2624 len = 0;
2625 break;
2626 case ',':
2627 c = '\0';
2628 break;
2630 case '#':
2631 /* Ignore rest of this alternative as far as
2632 constraint checking is concerned. */
2633 do
2634 p++;
2635 while (*p && *p != ',');
2636 len = 0;
2637 break;
2639 case '&':
2640 earlyclobber[opno] = 1;
2641 if (seen_earlyclobber_at < 0)
2642 seen_earlyclobber_at = opno;
2643 break;
2645 case '0': case '1': case '2': case '3': case '4':
2646 case '5': case '6': case '7': case '8': case '9':
2648 /* This operand must be the same as a previous one.
2649 This kind of constraint is used for instructions such
2650 as add when they take only two operands.
2652 Note that the lower-numbered operand is passed first.
2654 If we are not testing strictly, assume that this
2655 constraint will be satisfied. */
2657 char *end;
2658 int match;
2660 match = strtoul (p, &end, 10);
2661 p = end;
2663 if (strict < 0)
2664 val = 1;
2665 else
2667 rtx op1 = recog_data.operand[match];
2668 rtx op2 = recog_data.operand[opno];
2670 /* A unary operator may be accepted by the predicate,
2671 but it is irrelevant for matching constraints. */
2672 if (UNARY_P (op1))
2673 op1 = XEXP (op1, 0);
2674 if (UNARY_P (op2))
2675 op2 = XEXP (op2, 0);
2677 val = operands_match_p (op1, op2);
2680 matching_operands[opno] = match;
2681 matching_operands[match] = opno;
2683 if (val != 0)
2684 win = 1;
2686 /* If output is *x and input is *--x, arrange later
2687 to change the output to *--x as well, since the
2688 output op is the one that will be printed. */
2689 if (val == 2 && strict > 0)
2691 funny_match[funny_match_index].this_op = opno;
2692 funny_match[funny_match_index++].other = match;
2695 len = 0;
2696 break;
2698 case 'p':
2699 /* p is used for address_operands. When we are called by
2700 gen_reload, no one will have checked that the address is
2701 strictly valid, i.e., that all pseudos requiring hard regs
2702 have gotten them. */
2703 if (strict <= 0
2704 || (strict_memory_address_p (recog_data.operand_mode[opno],
2705 op)))
2706 win = 1;
2707 break;
2709 /* No need to check general_operand again;
2710 it was done in insn-recog.c. Well, except that reload
2711 doesn't check the validity of its replacements, but
2712 that should only matter when there's a bug. */
2713 case 'g':
2714 /* Anything goes unless it is a REG and really has a hard reg
2715 but the hard reg is not in the class GENERAL_REGS. */
2716 if (REG_P (op))
2718 if (strict < 0
2719 || GENERAL_REGS == ALL_REGS
2720 || (reload_in_progress
2721 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2722 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2723 win = 1;
2725 else if (strict < 0 || general_operand (op, mode))
2726 win = 1;
2727 break;
2729 default:
2731 enum constraint_num cn = lookup_constraint (p);
2732 enum reg_class cl = reg_class_for_constraint (cn);
2733 if (cl != NO_REGS)
2735 if (strict < 0
2736 || (strict == 0
2737 && REG_P (op)
2738 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2739 || (strict == 0 && GET_CODE (op) == SCRATCH)
2740 || (REG_P (op)
2741 && reg_fits_class_p (op, cl, offset, mode)))
2742 win = 1;
2745 else if (constraint_satisfied_p (op, cn))
2746 win = 1;
2748 else if (insn_extra_memory_constraint (cn)
2749 /* Every memory operand can be reloaded to fit. */
2750 && ((strict < 0 && MEM_P (op))
2751 /* Before reload, accept what reload can turn
2752 into a mem. */
2753 || (strict < 0 && CONSTANT_P (op))
2754 /* Before reload, accept a pseudo,
2755 since LRA can turn it into a mem. */
2756 || (strict < 0 && targetm.lra_p () && REG_P (op)
2757 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2758 /* During reload, accept a pseudo */
2759 || (reload_in_progress && REG_P (op)
2760 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2761 win = 1;
2762 else if (insn_extra_address_constraint (cn)
2763 /* Every address operand can be reloaded to fit. */
2764 && strict < 0)
2765 win = 1;
2766 /* Cater to architectures like IA-64 that define extra memory
2767 constraints without using define_memory_constraint. */
2768 else if (reload_in_progress
2769 && REG_P (op)
2770 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2771 && reg_renumber[REGNO (op)] < 0
2772 && reg_equiv_mem (REGNO (op)) != 0
2773 && constraint_satisfied_p
2774 (reg_equiv_mem (REGNO (op)), cn))
2775 win = 1;
2776 break;
2779 while (p += len, c);
2781 constraints[opno] = p;
2782 /* If this operand did not win somehow,
2783 this alternative loses. */
2784 if (! win)
2785 lose = 1;
2787 /* This alternative won; the operands are ok.
2788 Change whichever operands this alternative says to change. */
2789 if (! lose)
2791 int opno, eopno;
2793 /* See if any earlyclobber operand conflicts with some other
2794 operand. */
2796 if (strict > 0 && seen_earlyclobber_at >= 0)
2797 for (eopno = seen_earlyclobber_at;
2798 eopno < recog_data.n_operands;
2799 eopno++)
2800 /* Ignore earlyclobber operands now in memory,
2801 because we would often report failure when we have
2802 two memory operands, one of which was formerly a REG. */
2803 if (earlyclobber[eopno]
2804 && REG_P (recog_data.operand[eopno]))
2805 for (opno = 0; opno < recog_data.n_operands; opno++)
2806 if ((MEM_P (recog_data.operand[opno])
2807 || recog_data.operand_type[opno] != OP_OUT)
2808 && opno != eopno
2809 /* Ignore things like match_operator operands. */
2810 && *recog_data.constraints[opno] != 0
2811 && ! (matching_operands[opno] == eopno
2812 && operands_match_p (recog_data.operand[opno],
2813 recog_data.operand[eopno]))
2814 && ! safe_from_earlyclobber (recog_data.operand[opno],
2815 recog_data.operand[eopno]))
2816 lose = 1;
2818 if (! lose)
2820 while (--funny_match_index >= 0)
2822 recog_data.operand[funny_match[funny_match_index].other]
2823 = recog_data.operand[funny_match[funny_match_index].this_op];
2826 #ifdef AUTO_INC_DEC
2827 /* For operands without < or > constraints reject side-effects. */
2828 if (recog_data.is_asm)
2830 for (opno = 0; opno < recog_data.n_operands; opno++)
2831 if (MEM_P (recog_data.operand[opno]))
2832 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2834 case PRE_INC:
2835 case POST_INC:
2836 case PRE_DEC:
2837 case POST_DEC:
2838 case PRE_MODIFY:
2839 case POST_MODIFY:
2840 if (strchr (recog_data.constraints[opno], '<') == NULL
2841 && strchr (recog_data.constraints[opno], '>')
2842 == NULL)
2843 return 0;
2844 break;
2845 default:
2846 break;
2849 #endif
2850 return 1;
2854 which_alternative++;
2856 while (which_alternative < recog_data.n_alternatives);
2858 which_alternative = -1;
2859 /* If we are about to reject this, but we are not to test strictly,
2860 try a very loose test. Only return failure if it fails also. */
2861 if (strict == 0)
2862 return constrain_operands (-1, alternatives);
2863 else
2864 return 0;
2867 /* Return true iff OPERAND (assumed to be a REG rtx)
2868 is a hard reg in class CLASS when its regno is offset by OFFSET
2869 and changed to mode MODE.
2870 If REG occupies multiple hard regs, all of them must be in CLASS. */
2872 bool
2873 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2874 machine_mode mode)
2876 unsigned int regno = REGNO (operand);
2878 if (cl == NO_REGS)
2879 return false;
2881 /* Regno must not be a pseudo register. Offset may be negative. */
2882 return (HARD_REGISTER_NUM_P (regno)
2883 && HARD_REGISTER_NUM_P (regno + offset)
2884 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2885 regno + offset));
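/* Illustrative sketch, not part of the original file: for a hard register
   with a zero offset, the check above reduces to an in_hard_reg_set_p
   query on the class contents.  Register number 0 is arbitrary.  */
#if 0
static void
example_reg_fits (void)
{
  rtx hard_reg = gen_rtx_REG (word_mode, 0);
  gcc_assert (reg_fits_class_p (hard_reg, GENERAL_REGS, 0, word_mode)
              == in_hard_reg_set_p (reg_class_contents[(int) GENERAL_REGS],
                                    word_mode, 0));
}
#endif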
2888 /* Split a single instruction. Helper function for split_all_insns and
2889 split_all_insns_noflow. Return last insn in the sequence if successful,
2890 or NULL if unsuccessful. */
2892 static rtx_insn *
2893 split_insn (rtx_insn *insn)
2895 /* Split insns here to get max fine-grain parallelism. */
2896 rtx_insn *first = PREV_INSN (insn);
2897 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2898 rtx insn_set, last_set, note;
2900 if (last == insn)
2901 return NULL;
2903 /* If the original instruction was a single set that was known to be
2904 equivalent to a constant, see if we can say the same about the last
2905 instruction in the split sequence. The two instructions must set
2906 the same destination. */
2907 insn_set = single_set (insn);
2908 if (insn_set)
2910 last_set = single_set (last);
2911 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2913 note = find_reg_equal_equiv_note (insn);
2914 if (note && CONSTANT_P (XEXP (note, 0)))
2915 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2916 else if (CONSTANT_P (SET_SRC (insn_set)))
2917 set_unique_reg_note (last, REG_EQUAL,
2918 copy_rtx (SET_SRC (insn_set)));
2922 /* try_split returns the NOTE that INSN became. */
2923 SET_INSN_DELETED (insn);
2925 /* ??? Coddle to md files that generate subregs in post-reload
2926 splitters instead of computing the proper hard register. */
2927 if (reload_completed && first != last)
2929 first = NEXT_INSN (first);
2930 for (;;)
2932 if (INSN_P (first))
2933 cleanup_subreg_operands (first);
2934 if (first == last)
2935 break;
2936 first = NEXT_INSN (first);
2940 return last;
2943 /* Split all insns in the function. */
2945 void
2946 split_all_insns (void)
2948 sbitmap blocks;
2949 bool changed;
2950 basic_block bb;
2952 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2953 bitmap_clear (blocks);
2954 changed = false;
2956 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2958 rtx_insn *insn, *next;
2959 bool finish = false;
2961 rtl_profile_for_bb (bb);
2962 for (insn = BB_HEAD (bb); !finish ; insn = next)
2964 /* Can't use `next_real_insn' because that might go across
2965 CODE_LABELS and short-out basic blocks. */
2966 next = NEXT_INSN (insn);
2967 finish = (insn == BB_END (bb));
2968 if (INSN_P (insn))
2970 rtx set = single_set (insn);
2972 /* Don't split no-op move insns. These should silently
2973 disappear later in final. Splitting such insns would
2974 break the code that handles LIBCALL blocks. */
2975 if (set && set_noop_p (set))
2977 /* Nops get in the way while scheduling, so delete them
2978 now if register allocation has already been done. It
2979 is too risky to try to do this before register
2980 allocation, and there are unlikely to be very many
2981 nops then anyway. */
2982 if (reload_completed)
2983 delete_insn_and_edges (insn);
2985 else
2987 if (split_insn (insn))
2989 bitmap_set_bit (blocks, bb->index);
2990 changed = true;
2997 default_rtl_profile ();
2998 if (changed)
2999 find_many_sub_basic_blocks (blocks);
3001 #ifdef ENABLE_CHECKING
3002 verify_flow_info ();
3003 #endif
3005 sbitmap_free (blocks);
3008 /* Same as split_all_insns, but do not expect CFG to be available.
3009 Used by machine dependent reorg passes. */
3011 unsigned int
3012 split_all_insns_noflow (void)
3014 rtx_insn *next, *insn;
3016 for (insn = get_insns (); insn; insn = next)
3018 next = NEXT_INSN (insn);
3019 if (INSN_P (insn))
3021 /* Don't split no-op move insns. These should silently
3022 disappear later in final. Splitting such insns would
3023 break the code that handles LIBCALL blocks. */
3024 rtx set = single_set (insn);
3025 if (set && set_noop_p (set))
3027 /* Nops get in the way while scheduling, so delete them
3028 now if register allocation has already been done. It
3029 is too risky to try to do this before register
3030 allocation, and there are unlikely to be very many
3031 nops then anyway.
3033 ??? Should we use delete_insn when the CFG isn't valid? */
3034 if (reload_completed)
3035 delete_insn_and_edges (insn);
3037 else
3038 split_insn (insn);
3041 return 0;
3044 #ifdef HAVE_peephole2
3045 struct peep2_insn_data
3047 rtx_insn *insn;
3048 regset live_before;
3051 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3052 static int peep2_current;
3054 static bool peep2_do_rebuild_jump_labels;
3055 static bool peep2_do_cleanup_cfg;
3057 /* The number of instructions available to match a peep2. */
3058 int peep2_current_count;
3060 /* A marker indicating the last insn of the block. The live_before regset
3061 for this element is correct, indicating DF_LIVE_OUT for the block. */
3062 #define PEEP2_EOB invalid_insn_rtx
3064 /* Wrap N to fit into the peep2_insn_data buffer. */
3066 static int
3067 peep2_buf_position (int n)
3069 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3070 n -= MAX_INSNS_PER_PEEP2 + 1;
3071 return n;
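/* Illustrative sketch, not part of the original file: the buffer is
   circular with MAX_INSNS_PER_PEEP2 + 1 slots, so an index one past the
   last slot wraps back to 0.  Callers never pass an index more than one
   buffer length out of range.  */
#if 0
static void
example_buffer_wraps (void)
{
  gcc_assert (peep2_buf_position (MAX_INSNS_PER_PEEP2 + 1) == 0);
  gcc_assert (peep2_buf_position (MAX_INSNS_PER_PEEP2)
              == MAX_INSNS_PER_PEEP2);
}
#endif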
3074 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3075 does not exist. Used by the recognizer to find the next insn to match
3076 in a multi-insn pattern. */
3078 rtx_insn *
3079 peep2_next_insn (int n)
3081 gcc_assert (n <= peep2_current_count);
3083 n = peep2_buf_position (peep2_current + n);
3085 return peep2_insn_data[n].insn;
3088 /* Return true if REGNO is dead before the Nth non-note insn
3089 after `current'. */
3091 int
3092 peep2_regno_dead_p (int ofs, int regno)
3094 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3096 ofs = peep2_buf_position (peep2_current + ofs);
3098 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3100 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3103 /* Similarly for a REG. */
3105 int
3106 peep2_reg_dead_p (int ofs, rtx reg)
3108 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3110 ofs = peep2_buf_position (peep2_current + ofs);
3112 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3114 unsigned int end_regno = END_REGNO (reg);
3115 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3116 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3117 return 0;
3118 return 1;
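/* Illustrative sketch, not part of the original file: these liveness
   tests are normally used in define_peephole2 conditions in a machine
   description, e.g. (schematic fragment with hypothetical operands):

        (define_peephole2
          [(set (match_operand 0 "register_operand")
                (match_operand 1 "register_operand"))]
          "peep2_reg_dead_p (1, operands[1])"
          [...])

   which requires operand 1 to be dead after the single matched insn.  */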
3121 /* Regno offset to be used in the register search. */
3122 static int search_ofs;
3124 /* Try to find a hard register of mode MODE, matching the register class in
3125 CLASS_STR, which is available at the beginning of the insn in buffer
3126 position FROM and remains available until the beginning of the insn in
3127 buffer position TO; both positions are offsets from `current', as for
3128 peep2_next_insn.
3129 Registers that already have bits set in REG_SET will not be considered.
3131 If an appropriate register is available, it will be returned and the
3132 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3133 returned. */
3135 rtx
3136 peep2_find_free_register (int from, int to, const char *class_str,
3137 machine_mode mode, HARD_REG_SET *reg_set)
3139 enum reg_class cl;
3140 HARD_REG_SET live;
3141 df_ref def;
3142 int i;
3144 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3145 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3147 from = peep2_buf_position (peep2_current + from);
3148 to = peep2_buf_position (peep2_current + to);
3150 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3151 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3153 while (from != to)
3155 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3157 /* Don't use registers set or clobbered by the insn. */
3158 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3159 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3161 from = peep2_buf_position (from + 1);
3164 cl = reg_class_for_constraint (lookup_constraint (class_str));
3166 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3168 int raw_regno, regno, success, j;
3170 /* Distribute the free registers as much as possible. */
3171 raw_regno = search_ofs + i;
3172 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3173 raw_regno -= FIRST_PSEUDO_REGISTER;
3174 #ifdef REG_ALLOC_ORDER
3175 regno = reg_alloc_order[raw_regno];
3176 #else
3177 regno = raw_regno;
3178 #endif
3180 /* Can it support the mode we need? */
3181 if (! HARD_REGNO_MODE_OK (regno, mode))
3182 continue;
3184 success = 1;
3185 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3187 /* Don't allocate fixed registers. */
3188 if (fixed_regs[regno + j])
3190 success = 0;
3191 break;
3193 /* Don't allocate global registers. */
3194 if (global_regs[regno + j])
3196 success = 0;
3197 break;
3199 /* Make sure the register is of the right class. */
3200 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3202 success = 0;
3203 break;
3205 /* And that we don't create an extra save/restore. */
3206 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3208 success = 0;
3209 break;
3212 if (! targetm.hard_regno_scratch_ok (regno + j))
3214 success = 0;
3215 break;
3218 /* And we don't clobber traceback for noreturn functions. */
3219 if ((regno + j == FRAME_POINTER_REGNUM
3220 || regno + j == HARD_FRAME_POINTER_REGNUM)
3221 && (! reload_completed || frame_pointer_needed))
3223 success = 0;
3224 break;
3227 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3228 || TEST_HARD_REG_BIT (live, regno + j))
3230 success = 0;
3231 break;
3235 if (success)
3237 add_to_hard_reg_set (reg_set, mode, regno);
3239 /* Start the next search with the next register. */
3240 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3241 raw_regno = 0;
3242 search_ofs = raw_regno;
3244 return gen_rtx_REG (mode, regno);
3248 search_ofs = 0;
3249 return NULL_RTX;
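/* Illustrative sketch, not part of the original file: how a
   define_peephole2 preparation statement might grab a scratch register
   that stays free across two matched insns.  "r" selects GENERAL_REGS
   through the constraint machinery; USED tracks registers already
   handed out by earlier calls.  */
#if 0
static rtx
example_get_scratch (void)
{
  HARD_REG_SET used;
  CLEAR_HARD_REG_SET (used);
  /* Free from the start of matched insn 0 up to the start of slot 2,
     i.e. across both matched insns; NULL_RTX if nothing qualifies.  */
  return peep2_find_free_register (0, 2, "r", word_mode, &used);
}
#endif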
3252 /* Forget all currently tracked instructions, only remember current
3253 LIVE regset. */
3255 static void
3256 peep2_reinit_state (regset live)
3258 int i;
3260 /* Indicate that all slots except the last hold invalid data. */
3261 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3262 peep2_insn_data[i].insn = NULL;
3263 peep2_current_count = 0;
3265 /* Indicate that the last slot contains live_after data. */
3266 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3267 peep2_current = MAX_INSNS_PER_PEEP2;
3269 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3272 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3273 starting at INSN. Perform the replacement, removing the old insns and
3274 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3275 if the replacement is rejected. */
3277 static rtx_insn *
3278 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3280 int i;
3281 rtx_insn *last, *before_try, *x;
3282 rtx eh_note, as_note;
3283 rtx_insn *old_insn;
3284 rtx_insn *new_insn;
3285 bool was_call = false;
3287 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3288 match more than one insn, or to be split into more than one insn. */
3289 old_insn = peep2_insn_data[peep2_current].insn;
3290 if (RTX_FRAME_RELATED_P (old_insn))
3292 bool any_note = false;
3293 rtx note;
3295 if (match_len != 0)
3296 return NULL;
3298 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3299 may be in the stream for the purpose of register allocation. */
3300 if (active_insn_p (attempt))
3301 new_insn = attempt;
3302 else
3303 new_insn = next_active_insn (attempt);
3304 if (next_active_insn (new_insn))
3305 return NULL;
3307 /* We have a 1-1 replacement. Copy over any frame-related info. */
3308 RTX_FRAME_RELATED_P (new_insn) = 1;
3310 /* Allow the backend to fill in a note during the split. */
3311 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3312 switch (REG_NOTE_KIND (note))
3314 case REG_FRAME_RELATED_EXPR:
3315 case REG_CFA_DEF_CFA:
3316 case REG_CFA_ADJUST_CFA:
3317 case REG_CFA_OFFSET:
3318 case REG_CFA_REGISTER:
3319 case REG_CFA_EXPRESSION:
3320 case REG_CFA_RESTORE:
3321 case REG_CFA_SET_VDRAP:
3322 any_note = true;
3323 break;
3324 default:
3325 break;
3328 /* If the backend didn't supply a note, copy one over. */
3329 if (!any_note)
3330 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3331 switch (REG_NOTE_KIND (note))
3333 case REG_FRAME_RELATED_EXPR:
3334 case REG_CFA_DEF_CFA:
3335 case REG_CFA_ADJUST_CFA:
3336 case REG_CFA_OFFSET:
3337 case REG_CFA_REGISTER:
3338 case REG_CFA_EXPRESSION:
3339 case REG_CFA_RESTORE:
3340 case REG_CFA_SET_VDRAP:
3341 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3342 any_note = true;
3343 break;
3344 default:
3345 break;
3348 /* If there still isn't a note, make sure the unwind info sees the
3349 same expression as before the split. */
3350 if (!any_note)
3352 rtx old_set, new_set;
3354 /* The old insn had better have been simple, or annotated. */
3355 old_set = single_set (old_insn);
3356 gcc_assert (old_set != NULL);
3358 new_set = single_set (new_insn);
3359 if (!new_set || !rtx_equal_p (new_set, old_set))
3360 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3363 /* Copy prologue/epilogue status. This is required in order to keep
3364 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3365 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3368 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3369 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3370 cfg-related call notes. */
3371 for (i = 0; i <= match_len; ++i)
3373 int j;
3374 rtx note;
3376 j = peep2_buf_position (peep2_current + i);
3377 old_insn = peep2_insn_data[j].insn;
3378 if (!CALL_P (old_insn))
3379 continue;
3380 was_call = true;
3382 new_insn = attempt;
3383 while (new_insn != NULL_RTX)
3385 if (CALL_P (new_insn))
3386 break;
3387 new_insn = NEXT_INSN (new_insn);
3390 gcc_assert (new_insn != NULL_RTX);
3392 CALL_INSN_FUNCTION_USAGE (new_insn)
3393 = CALL_INSN_FUNCTION_USAGE (old_insn);
3394 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3396 for (note = REG_NOTES (old_insn);
3397 note;
3398 note = XEXP (note, 1))
3399 switch (REG_NOTE_KIND (note))
3401 case REG_NORETURN:
3402 case REG_SETJMP:
3403 case REG_TM:
3404 add_reg_note (new_insn, REG_NOTE_KIND (note),
3405 XEXP (note, 0));
3406 break;
3407 default:
3408 /* Discard all other reg notes. */
3409 break;
3412 /* Croak if there is another call in the sequence. */
3413 while (++i <= match_len)
3415 j = peep2_buf_position (peep2_current + i);
3416 old_insn = peep2_insn_data[j].insn;
3417 gcc_assert (!CALL_P (old_insn));
3419 break;
3422 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3423 move those notes over to the new sequence. */
3424 as_note = NULL;
3425 for (i = match_len; i >= 0; --i)
3427 int j = peep2_buf_position (peep2_current + i);
3428 old_insn = peep2_insn_data[j].insn;
3430 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3431 if (as_note)
3432 break;
3435 i = peep2_buf_position (peep2_current + match_len);
3436 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3438 /* Replace the old sequence with the new. */
3439 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3440 last = emit_insn_after_setloc (attempt,
3441 peep2_insn_data[i].insn,
3442 INSN_LOCATION (peepinsn));
3443 before_try = PREV_INSN (insn);
3444 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3446 /* Re-insert the EH_REGION notes. */
3447 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3449 edge eh_edge;
3450 edge_iterator ei;
3452 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3453 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3454 break;
3456 if (eh_note)
3457 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3459 if (eh_edge)
3460 for (x = last; x != before_try; x = PREV_INSN (x))
3461 if (x != BB_END (bb)
3462 && (can_throw_internal (x)
3463 || can_nonlocal_goto (x)))
3465 edge nfte, nehe;
3466 int flags;
3468 nfte = split_block (bb, x);
3469 flags = (eh_edge->flags
3470 & (EDGE_EH | EDGE_ABNORMAL));
3471 if (CALL_P (x))
3472 flags |= EDGE_ABNORMAL_CALL;
3473 nehe = make_edge (nfte->src, eh_edge->dest,
3474 flags);
3476 nehe->probability = eh_edge->probability;
3477 nfte->probability
3478 = REG_BR_PROB_BASE - nehe->probability;
3480 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3481 bb = nfte->src;
3482 eh_edge = nehe;
3485 /* The replacement may have turned a possibly trapping insn
3486 into a non-trapping one. Zap any dummy outgoing edges. */
3487 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3490 /* Re-insert the ARGS_SIZE notes. */
3491 if (as_note)
3492 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3494 /* If we generated a jump instruction, it won't have
3495 JUMP_LABEL set. Recompute after we're done. */
3496 for (x = last; x != before_try; x = PREV_INSN (x))
3497 if (JUMP_P (x))
3499 peep2_do_rebuild_jump_labels = true;
3500 break;
3503 return last;
3506 /* After performing a replacement in basic block BB, fix up the life
3507 information in our buffer. LAST is the last of the insns that we
3508 emitted as a replacement. PREV is the insn before the start of
3509 the replacement. MATCH_LEN is the number of instructions that were
3510 matched, and which now need to be replaced in the buffer. */
3512 static void
3513 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3514 rtx_insn *prev)
3516 int i = peep2_buf_position (peep2_current + match_len + 1);
3517 rtx_insn *x;
3518 regset_head live;
3520 INIT_REG_SET (&live);
3521 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3523 gcc_assert (peep2_current_count >= match_len + 1);
3524 peep2_current_count -= match_len + 1;
3526 x = last;
3527 do
3529 if (INSN_P (x))
3531 df_insn_rescan (x);
3532 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3534 peep2_current_count++;
3535 if (--i < 0)
3536 i = MAX_INSNS_PER_PEEP2;
3537 peep2_insn_data[i].insn = x;
3538 df_simulate_one_insn_backwards (bb, x, &live);
3539 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3542 x = PREV_INSN (x);
3544 while (x != prev);
3545 CLEAR_REG_SET (&live);
3547 peep2_current = i;
3550 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3551 Return true if we added it, false otherwise. The caller will try to match
3552 peepholes against the buffer if we return false; otherwise it will try to
3553 add more instructions to the buffer. */
3555 static bool
3556 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3558 int pos;
3560 /* Once we have filled the maximum number of insns the buffer can hold,
3561 allow the caller to match the insns against peepholes. We wait until
3562 the buffer is full in case the target has similar peepholes of different
3563 length; we always want to match the longest if possible. */
3564 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3565 return false;
3567 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3568 any other pattern, lest it change the semantics of the frame info. */
3569 if (RTX_FRAME_RELATED_P (insn))
3571 /* Let the buffer drain first. */
3572 if (peep2_current_count > 0)
3573 return false;
3574 /* Now the insn will be the only thing in the buffer. */
3577 pos = peep2_buf_position (peep2_current + peep2_current_count);
3578 peep2_insn_data[pos].insn = insn;
3579 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3580 peep2_current_count++;
3582 df_simulate_one_insn_forwards (bb, insn, live);
3583 return true;
3586 /* Perform the peephole2 optimization pass. */
3588 static void
3589 peephole2_optimize (void)
3591 rtx_insn *insn;
3592 bitmap live;
3593 int i;
3594 basic_block bb;
3596 peep2_do_cleanup_cfg = false;
3597 peep2_do_rebuild_jump_labels = false;
3599 df_set_flags (DF_LR_RUN_DCE);
3600 df_note_add_problem ();
3601 df_analyze ();
3603 /* Initialize the regsets we're going to use. */
3604 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3605 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3606 search_ofs = 0;
3607 live = BITMAP_ALLOC (&reg_obstack);
3609 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3611 bool past_end = false;
3612 int pos;
3614 rtl_profile_for_bb (bb);
3616 /* Start up propagation. */
3617 bitmap_copy (live, DF_LR_IN (bb));
3618 df_simulate_initialize_forwards (bb, live);
3619 peep2_reinit_state (live);
3621 insn = BB_HEAD (bb);
3622 for (;;)
3624 rtx_insn *attempt, *head;
3625 int match_len;
3627 if (!past_end && !NONDEBUG_INSN_P (insn))
3629 next_insn:
3630 insn = NEXT_INSN (insn);
3631 if (insn == NEXT_INSN (BB_END (bb)))
3632 past_end = true;
3633 continue;
3635 if (!past_end && peep2_fill_buffer (bb, insn, live))
3636 goto next_insn;
3638 /* If we did not fill an empty buffer, it signals the end of the
3639 block. */
3640 if (peep2_current_count == 0)
3641 break;
3643 /* The buffer filled to the current maximum, so try to match. */
3645 pos = peep2_buf_position (peep2_current + peep2_current_count);
3646 peep2_insn_data[pos].insn = PEEP2_EOB;
3647 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3649 /* Match the peephole. */
3650 head = peep2_insn_data[peep2_current].insn;
3651 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3652 if (attempt != NULL)
3654 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3655 if (last)
3657 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3658 continue;
3662 /* No match: advance the buffer by one insn. */
3663 peep2_current = peep2_buf_position (peep2_current + 1);
3664 peep2_current_count--;
3668 default_rtl_profile ();
3669 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3670 BITMAP_FREE (peep2_insn_data[i].live_before);
3671 BITMAP_FREE (live);
3672 if (peep2_do_rebuild_jump_labels)
3673 rebuild_jump_labels (get_insns ());
3674 if (peep2_do_cleanup_cfg)
3675 cleanup_cfg (CLEANUP_CFG_CHANGED);
3677 #endif /* HAVE_peephole2 */
3679 /* Common predicates for use with define_bypass. */
3681 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3682 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3683 must be either a single_set or a PARALLEL with SETs inside. */
3685 int
3686 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3688 rtx out_set, in_set;
3689 rtx out_pat, in_pat;
3690 rtx out_exp, in_exp;
3691 int i, j;
3693 in_set = single_set (in_insn);
3694 if (in_set)
3696 if (!MEM_P (SET_DEST (in_set)))
3697 return false;
3699 out_set = single_set (out_insn);
3700 if (out_set)
3702 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3703 return false;
3705 else
3707 out_pat = PATTERN (out_insn);
3709 if (GET_CODE (out_pat) != PARALLEL)
3710 return false;
3712 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3714 out_exp = XVECEXP (out_pat, 0, i);
3716 if (GET_CODE (out_exp) == CLOBBER)
3717 continue;
3719 gcc_assert (GET_CODE (out_exp) == SET);
3721 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3722 return false;
3726 else
3728 in_pat = PATTERN (in_insn);
3729 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3731 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3733 in_exp = XVECEXP (in_pat, 0, i);
3735 if (GET_CODE (in_exp) == CLOBBER)
3736 continue;
3738 gcc_assert (GET_CODE (in_exp) == SET);
3740 if (!MEM_P (SET_DEST (in_exp)))
3741 return false;
3743 out_set = single_set (out_insn);
3744 if (out_set)
3746 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3747 return false;
3749 else
3751 out_pat = PATTERN (out_insn);
3752 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3754 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3756 out_exp = XVECEXP (out_pat, 0, j);
3758 if (GET_CODE (out_exp) == CLOBBER)
3759 continue;
3761 gcc_assert (GET_CODE (out_exp) == SET);
3763 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3764 return false;
3770 return true;
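/* Illustrative sketch, not part of the original file: in a machine
   description these predicates serve as define_bypass guards, e.g.
   (schematic fragment with hypothetical reservation names):

        (define_bypass 1 "alu_insn" "store_insn" "store_data_bypass_p")

   so that the shortened latency applies only when the dependency is on
   the store's data operand, not on its address.  */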
3773 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3774 condition, and not the THEN or ELSE branch. OUT_INSN may be a single or
3775 multiple set; IN_INSN should be a single_set for a meaningful result, but
3776 for convenience of insn categorization it may be any JUMP or CALL insn. */
3778 int
3779 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3781 rtx out_set, in_set;
3783 in_set = single_set (in_insn);
3784 if (! in_set)
3786 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3787 return false;
3790 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3791 return false;
3792 in_set = SET_SRC (in_set);
3794 out_set = single_set (out_insn);
3795 if (out_set)
3797 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3798 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3799 return false;
3801 else
3803 rtx out_pat;
3804 int i;
3806 out_pat = PATTERN (out_insn);
3807 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3809 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3811 rtx exp = XVECEXP (out_pat, 0, i);
3813 if (GET_CODE (exp) == CLOBBER)
3814 continue;
3816 gcc_assert (GET_CODE (exp) == SET);
3818 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3819 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3820 return false;
3824 return true;
3827 static unsigned int
3828 rest_of_handle_peephole2 (void)
3830 #ifdef HAVE_peephole2
3831 peephole2_optimize ();
3832 #endif
3833 return 0;
3836 namespace {
3838 const pass_data pass_data_peephole2 =
3840 RTL_PASS, /* type */
3841 "peephole2", /* name */
3842 OPTGROUP_NONE, /* optinfo_flags */
3843 TV_PEEPHOLE2, /* tv_id */
3844 0, /* properties_required */
3845 0, /* properties_provided */
3846 0, /* properties_destroyed */
3847 0, /* todo_flags_start */
3848 TODO_df_finish, /* todo_flags_finish */
3851 class pass_peephole2 : public rtl_opt_pass
3853 public:
3854 pass_peephole2 (gcc::context *ctxt)
3855 : rtl_opt_pass (pass_data_peephole2, ctxt)
3858 /* opt_pass methods: */
3859 /* The epiphany backend creates a second instance of this pass, so we need
3860 a clone method. */
3861 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3862 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3863 virtual unsigned int execute (function *)
3865 return rest_of_handle_peephole2 ();
3868 }; // class pass_peephole2
3870 } // anon namespace
3872 rtl_opt_pass *
3873 make_pass_peephole2 (gcc::context *ctxt)
3875 return new pass_peephole2 (ctxt);
3878 namespace {
3880 const pass_data pass_data_split_all_insns =
3882 RTL_PASS, /* type */
3883 "split1", /* name */
3884 OPTGROUP_NONE, /* optinfo_flags */
3885 TV_NONE, /* tv_id */
3886 0, /* properties_required */
3887 0, /* properties_provided */
3888 0, /* properties_destroyed */
3889 0, /* todo_flags_start */
3890 0, /* todo_flags_finish */
3893 class pass_split_all_insns : public rtl_opt_pass
3895 public:
3896 pass_split_all_insns (gcc::context *ctxt)
3897 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3900 /* opt_pass methods: */
3901 /* The epiphany backend creates a second instance of this pass, so
3902 we need a clone method. */
3903 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3904 virtual unsigned int execute (function *)
3906 split_all_insns ();
3907 return 0;
3910 }; // class pass_split_all_insns
3912 } // anon namespace
3914 rtl_opt_pass *
3915 make_pass_split_all_insns (gcc::context *ctxt)
3917 return new pass_split_all_insns (ctxt);
3920 static unsigned int
3921 rest_of_handle_split_after_reload (void)
3923 /* If optimizing, then go ahead and split insns now. */
3924 #ifndef STACK_REGS
3925 if (optimize > 0)
3926 #endif
3927 split_all_insns ();
3928 return 0;
3931 namespace {
3933 const pass_data pass_data_split_after_reload =
3935 RTL_PASS, /* type */
3936 "split2", /* name */
3937 OPTGROUP_NONE, /* optinfo_flags */
3938 TV_NONE, /* tv_id */
3939 0, /* properties_required */
3940 0, /* properties_provided */
3941 0, /* properties_destroyed */
3942 0, /* todo_flags_start */
3943 0, /* todo_flags_finish */
3946 class pass_split_after_reload : public rtl_opt_pass
3948 public:
3949 pass_split_after_reload (gcc::context *ctxt)
3950 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3953 /* opt_pass methods: */
3954 virtual unsigned int execute (function *)
3956 return rest_of_handle_split_after_reload ();
3959 }; // class pass_split_after_reload
3961 } // anon namespace
3963 rtl_opt_pass *
3964 make_pass_split_after_reload (gcc::context *ctxt)
3966 return new pass_split_after_reload (ctxt);
3969 namespace {
3971 const pass_data pass_data_split_before_regstack =
3973 RTL_PASS, /* type */
3974 "split3", /* name */
3975 OPTGROUP_NONE, /* optinfo_flags */
3976 TV_NONE, /* tv_id */
3977 0, /* properties_required */
3978 0, /* properties_provided */
3979 0, /* properties_destroyed */
3980 0, /* todo_flags_start */
3981 0, /* todo_flags_finish */
3984 class pass_split_before_regstack : public rtl_opt_pass
3986 public:
3987 pass_split_before_regstack (gcc::context *ctxt)
3988 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3991 /* opt_pass methods: */
3992 virtual bool gate (function *);
3993 virtual unsigned int execute (function *)
3995 split_all_insns ();
3996 return 0;
3999 }; // class pass_split_before_regstack
4001 bool
4002 pass_split_before_regstack::gate (function *)
4004 #if HAVE_ATTR_length && defined (STACK_REGS)
4005 /* If flow2 creates new instructions which need splitting, and
4006 scheduling after reload is not done, they might not be split
4007 until final, which doesn't allow splitting
4008 if HAVE_ATTR_length. */
4009 # ifdef INSN_SCHEDULING
4010 return (optimize && !flag_schedule_insns_after_reload);
4011 # else
4012 return (optimize);
4013 # endif
4014 #else
4015 return 0;
4016 #endif
4019 } // anon namespace
4021 rtl_opt_pass *
4022 make_pass_split_before_regstack (gcc::context *ctxt)
4024 return new pass_split_before_regstack (ctxt);
4027 static unsigned int
4028 rest_of_handle_split_before_sched2 (void)
4030 #ifdef INSN_SCHEDULING
4031 split_all_insns ();
4032 #endif
4033 return 0;
4036 namespace {
4038 const pass_data pass_data_split_before_sched2 =
4040 RTL_PASS, /* type */
4041 "split4", /* name */
4042 OPTGROUP_NONE, /* optinfo_flags */
4043 TV_NONE, /* tv_id */
4044 0, /* properties_required */
4045 0, /* properties_provided */
4046 0, /* properties_destroyed */
4047 0, /* todo_flags_start */
4048 0, /* todo_flags_finish */
4051 class pass_split_before_sched2 : public rtl_opt_pass
4053 public:
4054 pass_split_before_sched2 (gcc::context *ctxt)
4055 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4058 /* opt_pass methods: */
4059 virtual bool gate (function *)
4061 #ifdef INSN_SCHEDULING
4062 return optimize > 0 && flag_schedule_insns_after_reload;
4063 #else
4064 return false;
4065 #endif
4068 virtual unsigned int execute (function *)
4070 return rest_of_handle_split_before_sched2 ();
4073 }; // class pass_split_before_sched2
4075 } // anon namespace
4077 rtl_opt_pass *
4078 make_pass_split_before_sched2 (gcc::context *ctxt)
4080 return new pass_split_before_sched2 (ctxt);
4083 namespace {
4085 const pass_data pass_data_split_for_shorten_branches =
4087 RTL_PASS, /* type */
4088 "split5", /* name */
4089 OPTGROUP_NONE, /* optinfo_flags */
4090 TV_NONE, /* tv_id */
4091 0, /* properties_required */
4092 0, /* properties_provided */
4093 0, /* properties_destroyed */
4094 0, /* todo_flags_start */
4095 0, /* todo_flags_finish */
4098 class pass_split_for_shorten_branches : public rtl_opt_pass
4100 public:
4101 pass_split_for_shorten_branches (gcc::context *ctxt)
4102 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4105 /* opt_pass methods: */
4106 virtual bool gate (function *)
4108 /* The placement of the splitting that we do for shorten_branches
4109 depends on whether regstack is used by the target or not. */
4110 #if HAVE_ATTR_length && !defined (STACK_REGS)
4111 return true;
4112 #else
4113 return false;
4114 #endif
4117 virtual unsigned int execute (function *)
4119 return split_all_insns_noflow ();
4122 }; // class pass_split_for_shorten_branches
4124 } // anon namespace
4126 rtl_opt_pass *
4127 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4129 return new pass_split_for_shorten_branches (ctxt);
4132 /* (Re)initialize the target information after a change in target. */
4134 void
4135 recog_init ()
4137 /* The information is zero-initialized, so we don't need to do anything
4138 first time round. */
4139 if (!this_target_recog->x_initialized)
4141 this_target_recog->x_initialized = true;
4142 return;
4144 memset (this_target_recog->x_bool_attr_masks, 0,
4145 sizeof (this_target_recog->x_bool_attr_masks));
4146 for (int i = 0; i < LAST_INSN_CODE; ++i)
4147 if (this_target_recog->x_op_alt[i])
4149 free (this_target_recog->x_op_alt[i]);
4150 this_target_recog->x_op_alt[i] = 0;