[official-gcc.git] / gcc / recog.c
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "cfghooks.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "insn-config.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "insn-attr.h"
37 #include "addresses.h"
38 #include "cfgrtl.h"
39 #include "cfgbuild.h"
40 #include "cfgcleanup.h"
41 #include "reload.h"
42 #include "tree-pass.h"
44 #ifndef STACK_POP_CODE
45 #if STACK_GROWS_DOWNWARD
46 #define STACK_POP_CODE POST_INC
47 #else
48 #define STACK_POP_CODE POST_DEC
49 #endif
50 #endif
52 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
53 static void validate_replace_src_1 (rtx *, void *);
54 static rtx_insn *split_insn (rtx_insn *);
56 struct target_recog default_target_recog;
57 #if SWITCHABLE_TARGET
58 struct target_recog *this_target_recog = &default_target_recog;
59 #endif
61 /* Nonzero means allow operands to be volatile.
62 This should be 0 if you are generating rtl, such as if you are calling
63 the functions in optabs.c and expmed.c (most of the time).
64 This should be 1 if all valid insns need to be recognized,
65 such as in reginfo.c and final.c and reload.c.
67 init_recog and init_recog_no_volatile are responsible for setting this. */
69 int volatile_ok;
71 struct recog_data_d recog_data;
73 /* Contains a vector of operand_alternative structures, such that
74 operand OP of alternative A is at index A * n_operands + OP.
75 Set up by preprocess_constraints. */
76 const operand_alternative *recog_op_alt;
78 /* Used to provide recog_op_alt for asms. */
79 static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
80 * MAX_RECOG_ALTERNATIVES];
82 /* On return from `constrain_operands', indicate which alternative
83 was satisfied. */
85 int which_alternative;
87 /* Nonzero after end of reload pass.
88 Set to 1 or 0 by toplev.c.
89 Controls the significance of (SUBREG (MEM)). */
91 int reload_completed;
93 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
94 int epilogue_completed;
96 /* Initialize data used by the function `recog'.
97 This must be called once in the compilation of a function
98 before any insn recognition may be done in the function. */
100 void
101 init_recog_no_volatile (void)
103 volatile_ok = 0;
106 void
107 init_recog (void)
109 volatile_ok = 1;
113 /* Return true if labels in asm operands BODY are LABEL_REFs. */
115 static bool
116 asm_labels_ok (rtx body)
118 rtx asmop;
119 int i;
121 asmop = extract_asm_operands (body);
122 if (asmop == NULL_RTX)
123 return true;
125 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
126 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
127 return false;
129 return true;
132 /* Check that X is an insn-body for an `asm' with operands
133 and that the operands mentioned in it are legitimate. */
136 check_asm_operands (rtx x)
138 int noperands;
139 rtx *operands;
140 const char **constraints;
141 int i;
143 if (!asm_labels_ok (x))
144 return 0;
146 /* Post-reload, be more strict with things. */
147 if (reload_completed)
149 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
150 rtx_insn *insn = make_insn_raw (x);
151 extract_insn (insn);
152 constrain_operands (1, get_enabled_alternatives (insn));
153 return which_alternative >= 0;
156 noperands = asm_noperands (x);
157 if (noperands < 0)
158 return 0;
159 if (noperands == 0)
160 return 1;
162 operands = XALLOCAVEC (rtx, noperands);
163 constraints = XALLOCAVEC (const char *, noperands);
165 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
167 for (i = 0; i < noperands; i++)
169 const char *c = constraints[i];
170 if (c[0] == '%')
171 c++;
172 if (! asm_operand_ok (operands[i], c, constraints))
173 return 0;
176 return 1;
179 /* Static data for the next two routines. */
181 struct change_t
183 rtx object;
184 int old_code;
185 bool unshare;
186 rtx *loc;
187 rtx old;
190 static change_t *changes;
191 static int changes_allocated;
193 static int num_changes = 0;
195 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
196 at which NEW_RTX will be placed. If OBJECT is zero, no validation is done,
197 the change is simply made.
199 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
200 will be called with the address and mode as parameters. If OBJECT is
201 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
202 the change in place.
204 IN_GROUP is nonzero if this is part of a group of changes that must be
205 performed as a group. In that case, the changes will be stored. The
206 function `apply_change_group' will validate and apply the changes.
208 If IN_GROUP is zero, this is a single change. Try to recognize the insn
209 or validate the memory reference with the change applied. If the result
210 is not valid for the machine, suppress the change and return zero.
211 Otherwise, perform the change and return 1. */
213 static bool
214 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
216 rtx old = *loc;
218 if (old == new_rtx || rtx_equal_p (old, new_rtx))
219 return 1;
221 gcc_assert (in_group != 0 || num_changes == 0);
223 *loc = new_rtx;
225 /* Save the information describing this change. */
226 if (num_changes >= changes_allocated)
228 if (changes_allocated == 0)
229 /* This value allows for repeated substitutions inside complex
230 indexed addresses, or changes in up to 5 insns. */
231 changes_allocated = MAX_RECOG_OPERANDS * 5;
232 else
233 changes_allocated *= 2;
235 changes = XRESIZEVEC (change_t, changes, changes_allocated);
238 changes[num_changes].object = object;
239 changes[num_changes].loc = loc;
240 changes[num_changes].old = old;
241 changes[num_changes].unshare = unshare;
243 if (object && !MEM_P (object))
245 /* Set INSN_CODE to force rerecognition of insn. Save old code in
246 case invalid. */
247 changes[num_changes].old_code = INSN_CODE (object);
248 INSN_CODE (object) = -1;
251 num_changes++;
253 /* If we are making a group of changes, return 1. Otherwise, validate the
254 change group we made. */
256 if (in_group)
257 return 1;
258 else
259 return apply_change_group ();
262 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
263 UNSHARE to false. */
265 bool
266 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
268 return validate_change_1 (object, loc, new_rtx, in_group, false);
271 /* Wrapper for validate_change_1 without the UNSHARE argument defaulting
272 UNSHARE to true. */
274 bool
275 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
277 return validate_change_1 (object, loc, new_rtx, in_group, true);
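/* Editorial sketch (not part of the original recog.c): typical use of the
   change-group API above.  A pass queues several edits with IN_GROUP
   nonzero and then commits or rolls them back atomically; INSN, NOTE,
   NEW_SRC and SUCCESS stand for the caller's own data:

       validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
       validate_change (insn, &XEXP (note, 0), new_src, 1);
       success = apply_change_group ();

   apply_change_group either confirms every queued change or cancels them
   all, so the caller never observes a partially edited insn.  */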
281 /* Keep X canonicalized if some changes have made it non-canonical; only
282 modifies the operands of X, not (for example) its code. Simplifications
283 are not the job of this routine.
285 Return true if anything was changed. */
286 bool
287 canonicalize_change_group (rtx_insn *insn, rtx x)
289 if (COMMUTATIVE_P (x)
290 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
292 /* Oops, the caller has made X no longer canonical.
293 Let's redo the changes in the correct order. */
294 rtx tem = XEXP (x, 0);
295 validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
296 validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
297 return true;
299 else
300 return false;
304 /* This subroutine of apply_change_group verifies whether the changes to INSN
305 were valid; i.e. whether INSN can still be recognized.
307 If IN_GROUP is true clobbers which have to be added in order to
308 match the instructions will be added to the current change group.
309 Otherwise the changes will take effect immediately. */
312 insn_invalid_p (rtx_insn *insn, bool in_group)
314 rtx pat = PATTERN (insn);
315 int num_clobbers = 0;
316 /* If we are before reload and the pattern is a SET, see if we can add
317 clobbers. */
318 int icode = recog (pat, insn,
319 (GET_CODE (pat) == SET
320 && ! reload_completed
321 && ! reload_in_progress)
322 ? &num_clobbers : 0);
323 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
326 /* If this is an asm and the operands aren't legal, then fail. Likewise if
327 this is not an asm and the insn wasn't recognized. */
328 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
329 || (!is_asm && icode < 0))
330 return 1;
332 /* If we have to add CLOBBERs, fail if we have to add ones that reference
333 hard registers since our callers can't know if they are live or not.
334 Otherwise, add them. */
335 if (num_clobbers > 0)
337 rtx newpat;
339 if (added_clobbers_hard_reg_p (icode))
340 return 1;
342 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
343 XVECEXP (newpat, 0, 0) = pat;
344 add_clobbers (newpat, icode);
345 if (in_group)
346 validate_change (insn, &PATTERN (insn), newpat, 1);
347 else
348 PATTERN (insn) = pat = newpat;
351 /* After reload, verify that all constraints are satisfied. */
352 if (reload_completed)
354 extract_insn (insn);
356 if (! constrain_operands (1, get_preferred_alternatives (insn)))
357 return 1;
360 INSN_CODE (insn) = icode;
361 return 0;
364 /* Return number of changes made and not validated yet. */
366 num_changes_pending (void)
368 return num_changes;
371 /* Tentatively apply the changes numbered NUM and up.
372 Return 1 if all changes are valid, zero otherwise. */
375 verify_changes (int num)
377 int i;
378 rtx last_validated = NULL_RTX;
380 /* The changes have been applied and all INSN_CODEs have been reset to force
381 rerecognition.
383 The changes are valid if we aren't given an object, or if we are
384 given a MEM and it still is a valid address, or if this is an insn
385 and it is recognized. In the latter case, if reload has completed,
386 we also require that the operands meet the constraints for
387 the insn. */
389 for (i = num; i < num_changes; i++)
391 rtx object = changes[i].object;
393 /* If there is no object to test or if it is the same as the one we
394 already tested, ignore it. */
395 if (object == 0 || object == last_validated)
396 continue;
398 if (MEM_P (object))
400 if (! memory_address_addr_space_p (GET_MODE (object),
401 XEXP (object, 0),
402 MEM_ADDR_SPACE (object)))
403 break;
405 else if (/* changes[i].old might be zero, e.g. when putting a
406 REG_FRAME_RELATED_EXPR into a previously empty list. */
407 changes[i].old
408 && REG_P (changes[i].old)
409 && asm_noperands (PATTERN (object)) > 0
410 && REG_EXPR (changes[i].old) != NULL_TREE
411 && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
412 && DECL_REGISTER (REG_EXPR (changes[i].old)))
414 /* Don't allow changes of hard register operands to inline
415 assemblies if they have been defined as register asm ("x"). */
416 break;
418 else if (DEBUG_INSN_P (object))
419 continue;
420 else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
422 rtx pat = PATTERN (object);
424 /* Perhaps we couldn't recognize the insn because there were
425 extra CLOBBERs at the end. If so, try to re-recognize
426 without the last CLOBBER (later iterations will cause each of
427 them to be eliminated, in turn). But don't do this if we
428 have an ASM_OPERAND. */
429 if (GET_CODE (pat) == PARALLEL
430 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
431 && asm_noperands (PATTERN (object)) < 0)
433 rtx newpat;
435 if (XVECLEN (pat, 0) == 2)
436 newpat = XVECEXP (pat, 0, 0);
437 else
439 int j;
441 newpat
442 = gen_rtx_PARALLEL (VOIDmode,
443 rtvec_alloc (XVECLEN (pat, 0) - 1));
444 for (j = 0; j < XVECLEN (newpat, 0); j++)
445 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
448 /* Add a new change to this group to replace the pattern
449 with this new pattern. Then consider this change
450 as having succeeded. The change we added will
451 cause the entire call to fail if things remain invalid.
453 Note that this can lose if a later change than the one
454 we are processing specified &XVECEXP (PATTERN (object), 0, X)
455 but this shouldn't occur. */
457 validate_change (object, &PATTERN (object), newpat, 1);
458 continue;
460 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
461 || GET_CODE (pat) == VAR_LOCATION)
462 /* If this insn is a CLOBBER or USE, it is always valid, but is
463 never recognized. */
464 continue;
465 else
466 break;
468 last_validated = object;
471 return (i == num_changes);
474 /* A group of changes has previously been issued with validate_change
475 and verified with verify_changes. Call df_insn_rescan for each of
476 the insn changed and clear num_changes. */
478 void
479 confirm_change_group (void)
481 int i;
482 rtx last_object = NULL;
484 for (i = 0; i < num_changes; i++)
486 rtx object = changes[i].object;
488 if (changes[i].unshare)
489 *changes[i].loc = copy_rtx (*changes[i].loc);
491 /* Avoid unnecessary rescanning when multiple changes to the same instruction
492 are made. */
493 if (object)
495 if (object != last_object && last_object && INSN_P (last_object))
496 df_insn_rescan (as_a <rtx_insn *> (last_object));
497 last_object = object;
501 if (last_object && INSN_P (last_object))
502 df_insn_rescan (as_a <rtx_insn *> (last_object));
503 num_changes = 0;
506 /* Apply a group of changes previously issued with `validate_change'.
507 If all changes are valid, call confirm_change_group and return 1,
508 otherwise, call cancel_changes and return 0. */
511 apply_change_group (void)
513 if (verify_changes (0))
515 confirm_change_group ();
516 return 1;
518 else
520 cancel_changes (0);
521 return 0;
526 /* Return the number of changes so far in the current group. */
529 num_validated_changes (void)
531 return num_changes;
534 /* Retract the changes numbered NUM and up. */
536 void
537 cancel_changes (int num)
539 int i;
541 /* Back out all the changes. Do this in the opposite order in which
542 they were made. */
543 for (i = num_changes - 1; i >= num; i--)
545 *changes[i].loc = changes[i].old;
546 if (changes[i].object && !MEM_P (changes[i].object))
547 INSN_CODE (changes[i].object) = changes[i].old_code;
549 num_changes = num;
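/* Editorial sketch (not part of the original recog.c): partially rolling
   back a group.  A caller can record the current depth, queue speculative
   changes, and retract only those if they turn out to be invalid; INSN,
   LOC and NEW_RTX stand for the caller's data:

       int start = num_validated_changes ();
       validate_change (insn, loc, new_rtx, 1);
       if (! verify_changes (start))
         cancel_changes (start);

   Changes queued before START are left pending for a later
   confirm_change_group or apply_change_group; only the speculative tail
   is undone.  */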
552 /* Reduce conditional compilation elsewhere. */
553 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
554 rtx. */
556 static void
557 simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
558 machine_mode op0_mode)
560 rtx x = *loc;
561 enum rtx_code code = GET_CODE (x);
562 rtx new_rtx = NULL_RTX;
563 scalar_int_mode is_mode;
565 if (SWAPPABLE_OPERANDS_P (x)
566 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
568 validate_unshare_change (object, loc,
569 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
570 : swap_condition (code),
571 GET_MODE (x), XEXP (x, 1),
572 XEXP (x, 0)), 1);
573 x = *loc;
574 code = GET_CODE (x);
577 /* Canonicalize arithmetics with all constant operands. */
578 switch (GET_RTX_CLASS (code))
580 case RTX_UNARY:
581 if (CONSTANT_P (XEXP (x, 0)))
582 new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
583 op0_mode);
584 break;
585 case RTX_COMM_ARITH:
586 case RTX_BIN_ARITH:
587 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
588 new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
589 XEXP (x, 1));
590 break;
591 case RTX_COMPARE:
592 case RTX_COMM_COMPARE:
593 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
594 new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
595 XEXP (x, 0), XEXP (x, 1));
596 break;
597 default:
598 break;
600 if (new_rtx)
602 validate_change (object, loc, new_rtx, 1);
603 return;
606 switch (code)
608 case PLUS:
609 /* If we have a PLUS whose second operand is now a CONST_INT, use
610 simplify_gen_binary to try to simplify it.
611 ??? We may want later to remove this, once simplification is
612 separated from this function. */
613 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
614 validate_change (object, loc,
615 simplify_gen_binary
616 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
617 break;
618 case MINUS:
619 if (CONST_SCALAR_INT_P (XEXP (x, 1)))
620 validate_change (object, loc,
621 simplify_gen_binary
622 (PLUS, GET_MODE (x), XEXP (x, 0),
623 simplify_gen_unary (NEG,
624 GET_MODE (x), XEXP (x, 1),
625 GET_MODE (x))), 1);
626 break;
627 case ZERO_EXTEND:
628 case SIGN_EXTEND:
629 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
631 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
632 op0_mode);
633 /* If any of the above failed, substitute in something that
634 we know won't be recognized. */
635 if (!new_rtx)
636 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
637 validate_change (object, loc, new_rtx, 1);
639 break;
640 case SUBREG:
641 /* All subregs possible to simplify should be simplified. */
642 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
643 SUBREG_BYTE (x));
645 /* Subregs of VOIDmode operands are incorrect. */
646 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
647 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
648 if (new_rtx)
649 validate_change (object, loc, new_rtx, 1);
650 break;
651 case ZERO_EXTRACT:
652 case SIGN_EXTRACT:
653 /* If we are replacing a register with memory, try to change the memory
654 to be the mode required for memory in extract operations (this isn't
655 likely to be an insertion operation; if it was, nothing bad will
656 happen, we might just fail in some cases). */
658 if (MEM_P (XEXP (x, 0))
659 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
660 && CONST_INT_P (XEXP (x, 1))
661 && CONST_INT_P (XEXP (x, 2))
662 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
663 MEM_ADDR_SPACE (XEXP (x, 0)))
664 && !MEM_VOLATILE_P (XEXP (x, 0)))
666 int pos = INTVAL (XEXP (x, 2));
667 machine_mode new_mode = is_mode;
668 if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
669 new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
670 else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
671 new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
672 scalar_int_mode wanted_mode = (new_mode == VOIDmode
673 ? word_mode
674 : as_a <scalar_int_mode> (new_mode));
676 /* If we have a narrower mode, we can do something. */
677 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
679 int offset = pos / BITS_PER_UNIT;
680 rtx newmem;
682 /* If the bytes and bits are counted differently, we
683 must adjust the offset. */
684 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
685 offset =
686 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
687 offset);
689 gcc_assert (GET_MODE_PRECISION (wanted_mode)
690 == GET_MODE_BITSIZE (wanted_mode));
691 pos %= GET_MODE_BITSIZE (wanted_mode);
693 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
695 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
696 validate_change (object, &XEXP (x, 0), newmem, 1);
700 break;
702 default:
703 break;
707 /* Replace every occurrence of FROM in X with TO. Mark each change with
708 validate_change passing OBJECT. */
710 static void
711 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
712 bool simplify)
714 int i, j;
715 const char *fmt;
716 rtx x = *loc;
717 enum rtx_code code;
718 machine_mode op0_mode = VOIDmode;
719 int prev_changes = num_changes;
721 if (!x)
722 return;
724 code = GET_CODE (x);
725 fmt = GET_RTX_FORMAT (code);
726 if (fmt[0] == 'e')
727 op0_mode = GET_MODE (XEXP (x, 0));
729 /* X matches FROM if it is the same rtx or they are both referring to the
730 same register in the same mode. Avoid calling rtx_equal_p unless the
731 operands look similar. */
733 if (x == from
734 || (REG_P (x) && REG_P (from)
735 && GET_MODE (x) == GET_MODE (from)
736 && REGNO (x) == REGNO (from))
737 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
738 && rtx_equal_p (x, from)))
740 validate_unshare_change (object, loc, to, 1);
741 return;
744 /* Call ourself recursively to perform the replacements.
745 We must not replace inside an already replaced expression, otherwise we
746 get infinite recursion for replacements like (reg X)->(subreg (reg X))
747 so we must special case shared ASM_OPERANDS. */
749 if (GET_CODE (x) == PARALLEL)
751 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
753 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
754 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
756 /* Verify that operands are really shared. */
757 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
758 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
759 (x, 0, j))));
760 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
761 from, to, object, simplify);
763 else
764 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
765 simplify);
768 else
769 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
771 if (fmt[i] == 'e')
772 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
773 else if (fmt[i] == 'E')
774 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
775 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
776 simplify);
779 /* If we didn't substitute, there is nothing more to do. */
780 if (num_changes == prev_changes)
781 return;
783 /* ??? The regmove is no more, so is this aberration still necessary? */
784 /* Allow substituted expression to have different mode. This is used by
785 regmove to change mode of pseudo register. */
786 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
787 op0_mode = GET_MODE (XEXP (x, 0));
789 /* Do changes needed to keep rtx consistent. Don't do any other
790 simplifications, as it is not our job. */
791 if (simplify)
792 simplify_while_replacing (loc, to, object, op0_mode);
795 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
796 with TO. After all changes have been made, validate by seeing
797 if INSN is still valid. */
800 validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
802 validate_replace_rtx_1 (loc, from, to, insn, true);
803 return apply_change_group ();
806 /* Try replacing every occurrence of FROM in INSN with TO. After all
807 changes have been made, validate by seeing if INSN is still valid. */
810 validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
812 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
813 return apply_change_group ();
816 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
817 is a part of INSN. After all changes have been made, validate by seeing if
818 INSN is still valid.
819 validate_replace_rtx (from, to, insn) is equivalent to
820 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
823 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
825 validate_replace_rtx_1 (where, from, to, insn, true);
826 return apply_change_group ();
829 /* Same as above, but do not simplify rtx afterwards. */
831 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
832 rtx_insn *insn)
834 validate_replace_rtx_1 (where, from, to, insn, false);
835 return apply_change_group ();
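/* Editorial sketch (not part of the original recog.c): using the
   replacement helpers above.  Substituting every use of one pseudo for
   another inside INSN, keeping the edit only if INSN still recognizes;
   FROM_REG and TO_REG stand for the caller's registers:

       int ok = validate_replace_rtx (from_reg, to_reg, insn);

   On failure all substitutions are undone automatically by
   apply_change_group.  validate_replace_rtx_part restricts the
   substitution to one subexpression (for example &SET_SRC (PATTERN
   (insn))) while still validating the whole insn.  */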
839 /* Try replacing every occurrence of FROM in INSN with TO. This also
840 will replace in REG_EQUAL and REG_EQUIV notes. */
842 void
843 validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
845 rtx note;
846 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
847 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
848 if (REG_NOTE_KIND (note) == REG_EQUAL
849 || REG_NOTE_KIND (note) == REG_EQUIV)
850 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
853 /* Function called by note_uses to replace used subexpressions. */
854 struct validate_replace_src_data
856 rtx from; /* Old RTX */
857 rtx to; /* New RTX */
858 rtx_insn *insn; /* Insn in which substitution is occurring. */
861 static void
862 validate_replace_src_1 (rtx *x, void *data)
864 struct validate_replace_src_data *d
865 = (struct validate_replace_src_data *) data;
867 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
870 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
871 SET_DESTs. */
873 void
874 validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
876 struct validate_replace_src_data d;
878 d.from = from;
879 d.to = to;
880 d.insn = insn;
881 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
884 /* Try to simplify INSN.
885 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
886 pattern and return true if something was simplified. */
888 bool
889 validate_simplify_insn (rtx_insn *insn)
891 int i;
892 rtx pat = NULL;
893 rtx newpat = NULL;
895 pat = PATTERN (insn);
897 if (GET_CODE (pat) == SET)
899 newpat = simplify_rtx (SET_SRC (pat));
900 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
901 validate_change (insn, &SET_SRC (pat), newpat, 1);
902 newpat = simplify_rtx (SET_DEST (pat));
903 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
904 validate_change (insn, &SET_DEST (pat), newpat, 1);
906 else if (GET_CODE (pat) == PARALLEL)
907 for (i = 0; i < XVECLEN (pat, 0); i++)
909 rtx s = XVECEXP (pat, 0, i);
911 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
913 newpat = simplify_rtx (SET_SRC (s));
914 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
915 validate_change (insn, &SET_SRC (s), newpat, 1);
916 newpat = simplify_rtx (SET_DEST (s));
917 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
918 validate_change (insn, &SET_DEST (s), newpat, 1);
921 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
924 /* Return 1 if the insn using CC0 set by INSN does not contain
925 any ordered tests applied to the condition codes.
926 EQ and NE tests do not count. */
929 next_insn_tests_no_inequality (rtx_insn *insn)
931 rtx_insn *next = next_cc0_user (insn);
933 /* If there is no next insn, we have to take the conservative choice. */
934 if (next == 0)
935 return 0;
937 return (INSN_P (next)
938 && ! inequality_comparisons_p (PATTERN (next)));
941 /* Return 1 if OP is a valid general operand for machine mode MODE.
942 This is either a register reference, a memory reference,
943 or a constant. In the case of a memory reference, the address
944 is checked for general validity for the target machine.
946 Register and memory references must have mode MODE in order to be valid,
947 but some constants have no machine mode and are valid for any mode.
949 If MODE is VOIDmode, OP is checked for validity for whatever mode
950 it has.
952 The main use of this function is as a predicate in match_operand
953 expressions in the machine description. */
956 general_operand (rtx op, machine_mode mode)
958 enum rtx_code code = GET_CODE (op);
960 if (mode == VOIDmode)
961 mode = GET_MODE (op);
963 /* Don't accept CONST_INT or anything similar
964 if the caller wants something floating. */
965 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
966 && GET_MODE_CLASS (mode) != MODE_INT
967 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
968 return 0;
970 if (CONST_INT_P (op)
971 && mode != VOIDmode
972 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
973 return 0;
975 if (CONSTANT_P (op))
976 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
977 || mode == VOIDmode)
978 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
979 && targetm.legitimate_constant_p (mode == VOIDmode
980 ? GET_MODE (op)
981 : mode, op));
983 /* Except for certain constants with VOIDmode, already checked for,
984 OP's mode must match MODE if MODE specifies a mode. */
986 if (GET_MODE (op) != mode)
987 return 0;
989 if (code == SUBREG)
991 rtx sub = SUBREG_REG (op);
993 #ifdef INSN_SCHEDULING
994 /* On machines that have insn scheduling, we want all memory
995 references to be explicit, so outlaw paradoxical SUBREGs.
996 However, we must allow them after reload so that they can
997 get cleaned up by cleanup_subreg_operands. */
998 if (!reload_completed && MEM_P (sub)
999 && paradoxical_subreg_p (op))
1000 return 0;
1001 #endif
1002 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1003 may result in an incorrect reference. We should simplify all valid
1004 subregs of MEM anyway. But allow this after reload because we
1005 might be called from cleanup_subreg_operands.
1007 ??? This is a kludge. */
1008 if (!reload_completed && SUBREG_BYTE (op) != 0
1009 && MEM_P (sub))
1010 return 0;
1012 if (REG_P (sub)
1013 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1014 && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1015 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1016 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1017 /* LRA can generate some invalid SUBREGS just for matched
1018 operand reload presentation. LRA needs to treat them as
1019 valid. */
1020 && ! LRA_SUBREG_P (op))
1021 return 0;
1023 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1024 create such rtl, and we must reject it. */
1025 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1026 /* LRA can use subreg to store a floating point value in an
1027 integer mode. Although the floating point and the
1028 integer modes need the same number of hard registers, the
1029 size of floating point mode can be less than the integer
1030 mode. */
1031 && ! lra_in_progress
1032 && paradoxical_subreg_p (op))
1033 return 0;
1035 op = sub;
1036 code = GET_CODE (op);
1039 if (code == REG)
1040 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1041 || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1043 if (code == MEM)
1045 rtx y = XEXP (op, 0);
1047 if (! volatile_ok && MEM_VOLATILE_P (op))
1048 return 0;
1050 /* Use the mem's mode, since it will be reloaded thus. LRA can
1051 generate move insn with invalid addresses which is made valid
1052 and efficiently calculated by LRA through further numerous
1053 transformations. */
1054 if (lra_in_progress
1055 || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1056 return 1;
1059 return 0;
1062 /* Return 1 if OP is a valid memory address for a memory reference
1063 of mode MODE.
1065 The main use of this function is as a predicate in match_operand
1066 expressions in the machine description. */
1069 address_operand (rtx op, machine_mode mode)
1071 return memory_address_p (mode, op);
1074 /* Return 1 if OP is a register reference of mode MODE.
1075 If MODE is VOIDmode, accept a register in any mode.
1077 The main use of this function is as a predicate in match_operand
1078 expressions in the machine description. */
1081 register_operand (rtx op, machine_mode mode)
1083 if (GET_CODE (op) == SUBREG)
1085 rtx sub = SUBREG_REG (op);
1087 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1088 because it is guaranteed to be reloaded into one.
1089 Just make sure the MEM is valid in itself.
1090 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1091 but currently it does result from (SUBREG (REG)...) where the
1092 reg went on the stack.) */
1093 if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1094 return 0;
1096 else if (!REG_P (op))
1097 return 0;
1098 return general_operand (op, mode);
1101 /* Return 1 for a register in Pmode; ignore the tested mode. */
1104 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1106 return register_operand (op, Pmode);
1109 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1110 or a hard register. */
1113 scratch_operand (rtx op, machine_mode mode)
1115 if (GET_MODE (op) != mode && mode != VOIDmode)
1116 return 0;
1118 return (GET_CODE (op) == SCRATCH
1119 || (REG_P (op)
1120 && (lra_in_progress
1121 || (REGNO (op) < FIRST_PSEUDO_REGISTER
1122 && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1125 /* Return 1 if OP is a valid immediate operand for mode MODE.
1127 The main use of this function is as a predicate in match_operand
1128 expressions in the machine description. */
1131 immediate_operand (rtx op, machine_mode mode)
1133 /* Don't accept CONST_INT or anything similar
1134 if the caller wants something floating. */
1135 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1136 && GET_MODE_CLASS (mode) != MODE_INT
1137 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1138 return 0;
1140 if (CONST_INT_P (op)
1141 && mode != VOIDmode
1142 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1143 return 0;
1145 return (CONSTANT_P (op)
1146 && (GET_MODE (op) == mode || mode == VOIDmode
1147 || GET_MODE (op) == VOIDmode)
1148 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1149 && targetm.legitimate_constant_p (mode == VOIDmode
1150 ? GET_MODE (op)
1151 : mode, op));
1154 /* Returns 1 if OP is an operand that is a CONST_INT of mode MODE. */
1157 const_int_operand (rtx op, machine_mode mode)
1159 if (!CONST_INT_P (op))
1160 return 0;
1162 if (mode != VOIDmode
1163 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1164 return 0;
1166 return 1;
1169 #if TARGET_SUPPORTS_WIDE_INT
1170 /* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1171 of mode MODE. */
1173 const_scalar_int_operand (rtx op, machine_mode mode)
1175 if (!CONST_SCALAR_INT_P (op))
1176 return 0;
1178 if (CONST_INT_P (op))
1179 return const_int_operand (op, mode);
1181 if (mode != VOIDmode)
1183 scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
1184 int prec = GET_MODE_PRECISION (int_mode);
1185 int bitsize = GET_MODE_BITSIZE (int_mode);
1187 if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1188 return 0;
1190 if (prec == bitsize)
1191 return 1;
1192 else
1194 /* Multiword partial int. */
1195 HOST_WIDE_INT x
1196 = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1197 return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1200 return 1;
1203 /* Returns 1 if OP is an operand that is a constant integer or constant
1204 floating-point number of MODE. */
1207 const_double_operand (rtx op, machine_mode mode)
1209 return (GET_CODE (op) == CONST_DOUBLE)
1210 && (GET_MODE (op) == mode || mode == VOIDmode);
1212 #else
1213 /* Returns 1 if OP is an operand that is a constant integer or constant
1214 floating-point number of MODE. */
1217 const_double_operand (rtx op, machine_mode mode)
1219 /* Don't accept CONST_INT or anything similar
1220 if the caller wants something floating. */
1221 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1222 && GET_MODE_CLASS (mode) != MODE_INT
1223 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1224 return 0;
1226 return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1227 && (mode == VOIDmode || GET_MODE (op) == mode
1228 || GET_MODE (op) == VOIDmode));
1230 #endif
1231 /* Return 1 if OP is a general operand that is not an immediate
1232 operand of mode MODE. */
1235 nonimmediate_operand (rtx op, machine_mode mode)
1237 return (general_operand (op, mode) && ! CONSTANT_P (op));
1240 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1243 nonmemory_operand (rtx op, machine_mode mode)
1245 if (CONSTANT_P (op))
1246 return immediate_operand (op, mode);
1247 return register_operand (op, mode);
1250 /* Return 1 if OP is a valid operand that stands for pushing a
1251 value of mode MODE onto the stack.
1253 The main use of this function is as a predicate in match_operand
1254 expressions in the machine description. */
1257 push_operand (rtx op, machine_mode mode)
1259 unsigned int rounded_size = GET_MODE_SIZE (mode);
1261 #ifdef PUSH_ROUNDING
1262 rounded_size = PUSH_ROUNDING (rounded_size);
1263 #endif
1265 if (!MEM_P (op))
1266 return 0;
1268 if (mode != VOIDmode && GET_MODE (op) != mode)
1269 return 0;
1271 op = XEXP (op, 0);
1273 if (rounded_size == GET_MODE_SIZE (mode))
1275 if (GET_CODE (op) != STACK_PUSH_CODE)
1276 return 0;
1278 else
1280 if (GET_CODE (op) != PRE_MODIFY
1281 || GET_CODE (XEXP (op, 1)) != PLUS
1282 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1283 || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
1284 || INTVAL (XEXP (XEXP (op, 1), 1))
1285 != ((STACK_GROWS_DOWNWARD ? -1 : 1) * (int) rounded_size))
1286 return 0;
1289 return XEXP (op, 0) == stack_pointer_rtx;
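/* Editorial illustration (not part of the original recog.c): the RTL
   shapes accepted above.  On a STACK_GROWS_DOWNWARD target whose
   STACK_PUSH_CODE is PRE_DEC, a push destination looks like

       (mem:SI (pre_dec:P (reg:P sp)))

   and, when PUSH_ROUNDING pads the slot (say a 1-byte value rounded up to
   4 bytes), the PRE_MODIFY form

       (mem:QI (pre_modify:P (reg:P sp)
                             (plus:P (reg:P sp) (const_int -4))))

   is required instead; in both cases the address register must be the
   stack pointer, which is what the final comparison above checks.  */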
1292 /* Return 1 if OP is a valid operand that stands for popping a
1293 value of mode MODE off the stack.
1295 The main use of this function is as a predicate in match_operand
1296 expressions in the machine description. */
1299 pop_operand (rtx op, machine_mode mode)
1301 if (!MEM_P (op))
1302 return 0;
1304 if (mode != VOIDmode && GET_MODE (op) != mode)
1305 return 0;
1307 op = XEXP (op, 0);
1309 if (GET_CODE (op) != STACK_POP_CODE)
1310 return 0;
1312 return XEXP (op, 0) == stack_pointer_rtx;
1315 /* Return 1 if ADDR is a valid memory address
1316 for mode MODE in address space AS. */
1319 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
1320 rtx addr, addr_space_t as)
1322 #ifdef GO_IF_LEGITIMATE_ADDRESS
1323 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1324 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1325 return 0;
1327 win:
1328 return 1;
1329 #else
1330 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
1331 #endif
1334 /* Return 1 if OP is a valid memory reference with mode MODE,
1335 including a valid address.
1337 The main use of this function is as a predicate in match_operand
1338 expressions in the machine description. */
1341 memory_operand (rtx op, machine_mode mode)
1343 rtx inner;
1345 if (! reload_completed)
1346 /* Note that no SUBREG is a memory operand before end of reload pass,
1347 because (SUBREG (MEM...)) forces reloading into a register. */
1348 return MEM_P (op) && general_operand (op, mode);
1350 if (mode != VOIDmode && GET_MODE (op) != mode)
1351 return 0;
1353 inner = op;
1354 if (GET_CODE (inner) == SUBREG)
1355 inner = SUBREG_REG (inner);
1357 return (MEM_P (inner) && general_operand (op, mode));
1360 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1361 that is, a memory reference whose address is a general_operand. */
1364 indirect_operand (rtx op, machine_mode mode)
1366 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1367 if (! reload_completed
1368 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1370 int offset = SUBREG_BYTE (op);
1371 rtx inner = SUBREG_REG (op);
1373 if (mode != VOIDmode && GET_MODE (op) != mode)
1374 return 0;
1376 /* The only way that we can have a general_operand as the resulting
1377 address is if OFFSET is zero and the address already is an operand
1378 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1379 operand. */
1381 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1382 || (GET_CODE (XEXP (inner, 0)) == PLUS
1383 && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
1384 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1385 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1388 return (MEM_P (op)
1389 && memory_operand (op, mode)
1390 && general_operand (XEXP (op, 0), Pmode));
1393 /* Return 1 if this is an ordered comparison operator (not including
1394 ORDERED and UNORDERED). */
1397 ordered_comparison_operator (rtx op, machine_mode mode)
1399 if (mode != VOIDmode && GET_MODE (op) != mode)
1400 return false;
1401 switch (GET_CODE (op))
1403 case EQ:
1404 case NE:
1405 case LT:
1406 case LTU:
1407 case LE:
1408 case LEU:
1409 case GT:
1410 case GTU:
1411 case GE:
1412 case GEU:
1413 return true;
1414 default:
1415 return false;
1419 /* Return 1 if this is a comparison operator. This allows the use of
1420 MATCH_OPERATOR to recognize all the branch insns. */
1423 comparison_operator (rtx op, machine_mode mode)
1425 return ((mode == VOIDmode || GET_MODE (op) == mode)
1426 && COMPARISON_P (op));
1429 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1432 extract_asm_operands (rtx body)
1434 rtx tmp;
1435 switch (GET_CODE (body))
1437 case ASM_OPERANDS:
1438 return body;
1440 case SET:
1441 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1442 tmp = SET_SRC (body);
1443 if (GET_CODE (tmp) == ASM_OPERANDS)
1444 return tmp;
1445 break;
1447 case PARALLEL:
1448 tmp = XVECEXP (body, 0, 0);
1449 if (GET_CODE (tmp) == ASM_OPERANDS)
1450 return tmp;
1451 if (GET_CODE (tmp) == SET)
1453 tmp = SET_SRC (tmp);
1454 if (GET_CODE (tmp) == ASM_OPERANDS)
1455 return tmp;
1457 break;
1459 default:
1460 break;
1462 return NULL;
1465 /* If BODY is an insn body that uses ASM_OPERANDS,
1466 return the number of operands (both input and output) in the insn.
1467 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1468 return 0.
1469 Otherwise return -1. */
1472 asm_noperands (const_rtx body)
1474 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1475 int i, n_sets = 0;
1477 if (asm_op == NULL)
1479 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1480 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1482 /* body is [(asm_input ...) (clobber (reg ...))...]. */
1483 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1484 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1485 return -1;
1486 return 0;
1488 return -1;
1491 if (GET_CODE (body) == SET)
1492 n_sets = 1;
1493 else if (GET_CODE (body) == PARALLEL)
1495 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1497 /* Multiple output operands, or 1 output plus some clobbers:
1498 body is
1499 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1500 /* Count backwards through CLOBBERs to determine number of SETs. */
1501 for (i = XVECLEN (body, 0); i > 0; i--)
1503 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1504 break;
1505 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1506 return -1;
1509 /* N_SETS is now number of output operands. */
1510 n_sets = i;
1512 /* Verify that all the SETs we have
1513 came from a single original asm_operands insn
1514 (so that invalid combinations are blocked). */
1515 for (i = 0; i < n_sets; i++)
1517 rtx elt = XVECEXP (body, 0, i);
1518 if (GET_CODE (elt) != SET)
1519 return -1;
1520 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1521 return -1;
1522 /* If these ASM_OPERANDS rtx's came from different original insns
1523 then they aren't allowed together. */
1524 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1525 != ASM_OPERANDS_INPUT_VEC (asm_op))
1526 return -1;
1529 else
1531 /* 0 outputs, but some clobbers:
1532 body is [(asm_operands ...) (clobber (reg ...))...]. */
1533 /* Make sure all the other parallel things really are clobbers. */
1534 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1535 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1536 return -1;
1540 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
1541 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
1544 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1545 copy its operands (both input and output) into the vector OPERANDS,
1546 the locations of the operands within the insn into the vector OPERAND_LOCS,
1547 and the constraints for the operands into CONSTRAINTS.
1548 Write the modes of the operands into MODES.
1549 Write the location info into LOC.
1550 Return the assembler-template.
1551 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1552 return the basic assembly string.
1554 If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1555 we don't store that info. */
1557 const char *
1558 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1559 const char **constraints, machine_mode *modes,
1560 location_t *loc)
1562 int nbase = 0, n, i;
1563 rtx asmop;
1565 switch (GET_CODE (body))
1567 case ASM_OPERANDS:
1568 /* Zero output asm: BODY is (asm_operands ...). */
1569 asmop = body;
1570 break;
1572 case SET:
1573 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
1574 asmop = SET_SRC (body);
1576 /* The output is in the SET.
1577 Its constraint is in the ASM_OPERANDS itself. */
1578 if (operands)
1579 operands[0] = SET_DEST (body);
1580 if (operand_locs)
1581 operand_locs[0] = &SET_DEST (body);
1582 if (constraints)
1583 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1584 if (modes)
1585 modes[0] = GET_MODE (SET_DEST (body));
1586 nbase = 1;
1587 break;
1589 case PARALLEL:
1591 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1593 asmop = XVECEXP (body, 0, 0);
1594 if (GET_CODE (asmop) == SET)
1596 asmop = SET_SRC (asmop);
1598 /* At least one output, plus some CLOBBERs. The outputs are in
1599 the SETs. Their constraints are in the ASM_OPERANDS itself. */
1600 for (i = 0; i < nparallel; i++)
1602 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1603 break; /* Past last SET */
1604 if (operands)
1605 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1606 if (operand_locs)
1607 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1608 if (constraints)
1609 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1610 if (modes)
1611 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1613 nbase = i;
1615 else if (GET_CODE (asmop) == ASM_INPUT)
1617 if (loc)
1618 *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
1619 return XSTR (asmop, 0);
1621 break;
1624 default:
1625 gcc_unreachable ();
1628 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
1629 for (i = 0; i < n; i++)
1631 if (operand_locs)
1632 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
1633 if (operands)
1634 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
1635 if (constraints)
1636 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1637 if (modes)
1638 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1640 nbase += n;
1642 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
1643 for (i = 0; i < n; i++)
1645 if (operand_locs)
1646 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
1647 if (operands)
1648 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
1649 if (constraints)
1650 constraints[nbase + i] = "";
1651 if (modes)
1652 modes[nbase + i] = Pmode;
1655 if (loc)
1656 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
1658 return ASM_OPERANDS_TEMPLATE (asmop);
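/* Editorial sketch (not part of the original recog.c): how the two
   routines above are typically combined (check_asm_operands earlier in
   this file is the authoritative in-tree example):

       int n = asm_noperands (PATTERN (insn));
       if (n > 0)
         {
           rtx *ops = XALLOCAVEC (rtx, n);
           const char **cons = XALLOCAVEC (const char *, n);
           decode_asm_operands (PATTERN (insn), ops, NULL, cons, NULL, NULL);
         }

   After the call, OPS[I] and CONS[I] describe operand I of the asm.  A
   return of -1 from asm_noperands means the body is not an asm with
   operands; 0 means an asm without operands, so there is nothing to
   decode.  */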
1661 /* Parse inline assembly string STRING and determine which operands are
1662 referenced by % markers. For the first NOPERANDS operands, set USED[I]
1663 to true if operand I is referenced.
1665 This is intended to distinguish barrier-like asms such as:
1667 asm ("" : "=m" (...));
1669 from real references such as:
1671 asm ("sw\t$0, %0" : "=m" (...)); */
1673 void
1674 get_referenced_operands (const char *string, bool *used,
1675 unsigned int noperands)
1677 memset (used, 0, sizeof (bool) * noperands);
1678 const char *p = string;
1679 while (*p)
1680 switch (*p)
1682 case '%':
1683 p += 1;
1684 /* A letter followed by a digit indicates an operand number. */
1685 if (ISALPHA (p[0]) && ISDIGIT (p[1]))
1686 p += 1;
1687 if (ISDIGIT (*p))
1689 char *endptr;
1690 unsigned long opnum = strtoul (p, &endptr, 10);
1691 if (endptr != p && opnum < noperands)
1692 used[opnum] = true;
1693 p = endptr;
1695 else
1696 p += 1;
1697 break;
1699 default:
1700 p++;
1701 break;
1705 /* Check if an asm_operand matches its constraints.
1706 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1709 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
1711 int result = 0;
1712 bool incdec_ok = false;
1714 /* Use constrain_operands after reload. */
1715 gcc_assert (!reload_completed);
1717 /* Empty constraint string is the same as "X,...,X", i.e. X for as
1718 many alternatives as required to match the other operands. */
1719 if (*constraint == '\0')
1720 result = 1;
1722 while (*constraint)
1724 enum constraint_num cn;
1725 char c = *constraint;
1726 int len;
1727 switch (c)
1729 case ',':
1730 constraint++;
1731 continue;
1733 case '0': case '1': case '2': case '3': case '4':
1734 case '5': case '6': case '7': case '8': case '9':
1735 /* If caller provided constraints pointer, look up
1736 the matching constraint. Otherwise, our caller should have
1737 given us the proper matching constraint, but we can't
1738 actually fail the check if they didn't. Indicate that
1739 results are inconclusive. */
1740 if (constraints)
1742 char *end;
1743 unsigned long match;
1745 match = strtoul (constraint, &end, 10);
1746 if (!result)
1747 result = asm_operand_ok (op, constraints[match], NULL);
1748 constraint = (const char *) end;
1750 else
1753 constraint++;
1754 while (ISDIGIT (*constraint));
1755 if (! result)
1756 result = -1;
1758 continue;
1760 /* The rest of the compiler assumes that reloading the address
1761 of a MEM into a register will make it fit an 'o' constraint.
1762 That is, if it sees a MEM operand for an 'o' constraint,
1763 it assumes that (mem (base-reg)) will fit.
1765 That assumption fails on targets that don't have offsettable
1766 addresses at all. We therefore need to treat 'o' asm
1767 constraints as a special case and only accept operands that
1768 are already offsettable, thus proving that at least one
1769 offsettable address exists. */
1770 case 'o': /* offsettable */
1771 if (offsettable_nonstrict_memref_p (op))
1772 result = 1;
1773 break;
1775 case 'g':
1776 if (general_operand (op, VOIDmode))
1777 result = 1;
1778 break;
1780 case '<':
1781 case '>':
1782 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
1783 to exist, excepting those that expand_call created. Further,
1784 on some machines which do not have generalized auto inc/dec,
1785 an inc/dec is not a memory_operand.
1787 Match any memory and hope things are resolved after reload. */
1788 incdec_ok = true;
1789 /* FALLTHRU */
1790 default:
1791 cn = lookup_constraint (constraint);
1792 switch (get_constraint_type (cn))
1794 case CT_REGISTER:
1795 if (!result
1796 && reg_class_for_constraint (cn) != NO_REGS
1797 && GET_MODE (op) != BLKmode
1798 && register_operand (op, VOIDmode))
1799 result = 1;
1800 break;
1802 case CT_CONST_INT:
1803 if (!result
1804 && CONST_INT_P (op)
1805 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
1806 result = 1;
1807 break;
1809 case CT_MEMORY:
1810 case CT_SPECIAL_MEMORY:
1811 /* Every memory operand can be reloaded to fit. */
1812 result = result || memory_operand (op, VOIDmode);
1813 break;
1815 case CT_ADDRESS:
1816 /* Every address operand can be reloaded to fit. */
1817 result = result || address_operand (op, VOIDmode);
1818 break;
1820 case CT_FIXED_FORM:
1821 result = result || constraint_satisfied_p (op, cn);
1822 break;
1824 break;
1826 len = CONSTRAINT_LEN (c, constraint);
1828 constraint++;
1829 while (--len && *constraint);
1830 if (len)
1831 return 0;
1834 /* For operands without < or > constraints reject side-effects. */
1835 if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
1836 switch (GET_CODE (XEXP (op, 0)))
1838 case PRE_INC:
1839 case POST_INC:
1840 case PRE_DEC:
1841 case POST_DEC:
1842 case PRE_MODIFY:
1843 case POST_MODIFY:
1844 return 0;
1845 default:
1846 break;
1849 return result;
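/* Editorial sketch (not part of the original recog.c): pre-reload use of
   asm_operand_ok.  A pass can ask whether OP could satisfy a constraint
   string such as "rm" before committing to a substitution:

       if (asm_operand_ok (op, "rm", NULL) > 0)
         substitute_operand ();

   A return of 0 means the operand is definitely unacceptable; a negative
   return (possible only for matching-digit constraints when no
   CONSTRAINTS array is supplied) means the check was inconclusive.
   SUBSTITUTE_OPERAND is a hypothetical caller action.  */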
1852 /* Given an rtx *P, if it is a sum containing an integer constant term,
1853 return the location (type rtx *) of the pointer to that constant term.
1854 Otherwise, return a null pointer. */
1856 rtx *
1857 find_constant_term_loc (rtx *p)
1859 rtx *tem;
1860 enum rtx_code code = GET_CODE (*p);
1862 /* If *P IS such a constant term, P is its location. */
1864 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1865 || code == CONST)
1866 return p;
1868 /* Otherwise, if not a sum, it has no constant term. */
1870 if (GET_CODE (*p) != PLUS)
1871 return 0;
1873 /* If one of the summands is constant, return its location. */
1875 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1876 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1877 return p;
1879 /* Otherwise, check each summand for containing a constant term. */
1881 if (XEXP (*p, 0) != 0)
1883 tem = find_constant_term_loc (&XEXP (*p, 0));
1884 if (tem != 0)
1885 return tem;
1888 if (XEXP (*p, 1) != 0)
1890 tem = find_constant_term_loc (&XEXP (*p, 1));
1891 if (tem != 0)
1892 return tem;
1895 return 0;
1898 /* Return 1 if OP is a memory reference
1899 whose address contains no side effects
1900 and remains valid after the addition
1901 of a positive integer less than the
1902 size of the object being referenced.
1904 We assume that the original address is valid and do not check it.
1906 This uses strict_memory_address_p as a subroutine, so
1907 don't use it before reload. */
1910 offsettable_memref_p (rtx op)
1912 return ((MEM_P (op))
1913 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1914 MEM_ADDR_SPACE (op)));
1917 /* Similar, but don't require a strictly valid mem ref:
1918 consider pseudo-regs valid as index or base regs. */
1921 offsettable_nonstrict_memref_p (rtx op)
1923 return ((MEM_P (op))
1924 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1925 MEM_ADDR_SPACE (op)));
1928 /* Return 1 if Y is a memory address which contains no side effects
1929 and would remain valid for address space AS after the addition of
1930 a positive integer less than the size of that mode.
1932 We assume that the original address is valid and do not check it.
1933 We do check that it is valid for narrower modes.
1935 If STRICTP is nonzero, we require a strictly valid address,
1936 for the sake of use in reload.c. */
1939 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
1940 addr_space_t as)
1942 enum rtx_code ycode = GET_CODE (y);
1943 rtx z;
1944 rtx y1 = y;
1945 rtx *y2;
1946 int (*addressp) (machine_mode, rtx, addr_space_t) =
1947 (strictp ? strict_memory_address_addr_space_p
1948 : memory_address_addr_space_p);
1949 unsigned int mode_sz = GET_MODE_SIZE (mode);
1951 if (CONSTANT_ADDRESS_P (y))
1952 return 1;
1954 /* Adjusting an offsettable address involves changing to a narrower mode.
1955 Make sure that's OK. */
1957 if (mode_dependent_address_p (y, as))
1958 return 0;
1960 machine_mode address_mode = GET_MODE (y);
1961 if (address_mode == VOIDmode)
1962 address_mode = targetm.addr_space.address_mode (as);
1963 #ifdef POINTERS_EXTEND_UNSIGNED
1964 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
1965 #endif
1967 /* ??? How much offset does an offsettable BLKmode reference need?
1968 Clearly that depends on the situation in which it's being used.
1969 However, the current situation in which we test 0xffffffff is
1970 less than ideal. Caveat user. */
1971 if (mode_sz == 0)
1972 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1974 /* If the expression contains a constant term,
1975 see if it remains valid when max possible offset is added. */
1977 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1979 int good;
1981 y1 = *y2;
1982 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
1983 /* Use QImode because an odd displacement may be automatically invalid
1984 for any wider mode. But it should be valid for a single byte. */
1985 good = (*addressp) (QImode, y, as);
1987 /* In any case, restore old contents of memory. */
1988 *y2 = y1;
1989 return good;
1992 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1993 return 0;
1995 /* The offset added here is chosen as the maximum offset that
1996 any instruction could need to add when operating on something
1997 of the specified mode. We assume that if Y and Y+c are
1998 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1999 go inside a LO_SUM here, so we do so as well. */
2000 if (GET_CODE (y) == LO_SUM
2001 && mode != BLKmode
2002 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
2003 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2004 plus_constant (address_mode, XEXP (y, 1),
2005 mode_sz - 1));
2006 #ifdef POINTERS_EXTEND_UNSIGNED
2007 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2008 else if (POINTERS_EXTEND_UNSIGNED > 0
2009 && GET_CODE (y) == ZERO_EXTEND
2010 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2011 z = gen_rtx_ZERO_EXTEND (address_mode,
2012 plus_constant (pointer_mode, XEXP (y, 0),
2013 mode_sz - 1));
2014 #endif
2015 else
2016 z = plus_constant (address_mode, y, mode_sz - 1);
2018 /* Use QImode because an odd displacement may be automatically invalid
2019 for any wider mode. But it should be valid for a single byte. */
2020 return (*addressp) (QImode, z, as);
2023 /* Return 1 if ADDR is an address-expression whose effect depends
2024 on the mode of the memory reference it is used in.
2026 ADDRSPACE is the address space associated with the address.
2028 Autoincrement addressing is a typical example of mode-dependence
2029 because the amount of the increment depends on the mode. */
2031 bool
2032 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2034 /* Auto-increment addressing with anything other than post_modify
2035 or pre_modify always introduces a mode dependency. Catch such
2036 cases now instead of deferring to the target. */
2037 if (GET_CODE (addr) == PRE_INC
2038 || GET_CODE (addr) == POST_INC
2039 || GET_CODE (addr) == PRE_DEC
2040 || GET_CODE (addr) == POST_DEC)
2041 return true;
2043 return targetm.mode_dependent_address_p (addr, addrspace);
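/* For instance, (post_inc (reg R)) is always mode-dependent: a QImode
   reference through it advances R by one byte while an SImode reference
   advances it by four (on a typical target), so the effect of the address
   depends on the mode of the memory reference.  */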
2046 /* Return true if boolean attribute ATTR is supported. */
2048 static bool
2049 have_bool_attr (bool_attr attr)
2051 switch (attr)
2053 case BA_ENABLED:
2054 return HAVE_ATTR_enabled;
2055 case BA_PREFERRED_FOR_SIZE:
2056 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2057 case BA_PREFERRED_FOR_SPEED:
2058 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2060 gcc_unreachable ();
2063 /* Return the value of ATTR for instruction INSN. */
2065 static bool
2066 get_bool_attr (rtx_insn *insn, bool_attr attr)
2068 switch (attr)
2070 case BA_ENABLED:
2071 return get_attr_enabled (insn);
2072 case BA_PREFERRED_FOR_SIZE:
2073 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2074 case BA_PREFERRED_FOR_SPEED:
2075 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2077 gcc_unreachable ();
2080 /* Like get_bool_attr_mask, but don't use the cache. */
2082 static alternative_mask
2083 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2085 /* Temporarily install enough information for get_attr_<foo> to assume
2086 that the insn operands are already cached. As above, the attribute
2087 mustn't depend on the values of operands, so we don't provide their
2088 real values here. */
2089 rtx_insn *old_insn = recog_data.insn;
2090 int old_alternative = which_alternative;
2092 recog_data.insn = insn;
2093 alternative_mask mask = ALL_ALTERNATIVES;
2094 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2095 for (int i = 0; i < n_alternatives; i++)
2097 which_alternative = i;
2098 if (!get_bool_attr (insn, attr))
2099 mask &= ~ALTERNATIVE_BIT (i);
2102 recog_data.insn = old_insn;
2103 which_alternative = old_alternative;
2104 return mask;
2107 /* Return the mask of operand alternatives that are allowed for INSN
2108 by boolean attribute ATTR. This mask depends only on INSN and on
2109 the current target; it does not depend on things like the values of
2110 operands. */
2112 static alternative_mask
2113 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2115 /* Quick exit for asms and for targets that don't use these attributes. */
2116 int code = INSN_CODE (insn);
2117 if (code < 0 || !have_bool_attr (attr))
2118 return ALL_ALTERNATIVES;
2120 /* Calling get_attr_<foo> can be expensive, so cache the mask
2121 for speed. */
2122 if (!this_target_recog->x_bool_attr_masks[code][attr])
2123 this_target_recog->x_bool_attr_masks[code][attr]
2124 = get_bool_attr_mask_uncached (insn, attr);
2125 return this_target_recog->x_bool_attr_masks[code][attr];
2128 /* Return the set of alternatives of INSN that are allowed by the current
2129 target. */
2131 alternative_mask
2132 get_enabled_alternatives (rtx_insn *insn)
2134 return get_bool_attr_mask (insn, BA_ENABLED);
2137 /* Return the set of alternatives of INSN that are allowed by the current
2138 target and are preferred for the current size/speed optimization
2139 choice. */
2141 alternative_mask
2142 get_preferred_alternatives (rtx_insn *insn)
2144 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2145 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2146 else
2147 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2150 /* Return the set of alternatives of INSN that are allowed by the current
2151 target and are preferred for the size/speed optimization choice
2152 associated with BB. Passing a separate BB is useful if INSN has not
2153 been emitted yet or if we are considering moving it to a different
2154 block. */
2156 alternative_mask
2157 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2159 if (optimize_bb_for_speed_p (bb))
2160 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2161 else
2162 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
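/* A minimal usage sketch, assuming INSN has already been recognized:

     extract_insn (insn);
     alternative_mask ok = get_preferred_alternatives (insn);
     for (int alt = 0; alt < recog_data.n_alternatives; alt++)
       if (TEST_BIT (ok, alt))
	 ...consider alternative ALT...

   Only alternatives that are both enabled and preferred for the size/speed
   choice of INSN's block remain set in the mask.  */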
2165 /* Assert that the cached boolean attributes for INSN are still accurate.
2166 The backend is required to define these attributes in a way that only
2167 depends on the current target (rather than operands, compiler phase,
2168 etc.). */
2170 bool
2171 check_bool_attrs (rtx_insn *insn)
2173 int code = INSN_CODE (insn);
2174 if (code >= 0)
2175 for (int i = 0; i <= BA_LAST; ++i)
2177 enum bool_attr attr = (enum bool_attr) i;
2178 if (this_target_recog->x_bool_attr_masks[code][attr])
2179 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2180 == get_bool_attr_mask_uncached (insn, attr));
2182 return true;
2185 /* Like extract_insn, but save the extracted insn and do not extract again
2186 when called again for the same insn, expecting that recog_data still
2187 contains the valid information. This is used primarily by the gen_attr
2188 infrastructure, which often extracts the same insn again and again. */
2189 void
2190 extract_insn_cached (rtx_insn *insn)
2192 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2193 return;
2194 extract_insn (insn);
2195 recog_data.insn = insn;
2198 /* Do uncached extract_insn, constrain_operands and complain about failures.
2199 This should be used when extracting a pre-existing constrained instruction
2200 if the caller wants to know which alternative was chosen. */
2201 void
2202 extract_constrain_insn (rtx_insn *insn)
2204 extract_insn (insn);
2205 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2206 fatal_insn_not_found (insn);
2209 /* Do cached extract_insn, constrain_operands and complain about failures.
2210 Used by insn_attrtab. */
2211 void
2212 extract_constrain_insn_cached (rtx_insn *insn)
2214 extract_insn_cached (insn);
2215 if (which_alternative == -1
2216 && !constrain_operands (reload_completed,
2217 get_enabled_alternatives (insn)))
2218 fatal_insn_not_found (insn);
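/* For example, an output or attribute routine that needs to know which
   alternative matched might do, as a sketch:

     extract_constrain_insn_cached (insn);
     switch (which_alternative)
       {
       case 0: ...use the form of the first constraint alternative...; break;
       case 1: ...use the form of the second alternative...; break;
       default: gcc_unreachable ();
       }
*/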
2221 /* Do cached constrain_operands on INSN and complain about failures. */
2223 constrain_operands_cached (rtx_insn *insn, int strict)
2225 if (which_alternative == -1)
2226 return constrain_operands (strict, get_enabled_alternatives (insn));
2227 else
2228 return 1;
2231 /* Analyze INSN and fill in recog_data. */
2233 void
2234 extract_insn (rtx_insn *insn)
2236 int i;
2237 int icode;
2238 int noperands;
2239 rtx body = PATTERN (insn);
2241 recog_data.n_operands = 0;
2242 recog_data.n_alternatives = 0;
2243 recog_data.n_dups = 0;
2244 recog_data.is_asm = false;
2246 switch (GET_CODE (body))
2248 case USE:
2249 case CLOBBER:
2250 case ASM_INPUT:
2251 case ADDR_VEC:
2252 case ADDR_DIFF_VEC:
2253 case VAR_LOCATION:
2254 return;
2256 case SET:
2257 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2258 goto asm_insn;
2259 else
2260 goto normal_insn;
2261 case PARALLEL:
2262 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2263 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2264 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2265 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2266 goto asm_insn;
2267 else
2268 goto normal_insn;
2269 case ASM_OPERANDS:
2270 asm_insn:
2271 recog_data.n_operands = noperands = asm_noperands (body);
2272 if (noperands >= 0)
2274 /* This insn is an `asm' with operands. */
2276 /* expand_asm_operands makes sure there aren't too many operands. */
2277 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2279 /* Now get the operand values and constraints out of the insn. */
2280 decode_asm_operands (body, recog_data.operand,
2281 recog_data.operand_loc,
2282 recog_data.constraints,
2283 recog_data.operand_mode, NULL);
2284 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2285 if (noperands > 0)
2287 const char *p = recog_data.constraints[0];
2288 recog_data.n_alternatives = 1;
2289 while (*p)
2290 recog_data.n_alternatives += (*p++ == ',');
2292 recog_data.is_asm = true;
2293 break;
2295 fatal_insn_not_found (insn);
2297 default:
2298 normal_insn:
2299 /* Ordinary insn: recognize it, get the operands via insn_extract
2300 and get the constraints. */
2302 icode = recog_memoized (insn);
2303 if (icode < 0)
2304 fatal_insn_not_found (insn);
2306 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2307 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2308 recog_data.n_dups = insn_data[icode].n_dups;
2310 insn_extract (insn);
2312 for (i = 0; i < noperands; i++)
2314 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2315 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2316 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2317 /* A VOIDmode match_operand gets its mode from its real operand. */
2318 if (recog_data.operand_mode[i] == VOIDmode)
2319 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2322 for (i = 0; i < noperands; i++)
2323 recog_data.operand_type[i]
2324 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2325 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2326 : OP_IN);
2328 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2330 recog_data.insn = NULL;
2331 which_alternative = -1;
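/* A sketch of what a caller sees after extract_insn for a recognized
   two-operand pattern such as (set (reg A) (reg B)), assuming constraints
   beginning with "=" and "" respectively:

     recog_data.n_operands     == 2
     recog_data.operand[0]     == the destination rtx, operand_type OP_OUT
     recog_data.operand[1]     == the source rtx, operand_type OP_IN

   The exact operand count, modes and constraints come from the pattern
   matched in the target's .md file.  */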
2334 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2335 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2336 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2337 has N_OPERANDS entries. */
2339 void
2340 preprocess_constraints (int n_operands, int n_alternatives,
2341 const char **constraints,
2342 operand_alternative *op_alt_base)
2344 for (int i = 0; i < n_operands; i++)
2346 int j;
2347 struct operand_alternative *op_alt;
2348 const char *p = constraints[i];
2350 op_alt = op_alt_base;
2352 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2354 op_alt[i].cl = NO_REGS;
2355 op_alt[i].constraint = p;
2356 op_alt[i].matches = -1;
2357 op_alt[i].matched = -1;
2359 if (*p == '\0' || *p == ',')
2361 op_alt[i].anything_ok = 1;
2362 continue;
2365 for (;;)
2367 char c = *p;
2368 if (c == '#')
2370 c = *++p;
2371 while (c != ',' && c != '\0');
2372 if (c == ',' || c == '\0')
2374 p++;
2375 break;
2378 switch (c)
2380 case '?':
2381 op_alt[i].reject += 6;
2382 break;
2383 case '!':
2384 op_alt[i].reject += 600;
2385 break;
2386 case '&':
2387 op_alt[i].earlyclobber = 1;
2388 break;
2390 case '0': case '1': case '2': case '3': case '4':
2391 case '5': case '6': case '7': case '8': case '9':
2393 char *end;
2394 op_alt[i].matches = strtoul (p, &end, 10);
2395 op_alt[op_alt[i].matches].matched = i;
2396 p = end;
2398 continue;
2400 case 'X':
2401 op_alt[i].anything_ok = 1;
2402 break;
2404 case 'g':
2405 op_alt[i].cl =
2406 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2407 break;
2409 default:
2410 enum constraint_num cn = lookup_constraint (p);
2411 enum reg_class cl;
2412 switch (get_constraint_type (cn))
2414 case CT_REGISTER:
2415 cl = reg_class_for_constraint (cn);
2416 if (cl != NO_REGS)
2417 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2418 break;
2420 case CT_CONST_INT:
2421 break;
2423 case CT_MEMORY:
2424 case CT_SPECIAL_MEMORY:
2425 op_alt[i].memory_ok = 1;
2426 break;
2428 case CT_ADDRESS:
2429 op_alt[i].is_address = 1;
2430 op_alt[i].cl
2431 = (reg_class_subunion
2432 [(int) op_alt[i].cl]
2433 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2434 ADDRESS, SCRATCH)]);
2435 break;
2437 case CT_FIXED_FORM:
2438 break;
2440 break;
2442 p += CONSTRAINT_LEN (c, p);
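/* As an illustration, for operand 0 of a two-alternative insn whose
   constraint string is "r,m", the loop above records roughly:

     op_alt_base[0 * n_operands + 0].cl        == GENERAL_REGS  (from 'r')
     op_alt_base[1 * n_operands + 0].memory_ok == 1             (from 'm')

   with each entry's .constraint pointing into the original string.  */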
2448 /* Return an array of operand_alternative structures for
2449 instruction ICODE. */
2451 const operand_alternative *
2452 preprocess_insn_constraints (unsigned int icode)
2454 gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2455 if (this_target_recog->x_op_alt[icode])
2456 return this_target_recog->x_op_alt[icode];
2458 int n_operands = insn_data[icode].n_operands;
2459 if (n_operands == 0)
2460 return 0;
2461 /* Always provide at least one alternative so that which_op_alt ()
2462 works correctly. If the instruction has 0 alternatives (i.e. all
2463 constraint strings are empty) then each operand in this alternative
2464 will have anything_ok set. */
2465 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2466 int n_entries = n_operands * n_alternatives;
2468 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2469 const char **constraints = XALLOCAVEC (const char *, n_operands);
2471 for (int i = 0; i < n_operands; ++i)
2472 constraints[i] = insn_data[icode].operand[i].constraint;
2473 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);
2475 this_target_recog->x_op_alt[icode] = op_alt;
2476 return op_alt;
2479 /* After calling extract_insn, you can use this function to extract some
2480 information from the constraint strings into a more usable form.
2481 The collected data is stored in recog_op_alt. */
2483 void
2484 preprocess_constraints (rtx_insn *insn)
2486 int icode = INSN_CODE (insn);
2487 if (icode >= 0)
2488 recog_op_alt = preprocess_insn_constraints (icode);
2489 else
2491 int n_operands = recog_data.n_operands;
2492 int n_alternatives = recog_data.n_alternatives;
2493 int n_entries = n_operands * n_alternatives;
2494 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2495 preprocess_constraints (n_operands, n_alternatives,
2496 recog_data.constraints, asm_op_alt);
2497 recog_op_alt = asm_op_alt;
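/* A sketch of the usual calling sequence:

     extract_insn (insn);
     constrain_operands_cached (insn, reload_completed);
     preprocess_constraints (insn);

   after which recog_op_alt describes every operand of every alternative,
   and the entries for the chosen alternative can be reached through
   which_op_alt ().  */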
2501 /* Check the operands of an insn against the insn's operand constraints
2502 and return 1 if they match any of the alternatives in ALTERNATIVES.
2504 The information about the insn's operands, constraints, operand modes
2505 etc. is obtained from the global variables set up by extract_insn.
2507 WHICH_ALTERNATIVE is set to a number which indicates which
2508 alternative of constraints was matched: 0 for the first alternative,
2509 1 for the next, etc.
2511 In addition, when two operands are required to match
2512 and it happens that the output operand is (reg) while the
2513 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2514 make the output operand look like the input.
2515 This is because the output operand is the one the template will print.
2517 This is used in final, just before printing the assembler code and by
2518 the routines that determine an insn's attribute.
2520 If STRICT is positive, it means that we have been
2521 called after reload has been completed. In that case, we must
2522 do all checks strictly. If it is zero, it means that we have been called
2523 before reload has completed. In that case, we first try to see if we can
2524 find an alternative that matches strictly. If not, we try again, this
2525 time assuming that reload will fix up the insn. This provides a "best
2526 guess" for the alternative and is used to compute attributes of insns prior
2527 to reload. A negative value of STRICT is used for this internal call. */
2529 struct funny_match
2531 int this_op, other;
2535 constrain_operands (int strict, alternative_mask alternatives)
2537 const char *constraints[MAX_RECOG_OPERANDS];
2538 int matching_operands[MAX_RECOG_OPERANDS];
2539 int earlyclobber[MAX_RECOG_OPERANDS];
2540 int c;
2542 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2543 int funny_match_index;
2545 which_alternative = 0;
2546 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2547 return 1;
2549 for (c = 0; c < recog_data.n_operands; c++)
2551 constraints[c] = recog_data.constraints[c];
2552 matching_operands[c] = -1;
2557 int seen_earlyclobber_at = -1;
2558 int opno;
2559 int lose = 0;
2560 funny_match_index = 0;
2562 if (!TEST_BIT (alternatives, which_alternative))
2564 int i;
2566 for (i = 0; i < recog_data.n_operands; i++)
2567 constraints[i] = skip_alternative (constraints[i]);
2569 which_alternative++;
2570 continue;
2573 for (opno = 0; opno < recog_data.n_operands; opno++)
2575 rtx op = recog_data.operand[opno];
2576 machine_mode mode = GET_MODE (op);
2577 const char *p = constraints[opno];
2578 int offset = 0;
2579 int win = 0;
2580 int val;
2581 int len;
2583 earlyclobber[opno] = 0;
2585 /* A unary operator may be accepted by the predicate, but it
2586 is irrelevant for matching constraints. */
2587 if (UNARY_P (op))
2588 op = XEXP (op, 0);
2590 if (GET_CODE (op) == SUBREG)
2592 if (REG_P (SUBREG_REG (op))
2593 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2594 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2595 GET_MODE (SUBREG_REG (op)),
2596 SUBREG_BYTE (op),
2597 GET_MODE (op));
2598 op = SUBREG_REG (op);
2601 /* An empty constraint or empty alternative
2602 allows anything which matched the pattern. */
2603 if (*p == 0 || *p == ',')
2604 win = 1;
2607 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2609 case '\0':
2610 len = 0;
2611 break;
2612 case ',':
2613 c = '\0';
2614 break;
2616 case '#':
2617 /* Ignore rest of this alternative as far as
2618 constraint checking is concerned. */
2620 p++;
2621 while (*p && *p != ',');
2622 len = 0;
2623 break;
2625 case '&':
2626 earlyclobber[opno] = 1;
2627 if (seen_earlyclobber_at < 0)
2628 seen_earlyclobber_at = opno;
2629 break;
2631 case '0': case '1': case '2': case '3': case '4':
2632 case '5': case '6': case '7': case '8': case '9':
2634 /* This operand must be the same as a previous one.
2635 This kind of constraint is used for instructions such
2636 as add when they take only two operands.
2638 Note that the lower-numbered operand is passed first.
2640 If we are not testing strictly, assume that this
2641 constraint will be satisfied. */
2643 char *end;
2644 int match;
2646 match = strtoul (p, &end, 10);
2647 p = end;
2649 if (strict < 0)
2650 val = 1;
2651 else
2653 rtx op1 = recog_data.operand[match];
2654 rtx op2 = recog_data.operand[opno];
2656 /* A unary operator may be accepted by the predicate,
2657 but it is irrelevant for matching constraints. */
2658 if (UNARY_P (op1))
2659 op1 = XEXP (op1, 0);
2660 if (UNARY_P (op2))
2661 op2 = XEXP (op2, 0);
2663 val = operands_match_p (op1, op2);
2666 matching_operands[opno] = match;
2667 matching_operands[match] = opno;
2669 if (val != 0)
2670 win = 1;
2672 /* If output is *x and input is *--x, arrange later
2673 to change the output to *--x as well, since the
2674 output op is the one that will be printed. */
2675 if (val == 2 && strict > 0)
2677 funny_match[funny_match_index].this_op = opno;
2678 funny_match[funny_match_index++].other = match;
2681 len = 0;
2682 break;
2684 case 'p':
2685 /* p is used for address_operands. When we are called by
2686 gen_reload, no one will have checked that the address is
2687 strictly valid, i.e., that all pseudos requiring hard regs
2688 have gotten them. */
2689 if (strict <= 0
2690 || (strict_memory_address_p (recog_data.operand_mode[opno],
2691 op)))
2692 win = 1;
2693 break;
2695 /* No need to check general_operand again;
2696 it was done in insn-recog.c. Well, except that reload
2697 doesn't check the validity of its replacements, but
2698 that should only matter when there's a bug. */
2699 case 'g':
2700 /* Anything goes unless it is a REG and really has a hard reg
2701 but the hard reg is not in the class GENERAL_REGS. */
2702 if (REG_P (op))
2704 if (strict < 0
2705 || GENERAL_REGS == ALL_REGS
2706 || (reload_in_progress
2707 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2708 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2709 win = 1;
2711 else if (strict < 0 || general_operand (op, mode))
2712 win = 1;
2713 break;
2715 default:
2717 enum constraint_num cn = lookup_constraint (p);
2718 enum reg_class cl = reg_class_for_constraint (cn);
2719 if (cl != NO_REGS)
2721 if (strict < 0
2722 || (strict == 0
2723 && REG_P (op)
2724 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2725 || (strict == 0 && GET_CODE (op) == SCRATCH)
2726 || (REG_P (op)
2727 && reg_fits_class_p (op, cl, offset, mode)))
2728 win = 1;
2731 else if (constraint_satisfied_p (op, cn))
2732 win = 1;
2734 else if (insn_extra_memory_constraint (cn)
2735 /* Every memory operand can be reloaded to fit. */
2736 && ((strict < 0 && MEM_P (op))
2737 /* Before reload, accept what reload can turn
2738 into a mem. */
2739 || (strict < 0 && CONSTANT_P (op))
2740 /* Before reload, accept a pseudo,
2741 since LRA can turn it into a mem. */
2742 || (strict < 0 && targetm.lra_p () && REG_P (op)
2743 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2744 /* During reload, accept a pseudo */
2745 || (reload_in_progress && REG_P (op)
2746 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2747 win = 1;
2748 else if (insn_extra_address_constraint (cn)
2749 /* Every address operand can be reloaded to fit. */
2750 && strict < 0)
2751 win = 1;
2752 /* Cater to architectures like IA-64 that define extra memory
2753 constraints without using define_memory_constraint. */
2754 else if (reload_in_progress
2755 && REG_P (op)
2756 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2757 && reg_renumber[REGNO (op)] < 0
2758 && reg_equiv_mem (REGNO (op)) != 0
2759 && constraint_satisfied_p
2760 (reg_equiv_mem (REGNO (op)), cn))
2761 win = 1;
2762 break;
2765 while (p += len, c);
2767 constraints[opno] = p;
2768 /* If this operand did not win somehow,
2769 this alternative loses. */
2770 if (! win)
2771 lose = 1;
2773 /* This alternative won; the operands are ok.
2774 Change whichever operands this alternative says to change. */
2775 if (! lose)
2777 int opno, eopno;
2779 /* See if any earlyclobber operand conflicts with some other
2780 operand. */
2782 if (strict > 0 && seen_earlyclobber_at >= 0)
2783 for (eopno = seen_earlyclobber_at;
2784 eopno < recog_data.n_operands;
2785 eopno++)
2786 /* Ignore earlyclobber operands now in memory,
2787 because we would often report failure when we have
2788 two memory operands, one of which was formerly a REG. */
2789 if (earlyclobber[eopno]
2790 && REG_P (recog_data.operand[eopno]))
2791 for (opno = 0; opno < recog_data.n_operands; opno++)
2792 if ((MEM_P (recog_data.operand[opno])
2793 || recog_data.operand_type[opno] != OP_OUT)
2794 && opno != eopno
2795 /* Ignore things like match_operator operands. */
2796 && *recog_data.constraints[opno] != 0
2797 && ! (matching_operands[opno] == eopno
2798 && operands_match_p (recog_data.operand[opno],
2799 recog_data.operand[eopno]))
2800 && ! safe_from_earlyclobber (recog_data.operand[opno],
2801 recog_data.operand[eopno]))
2802 lose = 1;
2804 if (! lose)
2806 while (--funny_match_index >= 0)
2808 recog_data.operand[funny_match[funny_match_index].other]
2809 = recog_data.operand[funny_match[funny_match_index].this_op];
2812 /* For operands without < or > constraints reject side-effects. */
2813 if (AUTO_INC_DEC && recog_data.is_asm)
2815 for (opno = 0; opno < recog_data.n_operands; opno++)
2816 if (MEM_P (recog_data.operand[opno]))
2817 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2819 case PRE_INC:
2820 case POST_INC:
2821 case PRE_DEC:
2822 case POST_DEC:
2823 case PRE_MODIFY:
2824 case POST_MODIFY:
2825 if (strchr (recog_data.constraints[opno], '<') == NULL
2826 && strchr (recog_data.constraints[opno], '>')
2827 == NULL)
2828 return 0;
2829 break;
2830 default:
2831 break;
2835 return 1;
2839 which_alternative++;
2841 while (which_alternative < recog_data.n_alternatives);
2843 which_alternative = -1;
2844 /* If we are about to reject this, but we are not to test strictly,
2845 try a very loose test. Only return failure if it fails also. */
2846 if (strict == 0)
2847 return constrain_operands (-1, alternatives);
2848 else
2849 return 0;
2852 /* Return true iff OPERAND (assumed to be a REG rtx)
2853 is a hard reg in class CLASS when its regno is offset by OFFSET
2854 and changed to mode MODE.
2855 If REG occupies multiple hard regs, all of them must be in CLASS. */
2857 bool
2858 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2859 machine_mode mode)
2861 unsigned int regno = REGNO (operand);
2863 if (cl == NO_REGS)
2864 return false;
2866 /* Regno must not be a pseudo register. Offset may be negative. */
2867 return (HARD_REGISTER_NUM_P (regno)
2868 && HARD_REGISTER_NUM_P (regno + offset)
2869 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2870 regno + offset));
2873 /* Split single instruction. Helper function for split_all_insns and
2874 split_all_insns_noflow. Return last insn in the sequence if successful,
2875 or NULL if unsuccessful. */
2877 static rtx_insn *
2878 split_insn (rtx_insn *insn)
2880 /* Split insns here to get max fine-grain parallelism. */
2881 rtx_insn *first = PREV_INSN (insn);
2882 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2883 rtx insn_set, last_set, note;
2885 if (last == insn)
2886 return NULL;
2888 /* If the original instruction was a single set that was known to be
2889 equivalent to a constant, see if we can say the same about the last
2890 instruction in the split sequence. The two instructions must set
2891 the same destination. */
2892 insn_set = single_set (insn);
2893 if (insn_set)
2895 last_set = single_set (last);
2896 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2898 note = find_reg_equal_equiv_note (insn);
2899 if (note && CONSTANT_P (XEXP (note, 0)))
2900 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2901 else if (CONSTANT_P (SET_SRC (insn_set)))
2902 set_unique_reg_note (last, REG_EQUAL,
2903 copy_rtx (SET_SRC (insn_set)));
2907 /* try_split returns the NOTE that INSN became. */
2908 SET_INSN_DELETED (insn);
2910 /* ??? Coddle to md files that generate subregs in post-reload
2911 splitters instead of computing the proper hard register. */
2912 if (reload_completed && first != last)
2914 first = NEXT_INSN (first);
2915 for (;;)
2917 if (INSN_P (first))
2918 cleanup_subreg_operands (first);
2919 if (first == last)
2920 break;
2921 first = NEXT_INSN (first);
2925 return last;
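/* The individual splits applied here come from define_split (or
   define_insn_and_split) patterns in the target's .md file; a schematic
   example of the shape such a pattern takes:

     (define_split
       [(set (match_operand:DI 0 "register_operand" "")
	     (match_operand:DI 1 "register_operand" ""))]
       "reload_completed"
       [(set (match_dup 2) (match_dup 3))
	(set (match_dup 4) (match_dup 5))]
       "...prepare operands[2..5] from operands[0..1]...")

   try_split asks the generated splitter whether such a pattern matches
   PATTERN (insn) and, if so, replaces INSN with the new sequence.  */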
2928 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2930 void
2931 split_all_insns (void)
2933 bool changed;
2934 basic_block bb;
2936 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
2937 bitmap_clear (blocks);
2938 changed = false;
2940 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2942 rtx_insn *insn, *next;
2943 bool finish = false;
2945 rtl_profile_for_bb (bb);
2946 for (insn = BB_HEAD (bb); !finish ; insn = next)
2948 /* Can't use `next_real_insn' because that might go across
2949 CODE_LABELS and short-out basic blocks. */
2950 next = NEXT_INSN (insn);
2951 finish = (insn == BB_END (bb));
2952 if (INSN_P (insn))
2954 rtx set = single_set (insn);
2956 /* Don't split no-op move insns. These should silently
2957 disappear later in final. Splitting such insns would
2958 break the code that handles LIBCALL blocks. */
2959 if (set && set_noop_p (set))
2961 /* Nops get in the way while scheduling, so delete them
2962 now if register allocation has already been done. It
2963 is too risky to try to do this before register
2964 allocation, and there are unlikely to be very many
2965 nops then anyway. */
2966 if (reload_completed)
2967 delete_insn_and_edges (insn);
2969 else
2971 if (split_insn (insn))
2973 bitmap_set_bit (blocks, bb->index);
2974 changed = true;
2981 default_rtl_profile ();
2982 if (changed)
2983 find_many_sub_basic_blocks (blocks);
2985 checking_verify_flow_info ();
2988 /* Same as split_all_insns, but do not expect CFG to be available.
2989 Used by machine dependent reorg passes. */
2991 unsigned int
2992 split_all_insns_noflow (void)
2994 rtx_insn *next, *insn;
2996 for (insn = get_insns (); insn; insn = next)
2998 next = NEXT_INSN (insn);
2999 if (INSN_P (insn))
3001 /* Don't split no-op move insns. These should silently
3002 disappear later in final. Splitting such insns would
3003 break the code that handles LIBCALL blocks. */
3004 rtx set = single_set (insn);
3005 if (set && set_noop_p (set))
3007 /* Nops get in the way while scheduling, so delete them
3008 now if register allocation has already been done. It
3009 is too risky to try to do this before register
3010 allocation, and there are unlikely to be very many
3011 nops then anyway.
3013 ??? Should we use delete_insn when the CFG isn't valid? */
3014 if (reload_completed)
3015 delete_insn_and_edges (insn);
3017 else
3018 split_insn (insn);
3021 return 0;
3024 struct peep2_insn_data
3026 rtx_insn *insn;
3027 regset live_before;
3030 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3031 static int peep2_current;
3033 static bool peep2_do_rebuild_jump_labels;
3034 static bool peep2_do_cleanup_cfg;
3036 /* The number of instructions available to match a peep2. */
3037 int peep2_current_count;
3039 /* A marker indicating the last insn of the block. The live_before regset
3040 for this element is correct, indicating DF_LIVE_OUT for the block. */
3041 #define PEEP2_EOB invalid_insn_rtx
3043 /* Wrap N to fit into the peep2_insn_data buffer. */
3045 static int
3046 peep2_buf_position (int n)
3048 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3049 n -= MAX_INSNS_PER_PEEP2 + 1;
3050 return n;
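/* For instance, if MAX_INSNS_PER_PEEP2 were 5 (purely for illustration),
   the buffer would have six slots and peep2_buf_position (5 + 3) would
   return 2: indices simply wrap around the circular peep2_insn_data
   buffer.  */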
3053 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3054 does not exist. Used by the recognizer to find the next insn to match
3055 in a multi-insn pattern. */
3057 rtx_insn *
3058 peep2_next_insn (int n)
3060 gcc_assert (n <= peep2_current_count);
3062 n = peep2_buf_position (peep2_current + n);
3064 return peep2_insn_data[n].insn;
3067 /* Return true if REGNO is dead before the Nth non-note insn
3068 after `current'. */
3071 peep2_regno_dead_p (int ofs, int regno)
3073 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3075 ofs = peep2_buf_position (peep2_current + ofs);
3077 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3079 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3082 /* Similarly for a REG. */
3085 peep2_reg_dead_p (int ofs, rtx reg)
3087 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3089 ofs = peep2_buf_position (peep2_current + ofs);
3091 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3093 unsigned int end_regno = END_REGNO (reg);
3094 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3095 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3096 return 0;
3097 return 1;
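/* These are typically used in the condition of a define_peephole2 in the
   target's .md file; a sketch for a two-insn match:

     "peep2_reg_dead_p (2, operands[0])"

   requires operands[0] to be dead once both matched insns have executed.  */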
3100 /* Regno offset to be used in the register search. */
3101 static int search_ofs;
3103 /* Try to find a hard register of mode MODE, matching the register class in
3104 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3105 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3106 in which case the only condition is that the register must be available
3107 before CURRENT_INSN.
3108 Registers that already have bits set in REG_SET will not be considered.
3110 If an appropriate register is available, it will be returned and the
3111 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3112 returned. */
3115 peep2_find_free_register (int from, int to, const char *class_str,
3116 machine_mode mode, HARD_REG_SET *reg_set)
3118 enum reg_class cl;
3119 HARD_REG_SET live;
3120 df_ref def;
3121 int i;
3123 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3124 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3126 from = peep2_buf_position (peep2_current + from);
3127 to = peep2_buf_position (peep2_current + to);
3129 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3130 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3132 while (from != to)
3134 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3136 /* Don't use registers set or clobbered by the insn. */
3137 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3138 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3140 from = peep2_buf_position (from + 1);
3143 cl = reg_class_for_constraint (lookup_constraint (class_str));
3145 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3147 int raw_regno, regno, success, j;
3149 /* Distribute the free registers as much as possible. */
3150 raw_regno = search_ofs + i;
3151 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3152 raw_regno -= FIRST_PSEUDO_REGISTER;
3153 #ifdef REG_ALLOC_ORDER
3154 regno = reg_alloc_order[raw_regno];
3155 #else
3156 regno = raw_regno;
3157 #endif
3159 /* Can it support the mode we need? */
3160 if (!targetm.hard_regno_mode_ok (regno, mode))
3161 continue;
3163 success = 1;
3164 for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3166 /* Don't allocate fixed registers. */
3167 if (fixed_regs[regno + j])
3169 success = 0;
3170 break;
3172 /* Don't allocate global registers. */
3173 if (global_regs[regno + j])
3175 success = 0;
3176 break;
3178 /* Make sure the register is of the right class. */
3179 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3181 success = 0;
3182 break;
3184 /* And that we don't create an extra save/restore. */
3185 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3187 success = 0;
3188 break;
3191 if (! targetm.hard_regno_scratch_ok (regno + j))
3193 success = 0;
3194 break;
3197 /* And we don't clobber traceback for noreturn functions. */
3198 if ((regno + j == FRAME_POINTER_REGNUM
3199 || regno + j == HARD_FRAME_POINTER_REGNUM)
3200 && (! reload_completed || frame_pointer_needed))
3202 success = 0;
3203 break;
3206 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3207 || TEST_HARD_REG_BIT (live, regno + j))
3209 success = 0;
3210 break;
3214 if (success)
3216 add_to_hard_reg_set (reg_set, mode, regno);
3218 /* Start the next search with the next register. */
3219 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3220 raw_regno = 0;
3221 search_ofs = raw_regno;
3223 return gen_rtx_REG (mode, regno);
3227 search_ofs = 0;
3228 return NULL_RTX;
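/* A sketch of a direct use, as it might appear in a define_peephole2
   preparation statement:

     HARD_REG_SET used;
     CLEAR_HARD_REG_SET (used);
     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
     if (scratch == NULL_RTX)
       FAIL;

   SCRATCH, if found, is a hard register satisfying constraint "r" that is
   free from the first matched insn through the second.  */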
3231 /* Forget all currently tracked instructions; remember only the current
3232 LIVE regset. */
3234 static void
3235 peep2_reinit_state (regset live)
3237 int i;
3239 /* Indicate that all slots except the last hold invalid data. */
3240 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3241 peep2_insn_data[i].insn = NULL;
3242 peep2_current_count = 0;
3244 /* Indicate that the last slot contains live_after data. */
3245 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3246 peep2_current = MAX_INSNS_PER_PEEP2;
3248 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3251 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3252 starting at INSN. Perform the replacement, removing the old insns and
3253 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3254 if the replacement is rejected. */
3256 static rtx_insn *
3257 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3259 int i;
3260 rtx_insn *last, *before_try, *x;
3261 rtx eh_note, as_note;
3262 rtx_insn *old_insn;
3263 rtx_insn *new_insn;
3264 bool was_call = false;
3266 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3267 match more than one insn, or to be split into more than one insn. */
3268 old_insn = peep2_insn_data[peep2_current].insn;
3269 if (RTX_FRAME_RELATED_P (old_insn))
3271 bool any_note = false;
3272 rtx note;
3274 if (match_len != 0)
3275 return NULL;
3277 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3278 may be in the stream for the purpose of register allocation. */
3279 if (active_insn_p (attempt))
3280 new_insn = attempt;
3281 else
3282 new_insn = next_active_insn (attempt);
3283 if (next_active_insn (new_insn))
3284 return NULL;
3286 /* We have a 1-1 replacement. Copy over any frame-related info. */
3287 RTX_FRAME_RELATED_P (new_insn) = 1;
3289 /* Allow the backend to fill in a note during the split. */
3290 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3291 switch (REG_NOTE_KIND (note))
3293 case REG_FRAME_RELATED_EXPR:
3294 case REG_CFA_DEF_CFA:
3295 case REG_CFA_ADJUST_CFA:
3296 case REG_CFA_OFFSET:
3297 case REG_CFA_REGISTER:
3298 case REG_CFA_EXPRESSION:
3299 case REG_CFA_RESTORE:
3300 case REG_CFA_SET_VDRAP:
3301 any_note = true;
3302 break;
3303 default:
3304 break;
3307 /* If the backend didn't supply a note, copy one over. */
3308 if (!any_note)
3309 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3310 switch (REG_NOTE_KIND (note))
3312 case REG_FRAME_RELATED_EXPR:
3313 case REG_CFA_DEF_CFA:
3314 case REG_CFA_ADJUST_CFA:
3315 case REG_CFA_OFFSET:
3316 case REG_CFA_REGISTER:
3317 case REG_CFA_EXPRESSION:
3318 case REG_CFA_RESTORE:
3319 case REG_CFA_SET_VDRAP:
3320 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3321 any_note = true;
3322 break;
3323 default:
3324 break;
3327 /* If there still isn't a note, make sure the unwind info sees the
3328 same expression as before the split. */
3329 if (!any_note)
3331 rtx old_set, new_set;
3333 /* The old insn had better have been simple, or annotated. */
3334 old_set = single_set (old_insn);
3335 gcc_assert (old_set != NULL);
3337 new_set = single_set (new_insn);
3338 if (!new_set || !rtx_equal_p (new_set, old_set))
3339 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3342 /* Copy prologue/epilogue status. This is required in order to keep
3343 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3344 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3347 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3348 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3349 cfg-related call notes. */
3350 for (i = 0; i <= match_len; ++i)
3352 int j;
3353 rtx note;
3355 j = peep2_buf_position (peep2_current + i);
3356 old_insn = peep2_insn_data[j].insn;
3357 if (!CALL_P (old_insn))
3358 continue;
3359 was_call = true;
3361 new_insn = attempt;
3362 while (new_insn != NULL_RTX)
3364 if (CALL_P (new_insn))
3365 break;
3366 new_insn = NEXT_INSN (new_insn);
3369 gcc_assert (new_insn != NULL_RTX);
3371 CALL_INSN_FUNCTION_USAGE (new_insn)
3372 = CALL_INSN_FUNCTION_USAGE (old_insn);
3373 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3375 for (note = REG_NOTES (old_insn);
3376 note;
3377 note = XEXP (note, 1))
3378 switch (REG_NOTE_KIND (note))
3380 case REG_NORETURN:
3381 case REG_SETJMP:
3382 case REG_TM:
3383 add_reg_note (new_insn, REG_NOTE_KIND (note),
3384 XEXP (note, 0));
3385 break;
3386 default:
3387 /* Discard all other reg notes. */
3388 break;
3391 /* Croak if there is another call in the sequence. */
3392 while (++i <= match_len)
3394 j = peep2_buf_position (peep2_current + i);
3395 old_insn = peep2_insn_data[j].insn;
3396 gcc_assert (!CALL_P (old_insn));
3398 break;
3401 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3402 move those notes over to the new sequence. */
3403 as_note = NULL;
3404 for (i = match_len; i >= 0; --i)
3406 int j = peep2_buf_position (peep2_current + i);
3407 old_insn = peep2_insn_data[j].insn;
3409 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3410 if (as_note)
3411 break;
3414 i = peep2_buf_position (peep2_current + match_len);
3415 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3417 /* Replace the old sequence with the new. */
3418 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3419 last = emit_insn_after_setloc (attempt,
3420 peep2_insn_data[i].insn,
3421 INSN_LOCATION (peepinsn));
3422 before_try = PREV_INSN (insn);
3423 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3425 /* Re-insert the EH_REGION notes. */
3426 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3428 edge eh_edge;
3429 edge_iterator ei;
3431 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3432 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3433 break;
3435 if (eh_note)
3436 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3438 if (eh_edge)
3439 for (x = last; x != before_try; x = PREV_INSN (x))
3440 if (x != BB_END (bb)
3441 && (can_throw_internal (x)
3442 || can_nonlocal_goto (x)))
3444 edge nfte, nehe;
3445 int flags;
3447 nfte = split_block (bb, x);
3448 flags = (eh_edge->flags
3449 & (EDGE_EH | EDGE_ABNORMAL));
3450 if (CALL_P (x))
3451 flags |= EDGE_ABNORMAL_CALL;
3452 nehe = make_edge (nfte->src, eh_edge->dest,
3453 flags);
3455 nehe->probability = eh_edge->probability;
3456 nfte->probability = nehe->probability.invert ();
3458 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3459 bb = nfte->src;
3460 eh_edge = nehe;
3463 /* A possibly trapping insn may have been converted into a non-trapping
3464 one; zap any dummy outgoing edges. */
3465 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3468 /* Re-insert the ARGS_SIZE notes. */
3469 if (as_note)
3470 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3472 /* If we generated a jump instruction, it won't have
3473 JUMP_LABEL set. Recompute after we're done. */
3474 for (x = last; x != before_try; x = PREV_INSN (x))
3475 if (JUMP_P (x))
3477 peep2_do_rebuild_jump_labels = true;
3478 break;
3481 return last;
3484 /* After performing a replacement in basic block BB, fix up the life
3485 information in our buffer. LAST is the last of the insns that we
3486 emitted as a replacement. PREV is the insn before the start of
3487 the replacement. MATCH_LEN is the number of instructions that were
3488 matched, and which now need to be replaced in the buffer. */
3490 static void
3491 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3492 rtx_insn *prev)
3494 int i = peep2_buf_position (peep2_current + match_len + 1);
3495 rtx_insn *x;
3496 regset_head live;
3498 INIT_REG_SET (&live);
3499 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3501 gcc_assert (peep2_current_count >= match_len + 1);
3502 peep2_current_count -= match_len + 1;
3504 x = last;
3507 if (INSN_P (x))
3509 df_insn_rescan (x);
3510 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3512 peep2_current_count++;
3513 if (--i < 0)
3514 i = MAX_INSNS_PER_PEEP2;
3515 peep2_insn_data[i].insn = x;
3516 df_simulate_one_insn_backwards (bb, x, &live);
3517 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3520 x = PREV_INSN (x);
3522 while (x != prev);
3523 CLEAR_REG_SET (&live);
3525 peep2_current = i;
3528 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3529 Return true if we added it, false otherwise. The caller will try to match
3530 peepholes against the buffer if we return false; otherwise it will try to
3531 add more instructions to the buffer. */
3533 static bool
3534 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
3536 int pos;
3538 /* Once we have filled the maximum number of insns the buffer can hold,
3539 allow the caller to match the insns against peepholes. We wait until
3540 the buffer is full in case the target has similar peepholes of different
3541 length; we always want to match the longest if possible. */
3542 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3543 return false;
3545 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3546 any other pattern, lest it change the semantics of the frame info. */
3547 if (RTX_FRAME_RELATED_P (insn))
3549 /* Let the buffer drain first. */
3550 if (peep2_current_count > 0)
3551 return false;
3552 /* Now the insn will be the only thing in the buffer. */
3555 pos = peep2_buf_position (peep2_current + peep2_current_count);
3556 peep2_insn_data[pos].insn = insn;
3557 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3558 peep2_current_count++;
3560 df_simulate_one_insn_forwards (bb, insn, live);
3561 return true;
3564 /* Perform the peephole2 optimization pass. */
3566 static void
3567 peephole2_optimize (void)
3569 rtx_insn *insn;
3570 bitmap live;
3571 int i;
3572 basic_block bb;
3574 peep2_do_cleanup_cfg = false;
3575 peep2_do_rebuild_jump_labels = false;
3577 df_set_flags (DF_LR_RUN_DCE);
3578 df_note_add_problem ();
3579 df_analyze ();
3581 /* Initialize the regsets we're going to use. */
3582 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3583 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3584 search_ofs = 0;
3585 live = BITMAP_ALLOC (&reg_obstack);
3587 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3589 bool past_end = false;
3590 int pos;
3592 rtl_profile_for_bb (bb);
3594 /* Start up propagation. */
3595 bitmap_copy (live, DF_LR_IN (bb));
3596 df_simulate_initialize_forwards (bb, live);
3597 peep2_reinit_state (live);
3599 insn = BB_HEAD (bb);
3600 for (;;)
3602 rtx_insn *attempt, *head;
3603 int match_len;
3605 if (!past_end && !NONDEBUG_INSN_P (insn))
3607 next_insn:
3608 insn = NEXT_INSN (insn);
3609 if (insn == NEXT_INSN (BB_END (bb)))
3610 past_end = true;
3611 continue;
3613 if (!past_end && peep2_fill_buffer (bb, insn, live))
3614 goto next_insn;
3616 /* If we did not fill an empty buffer, it signals the end of the
3617 block. */
3618 if (peep2_current_count == 0)
3619 break;
3621 /* The buffer filled to the current maximum, so try to match. */
3623 pos = peep2_buf_position (peep2_current + peep2_current_count);
3624 peep2_insn_data[pos].insn = PEEP2_EOB;
3625 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3627 /* Match the peephole. */
3628 head = peep2_insn_data[peep2_current].insn;
3629 attempt = peephole2_insns (PATTERN (head), head, &match_len);
3630 if (attempt != NULL)
3632 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3633 if (last)
3635 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3636 continue;
3640 /* No match: advance the buffer by one insn. */
3641 peep2_current = peep2_buf_position (peep2_current + 1);
3642 peep2_current_count--;
3646 default_rtl_profile ();
3647 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3648 BITMAP_FREE (peep2_insn_data[i].live_before);
3649 BITMAP_FREE (live);
3650 if (peep2_do_rebuild_jump_labels)
3651 rebuild_jump_labels (get_insns ());
3652 if (peep2_do_cleanup_cfg)
3653 cleanup_cfg (CLEANUP_CFG_CHANGED);
3656 /* Common predicates for use with define_bypass. */
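/* Both predicates below are intended to be named as the guard of a
   define_bypass in a target's pipeline description, e.g. (a sketch with
   made-up reservation names):

     (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   so that the reduced latency applies only when the producer feeds the
   store's data operand rather than its address.  */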
3658 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3659 data, not the address operand(s), of the store. IN_INSN and OUT_INSN
3660 must be either a single_set or a PARALLEL with SETs inside. */
3663 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3665 rtx out_set, in_set;
3666 rtx out_pat, in_pat;
3667 rtx out_exp, in_exp;
3668 int i, j;
3670 in_set = single_set (in_insn);
3671 if (in_set)
3673 if (!MEM_P (SET_DEST (in_set)))
3674 return false;
3676 out_set = single_set (out_insn);
3677 if (out_set)
3679 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3680 return false;
3682 else
3684 out_pat = PATTERN (out_insn);
3686 if (GET_CODE (out_pat) != PARALLEL)
3687 return false;
3689 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3691 out_exp = XVECEXP (out_pat, 0, i);
3693 if (GET_CODE (out_exp) == CLOBBER)
3694 continue;
3696 gcc_assert (GET_CODE (out_exp) == SET);
3698 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3699 return false;
3703 else
3705 in_pat = PATTERN (in_insn);
3706 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3708 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3710 in_exp = XVECEXP (in_pat, 0, i);
3712 if (GET_CODE (in_exp) == CLOBBER)
3713 continue;
3715 gcc_assert (GET_CODE (in_exp) == SET);
3717 if (!MEM_P (SET_DEST (in_exp)))
3718 return false;
3720 out_set = single_set (out_insn);
3721 if (out_set)
3723 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3724 return false;
3726 else
3728 out_pat = PATTERN (out_insn);
3729 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3731 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3733 out_exp = XVECEXP (out_pat, 0, j);
3735 if (GET_CODE (out_exp) == CLOBBER)
3736 continue;
3738 gcc_assert (GET_CODE (out_exp) == SET);
3740 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3741 return false;
3747 return true;
3750 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3751 condition, and not in the THEN or ELSE branch. OUT_INSN may be a single
3752 set or multiple sets; for a meaningful result IN_INSN should be a
3753 single_set, but for insn categorization it may be any JUMP or CALL insn. */
3756 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3758 rtx out_set, in_set;
3760 in_set = single_set (in_insn);
3761 if (! in_set)
3763 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3764 return false;
3767 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3768 return false;
3769 in_set = SET_SRC (in_set);
3771 out_set = single_set (out_insn);
3772 if (out_set)
3774 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3775 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3776 return false;
3778 else
3780 rtx out_pat;
3781 int i;
3783 out_pat = PATTERN (out_insn);
3784 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3786 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3788 rtx exp = XVECEXP (out_pat, 0, i);
3790 if (GET_CODE (exp) == CLOBBER)
3791 continue;
3793 gcc_assert (GET_CODE (exp) == SET);
3795 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3796 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3797 return false;
3801 return true;
3804 static unsigned int
3805 rest_of_handle_peephole2 (void)
3807 if (HAVE_peephole2)
3808 peephole2_optimize ();
3810 return 0;
3813 namespace {
3815 const pass_data pass_data_peephole2 =
3817 RTL_PASS, /* type */
3818 "peephole2", /* name */
3819 OPTGROUP_NONE, /* optinfo_flags */
3820 TV_PEEPHOLE2, /* tv_id */
3821 0, /* properties_required */
3822 0, /* properties_provided */
3823 0, /* properties_destroyed */
3824 0, /* todo_flags_start */
3825 TODO_df_finish, /* todo_flags_finish */
3828 class pass_peephole2 : public rtl_opt_pass
3830 public:
3831 pass_peephole2 (gcc::context *ctxt)
3832 : rtl_opt_pass (pass_data_peephole2, ctxt)
3835 /* opt_pass methods: */
3836 /* The epiphany backend creates a second instance of this pass, so we need
3837 a clone method. */
3838 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3839 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3840 virtual unsigned int execute (function *)
3842 return rest_of_handle_peephole2 ();
3845 }; // class pass_peephole2
3847 } // anon namespace
3849 rtl_opt_pass *
3850 make_pass_peephole2 (gcc::context *ctxt)
3852 return new pass_peephole2 (ctxt);
3855 namespace {
3857 const pass_data pass_data_split_all_insns =
3859 RTL_PASS, /* type */
3860 "split1", /* name */
3861 OPTGROUP_NONE, /* optinfo_flags */
3862 TV_NONE, /* tv_id */
3863 0, /* properties_required */
3864 0, /* properties_provided */
3865 0, /* properties_destroyed */
3866 0, /* todo_flags_start */
3867 0, /* todo_flags_finish */
3870 class pass_split_all_insns : public rtl_opt_pass
3872 public:
3873 pass_split_all_insns (gcc::context *ctxt)
3874 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3877 /* opt_pass methods: */
3878 /* The epiphany backend creates a second instance of this pass, so
3879 we need a clone method. */
3880 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3881 virtual unsigned int execute (function *)
3883 split_all_insns ();
3884 return 0;
3887 }; // class pass_split_all_insns
3889 } // anon namespace
3891 rtl_opt_pass *
3892 make_pass_split_all_insns (gcc::context *ctxt)
3894 return new pass_split_all_insns (ctxt);
3897 namespace {
3899 const pass_data pass_data_split_after_reload =
3901 RTL_PASS, /* type */
3902 "split2", /* name */
3903 OPTGROUP_NONE, /* optinfo_flags */
3904 TV_NONE, /* tv_id */
3905 0, /* properties_required */
3906 0, /* properties_provided */
3907 0, /* properties_destroyed */
3908 0, /* todo_flags_start */
3909 0, /* todo_flags_finish */
3912 class pass_split_after_reload : public rtl_opt_pass
3914 public:
3915 pass_split_after_reload (gcc::context *ctxt)
3916 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3919 /* opt_pass methods: */
3920 virtual bool gate (function *)
3922 /* If optimizing, then go ahead and split insns now. */
3923 if (optimize > 0)
3924 return true;
3926 #ifdef STACK_REGS
3927 return true;
3928 #else
3929 return false;
3930 #endif
3933 virtual unsigned int execute (function *)
3935 split_all_insns ();
3936 return 0;
3939 }; // class pass_split_after_reload
3941 } // anon namespace
3943 rtl_opt_pass *
3944 make_pass_split_after_reload (gcc::context *ctxt)
3946 return new pass_split_after_reload (ctxt);
3949 namespace {
3951 const pass_data pass_data_split_before_regstack =
3953 RTL_PASS, /* type */
3954 "split3", /* name */
3955 OPTGROUP_NONE, /* optinfo_flags */
3956 TV_NONE, /* tv_id */
3957 0, /* properties_required */
3958 0, /* properties_provided */
3959 0, /* properties_destroyed */
3960 0, /* todo_flags_start */
3961 0, /* todo_flags_finish */
3964 class pass_split_before_regstack : public rtl_opt_pass
3966 public:
3967 pass_split_before_regstack (gcc::context *ctxt)
3968 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
3971 /* opt_pass methods: */
3972 virtual bool gate (function *);
3973 virtual unsigned int execute (function *)
3975 split_all_insns ();
3976 return 0;
3979 }; // class pass_split_before_regstack
3981 bool
3982 pass_split_before_regstack::gate (function *)
3984 #if HAVE_ATTR_length && defined (STACK_REGS)
3985 /* If flow2 creates new instructions which need splitting,
3986 and scheduling after reload is not done, they might not be
3987 split until final, which does not allow splitting
3988 when HAVE_ATTR_length is defined. */
3989 # ifdef INSN_SCHEDULING
3990 return (optimize && !flag_schedule_insns_after_reload);
3991 # else
3992 return (optimize);
3993 # endif
3994 #else
3995 return 0;
3996 #endif
3999 } // anon namespace
4001 rtl_opt_pass *
4002 make_pass_split_before_regstack (gcc::context *ctxt)
4004 return new pass_split_before_regstack (ctxt);
4007 static unsigned int
4008 rest_of_handle_split_before_sched2 (void)
4010 #ifdef INSN_SCHEDULING
4011 split_all_insns ();
4012 #endif
4013 return 0;
4016 namespace {
4018 const pass_data pass_data_split_before_sched2 =
4020 RTL_PASS, /* type */
4021 "split4", /* name */
4022 OPTGROUP_NONE, /* optinfo_flags */
4023 TV_NONE, /* tv_id */
4024 0, /* properties_required */
4025 0, /* properties_provided */
4026 0, /* properties_destroyed */
4027 0, /* todo_flags_start */
4028 0, /* todo_flags_finish */
4031 class pass_split_before_sched2 : public rtl_opt_pass
4033 public:
4034 pass_split_before_sched2 (gcc::context *ctxt)
4035 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4038 /* opt_pass methods: */
4039 virtual bool gate (function *)
4041 #ifdef INSN_SCHEDULING
4042 return optimize > 0 && flag_schedule_insns_after_reload;
4043 #else
4044 return false;
4045 #endif
4048 virtual unsigned int execute (function *)
4050 return rest_of_handle_split_before_sched2 ();
4053 }; // class pass_split_before_sched2
4055 } // anon namespace
4057 rtl_opt_pass *
4058 make_pass_split_before_sched2 (gcc::context *ctxt)
4060 return new pass_split_before_sched2 (ctxt);
4063 namespace {
4065 const pass_data pass_data_split_for_shorten_branches =
4067 RTL_PASS, /* type */
4068 "split5", /* name */
4069 OPTGROUP_NONE, /* optinfo_flags */
4070 TV_NONE, /* tv_id */
4071 0, /* properties_required */
4072 0, /* properties_provided */
4073 0, /* properties_destroyed */
4074 0, /* todo_flags_start */
4075 0, /* todo_flags_finish */
4078 class pass_split_for_shorten_branches : public rtl_opt_pass
4080 public:
4081 pass_split_for_shorten_branches (gcc::context *ctxt)
4082 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4085 /* opt_pass methods: */
4086 virtual bool gate (function *)
4088 /* The placement of the splitting that we do for shorten_branches
4089 depends on whether regstack is used by the target or not. */
4090 #if HAVE_ATTR_length && !defined (STACK_REGS)
4091 return true;
4092 #else
4093 return false;
4094 #endif
4097 virtual unsigned int execute (function *)
4099 return split_all_insns_noflow ();
4102 }; // class pass_split_for_shorten_branches
4104 } // anon namespace
4106 rtl_opt_pass *
4107 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4109 return new pass_split_for_shorten_branches (ctxt);
4112 /* (Re)initialize the target information after a change in target. */
4114 void
4115 recog_init ()
4117 /* The information is zero-initialized, so we don't need to do anything
4118 first time round. */
4119 if (!this_target_recog->x_initialized)
4121 this_target_recog->x_initialized = true;
4122 return;
4124 memset (this_target_recog->x_bool_attr_masks, 0,
4125 sizeof (this_target_recog->x_bool_attr_masks));
4126 for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4127 if (this_target_recog->x_op_alt[i])
4129 free (this_target_recog->x_op_alt[i]);
4130 this_target_recog->x_op_alt[i] = 0;