/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "cfghooks.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "insn-attr.h"
#include "addresses.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "reload.h"
#include "tree-pass.h"
#include "function-abi.h"

#ifndef STACK_POP_CODE
#if STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx_insn *split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
                                      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

struct change_t
{
  rtx object;
  int old_code;
  bool unshare;
  rtx *loc;
  rtx old;
};

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
                   bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
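
/* Illustrative sketch (not part of recog.c): a typical caller queues a
   tentative change with IN_GROUP = 1 and then asks for the whole group to
   be validated.  INSN and NEW_SRC here are hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_swap_set_src (rtx_insn *insn, rtx new_src)
{
  rtx set = single_set (insn);
  if (!set)
    return false;

  /* Queue the change; nothing is validated yet.  */
  validate_change (insn, &SET_SRC (set), new_src, 1);

  /* Re-recognize INSN with the change in place; on failure all queued
     changes are rolled back automatically.  */
  return apply_change_group ();
}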
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx_insn *insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true clobbers which have to be added in order to
   match the instructions will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed
                      && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
        validate_change (insn, &PATTERN (insn), newpat, 1);
      else
        PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}

/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is in insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_addr_space_p (GET_MODE (object),
                                             XEXP (object, 0),
                                             MEM_ADDR_SPACE (object)))
            break;
        }
      else if (/* changes[i].old might be zero, e.g. when putting a
                  REG_FRAME_RELATED_EXPR into a previously empty list.  */
               changes[i].old
               && REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && HAS_DECL_ASSEMBLER_NAME_P (REG_EXPR (changes[i].old))
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}

/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (as_a <rtx_insn *> (last_object));
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
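
/* Illustrative sketch (not part of recog.c): passes such as combine use
   num_validated_changes () as a checkpoint so a failed follow-up
   substitution can be unwound without losing earlier queued changes.
   The insn and locations below are hypothetical.  */

static void ATTRIBUTE_UNUSED
example_checkpointed_changes (rtx_insn *insn, rtx *loc1, rtx *loc2,
                              rtx new1, rtx new2)
{
  int checkpoint = num_validated_changes ();

  validate_change (insn, loc1, new1, 1);
  if (!verify_changes (checkpoint))
    {
      cancel_changes (checkpoint);      /* Roll back only our changes.  */
      return;
    }

  validate_change (insn, loc2, new2, 1);
  apply_change_group ();                /* Confirm or cancel everything.  */
}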
/* Reduce conditional compilation elsewhere.  */
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
                          machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;
  scalar_int_mode is_mode;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
        new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
                                            op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
                                             XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
        new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
                                                 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }

  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
                                        MEM_ADDR_SPACE (XEXP (x, 0)))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          int pos = INTVAL (XEXP (x, 2));
          machine_mode new_mode = is_mode;
          if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
            new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
          else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
            new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
          scalar_int_mode wanted_mode = (new_mode == VOIDmode
                                         ? word_mode
                                         : as_a <scalar_int_mode> (new_mode));

          /* If we have a narrower mode, we can do something.  */
          if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
                          - offset);

              gcc_assert (GET_MODE_PRECISION (wanted_mode)
                          == GET_MODE_BITSIZE (wanted_mode));
              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }
      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
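
/* Illustrative sketch (not part of recog.c): propagating a known constant
   into every use of a pseudo register in one insn.  INSN, PSEUDO and VALUE
   are hypothetical; if the rewritten insn no longer matches any pattern,
   the insn is left exactly as it was.  */

static void ATTRIBUTE_UNUSED
example_propagate_constant (rtx_insn *insn, rtx pseudo, HOST_WIDE_INT value)
{
  rtx cst = gen_int_mode (value, GET_MODE (pseudo));
  if (validate_replace_rtx (pseudo, cst, insn))
    {
      /* All occurrences were replaced and INSN was re-recognized.  */
    }
}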
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */

int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx_insn *insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
        || REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
/* Function called by note_uses to replace used subexpressions.  */

struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx_insn *insn;               /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
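
/* Illustrative sketch (not part of recog.c): unlike validate_replace_rtx,
   validate_replace_src_group only queues changes; the caller decides when
   to validate the whole group.  INSN and the rtxes are hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_replace_uses_only (rtx_insn *insn, rtx from, rtx to)
{
  /* Substitute TO for FROM in all used (non-SET_DEST) positions.  */
  validate_replace_src_group (from, to, insn);

  /* Now validate and commit, or roll everything back.  */
  return apply_change_group ();
}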
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx_insn *insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx_insn *insn)
{
  rtx_insn *next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && targetm.legitimate_constant_p (mode == VOIDmode
                                              ? GET_MODE (op)
                                              : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && paradoxical_subreg_p (op))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed
          && maybe_ne (SUBREG_BYTE (op), 0)
          && MEM_P (sub))
        return 0;

      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
          /* LRA can generate some invalid SUBREGS just for matched
             operand reload presentation.  LRA needs to treat them as
             valid.  */
          && ! LRA_SUBREG_P (op))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          /* LRA can use subreg to store a floating point value in an
             integer mode.  Although the floating point and the
             integer modes need the same number of hard registers, the
             size of floating point mode can be less than the integer
             mode.  */
          && ! lra_in_progress
          && paradoxical_subreg_p (op))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
         generate move insns with invalid addresses, which are made valid
         and efficiently calculated by LRA through further numerous
         transformations.  */
      if (lra_in_progress
          || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
        return 1;
    }

  return 0;
}
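
/* Illustrative sketch (not part of recog.c): a pass can use the operand
   predicates directly to decide whether an rtx must first be copied into
   a fresh pseudo before being used as an operand.  X and MODE are
   hypothetical.  */

static rtx ATTRIBUTE_UNUSED
example_force_general_operand (rtx x, machine_mode mode)
{
  if (general_operand (x, mode))
    return x;

  /* Not directly usable; load it into a new pseudo register first.  */
  rtx temp = gen_reg_rtx (mode);
  emit_move_insn (temp, x);
  return temp;
}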
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, machine_mode mode)
{
  /* Wrong mode for an address expr.  */
  if (GET_MODE (op) != VOIDmode
      && ! SCALAR_INT_MODE_P (GET_MODE (op)))
    return false;

  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
        return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && (lra_in_progress
                  || (REGNO (op) < FIRST_PSEUDO_REGISTER
                      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && targetm.legitimate_constant_p (mode == VOIDmode
                                            ? GET_MODE (op)
                                            : mode, op));
}
/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */

int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
      int prec = GET_MODE_PRECISION (int_mode);
      int bitsize = GET_MODE_BITSIZE (int_mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
        return 0;

      if (prec == bitsize)
        return 1;
      else
        {
          /* Multiword partial int.  */
          HOST_WIDE_INT x
            = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
          return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
        }
    }
  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
          && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  poly_int64 rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
#endif

  op = XEXP (op, 0);

  if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      poly_int64 offset;
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
          || (STACK_GROWS_DOWNWARD
              ? maybe_ne (offset, -rounded_size)
              : maybe_ne (offset, rounded_size)))
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
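
/* Illustrative sketch (not part of recog.c): on a target where the stack
   grows downward, STACK_PUSH_CODE is PRE_DEC and PUSH_ROUNDING does not
   round SImode, a push operand for an SImode value has the shape
   (mem:SI (pre_dec:P sp)).  This hypothetical helper builds one; under
   those assumptions push_operand would accept it.  */

static rtx ATTRIBUTE_UNUSED
example_build_push_mem (void)
{
  rtx addr = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  return gen_rtx_MEM (SImode, addr);
}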
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
                             rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */
      poly_int64 offset;
      rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
      return (known_eq (offset + SUBREG_BYTE (op), 0)
              && general_operand (addr, Pmode));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
        return tmp;
      if (GET_CODE (tmp) == SET)
        {
          tmp = SET_SRC (tmp);
          if (GET_CODE (tmp) == ASM_OPERANDS)
            return tmp;
        }
      break;

    default:
      break;
    }
  return NULL;
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return 0.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int i, n_sets = 0;

  if (asm_op == NULL)
    {
      if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
          && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
        {
          /* body is [(asm_input ...) (clobber (reg ...))...].  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
          return 0;
        }
      return -1;
    }

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (asm_op))
                return -1;
            }
        }
      else
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;
        }
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
          + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Write the location info into LOC.
   Return the assembler-template.
   If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
   return the basic assembly string.

   If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, machine_mode *modes,
                     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
        int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

        asmop = XVECEXP (body, 0, 0);
        if (GET_CODE (asmop) == SET)
          {
            asmop = SET_SRC (asmop);

            /* At least one output, plus some CLOBBERs.  The outputs are in
               the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
            for (i = 0; i < nparallel; i++)
              {
                if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
                  break;                /* Past last SET */
                gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
                if (operands)
                  operands[i] = SET_DEST (XVECEXP (body, 0, i));
                if (operand_locs)
                  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
                if (constraints)
                  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
                if (modes)
                  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
              }
            nbase = i;
          }
        else if (GET_CODE (asmop) == ASM_INPUT)
          {
            if (loc)
              *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
            return XSTR (asmop, 0);
          }
        break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
        constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
        modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
        operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
        operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
        constraints[nbase + i] = "";
      if (modes)
        modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
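
/* Illustrative sketch (not part of recog.c): the usual calling pattern is
   to size the arrays with asm_noperands and let decode_asm_operands fill
   them in, as check_asm_operands does above.  BODY is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_dump_asm_constraints (rtx body)
{
  int noperands = asm_noperands (body);
  if (noperands <= 0)
    return;

  rtx *operands = XALLOCAVEC (rtx, noperands);
  const char **constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (body, operands, NULL, constraints, NULL, NULL);
  for (int i = 0; i < noperands; i++)
    fprintf (stderr, "operand %d: constraint \"%s\"\n", i, constraints[i]);
}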
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
                         unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
        p += 1;
        /* A letter followed by a digit indicates an operand number.  */
        if (ISALPHA (p[0]) && ISDIGIT (p[1]))
          p += 1;
        if (ISDIGIT (*p))
          {
            char *endptr;
            unsigned long opnum = strtoul (p, &endptr, 10);
            if (endptr != p && opnum < noperands)
              used[opnum] = true;
            p = endptr;
          }
        else
          p += 1;
        break;

      default:
        p++;
        break;
      }
}
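
/* Illustrative sketch (not part of recog.c): for the template
   "sw\t$0, %0" with two operands, only operand 0 is marked as used.
   The array size here is hypothetical.  */

static void ATTRIBUTE_UNUSED
example_referenced_operands (void)
{
  bool used[2];
  get_referenced_operands ("sw\t$0, %0", used, 2);
  /* used[0] is now true; used[1] is false.  */
}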
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
  bool incdec_ok = false;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

          /* The rest of the compiler assumes that reloading the address
             of a MEM into a register will make it fit an 'o' constraint.
             That is, if it sees a MEM operand for an 'o' constraint,
             it assumes that (mem (base-reg)) will fit.

             That assumption fails on targets that don't have offsettable
             addresses at all.  We therefore need to treat 'o' asm
             constraints as a special case and only accept operands that
             are already offsettable, thus proving that at least one
             offsettable address exists.  */
        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        case '<':
        case '>':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */
          incdec_ok = true;
          /* FALLTHRU */
        default:
          cn = lookup_constraint (constraint);
          switch (get_constraint_type (cn))
            {
            case CT_REGISTER:
              if (!result
                  && reg_class_for_constraint (cn) != NO_REGS
                  && GET_MODE (op) != BLKmode
                  && register_operand (op, VOIDmode))
                result = 1;
              break;

            case CT_CONST_INT:
              if (!result
                  && CONST_INT_P (op)
                  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
                result = 1;
              break;

            case CT_MEMORY:
            case CT_SPECIAL_MEMORY:
              /* Every memory operand can be reloaded to fit.  */
              result = result || memory_operand (op, VOIDmode);
              break;

            case CT_ADDRESS:
              /* Every address operand can be reloaded to fit.  */
              result = result || address_operand (op, VOIDmode);
              break;

            case CT_FIXED_FORM:
              result = result || constraint_satisfied_p (op, cn);
              break;
            }
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint && *constraint != ',');
      if (len)
        return 0;
    }

  /* For operands without < or > constraints reject side-effects.  */
  if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
        return 0;
      default:
        break;
      }

  return result;
}
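
/* Illustrative sketch (not part of recog.c): asm_operand_ok can be used
   before reload to check a single operand against a constraint string,
   as check_asm_operands does above.  OP is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_fits_reg_or_mem (rtx op)
{
  /* "r,m": a register alternative and a memory alternative.  */
  return asm_operand_ok (op, "r,m", NULL) > 0;
}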
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
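
/* Illustrative sketch (not part of recog.c): for an address such as
   (plus (reg) (const_int 8)), find_constant_term_loc returns the address
   of the (const_int 8) slot, so a caller can temporarily rewrite it in
   place, the way offsettable_address_addr_space_p does below.  */

static void ATTRIBUTE_UNUSED
example_bump_constant_term (rtx *addr_loc)
{
  rtx *cst_loc = find_constant_term_loc (addr_loc);
  if (cst_loc && CONST_INT_P (*cst_loc))
    {
      rtx saved = *cst_loc;
      *cst_loc = GEN_INT (INTVAL (saved) + 1);  /* Hypothetical probe.  */
      /* ... test the modified address here ...  */
      *cst_loc = saved;                         /* Always restore.  */
    }
}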
1904 /* Return 1 if OP is a memory reference
1905 whose address contains no side effects
1906 and remains valid after the addition
1907 of a positive integer less than the
1908 size of the object being referenced.
1910 We assume that the original address is valid and do not check it.
1912 This uses strict_memory_address_p as a subroutine, so
1913 don't use it before reload. */
1916 offsettable_memref_p (rtx op)
1918 return ((MEM_P (op))
1919 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
1920 MEM_ADDR_SPACE (op)));
1923 /* Similar, but don't require a strictly valid mem ref:
1924 consider pseudo-regs valid as index or base regs. */
1927 offsettable_nonstrict_memref_p (rtx op)
1929 return ((MEM_P (op))
1930 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
1931 MEM_ADDR_SPACE (op)));

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of MODE.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
                                  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
             : memory_address_addr_space_p);
  poly_int64 mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (known_eq (mode_sz, 0))
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
                        plus_constant (address_mode, XEXP (y, 1),
                                       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
           && GET_CODE (y) == ZERO_EXTEND
           && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
                             plus_constant (pointer_mode, XEXP (y, 0),
                                            mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
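
/* Illustrative example (not target-specific): if Y is
   (plus (reg) (const_int 4)) and MODE is an 8-byte mode, the code above
   temporarily rewrites the constant term so that the tested address is
   (plus (reg) (const_int 11)), asks whether that is still valid in QImode,
   and then restores the original constant before returning.  */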

/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}
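
/* E.g. (post_inc (reg X)) is mode-dependent: the amount by which X is
   incremented is the size of the mode of the enclosing MEM, so the same
   address expression means different things in SImode and DImode.  */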

/* Return true if boolean attribute ATTR is supported.  */

static bool
have_bool_attr (bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return HAVE_ATTR_enabled;
    case BA_PREFERRED_FOR_SIZE:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
    case BA_PREFERRED_FOR_SPEED:
      return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
    }
  gcc_unreachable ();
}

/* Return the value of ATTR for instruction INSN.  */

static bool
get_bool_attr (rtx_insn *insn, bool_attr attr)
{
  switch (attr)
    {
    case BA_ENABLED:
      return get_attr_enabled (insn);
    case BA_PREFERRED_FOR_SIZE:
      return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
    case BA_PREFERRED_FOR_SPEED:
      return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
    }
  gcc_unreachable ();
}

/* Like get_bool_attr_mask, but don't use the cache.  */

static alternative_mask
get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
{
  /* Temporarily install enough information for get_attr_<foo> to assume
     that the insn operands are already cached.  As above, the attribute
     mustn't depend on the values of operands, so we don't provide their
     real values here.  */
  rtx_insn *old_insn = recog_data.insn;
  int old_alternative = which_alternative;

  recog_data.insn = insn;
  alternative_mask mask = ALL_ALTERNATIVES;
  int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
  for (int i = 0; i < n_alternatives; i++)
    {
      which_alternative = i;
      if (!get_bool_attr (insn, attr))
        mask &= ~ALTERNATIVE_BIT (i);
    }

  recog_data.insn = old_insn;
  which_alternative = old_alternative;
  return mask;
}

/* Return the mask of operand alternatives that are allowed for INSN
   by boolean attribute ATTR.  This mask depends only on INSN and on
   the current target; it does not depend on things like the values of
   operands.  */

static alternative_mask
get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
{
  /* Quick exit for asms and for targets that don't use these attributes.  */
  int code = INSN_CODE (insn);
  if (code < 0 || !have_bool_attr (attr))
    return ALL_ALTERNATIVES;

  /* Calling get_attr_<foo> can be expensive, so cache the mask
     for speed.  */
  if (!this_target_recog->x_bool_attr_masks[code][attr])
    this_target_recog->x_bool_attr_masks[code][attr]
      = get_bool_attr_mask_uncached (insn, attr);
  return this_target_recog->x_bool_attr_masks[code][attr];
}

/* Return the set of alternatives of INSN that are allowed by the current
   target.  */

alternative_mask
get_enabled_alternatives (rtx_insn *insn)
{
  return get_bool_attr_mask (insn, BA_ENABLED);
}

/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the current size/speed optimization
   choice.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn)
{
  if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}

/* Return the set of alternatives of INSN that are allowed by the current
   target and are preferred for the size/speed optimization choice
   associated with BB.  Passing a separate BB is useful if INSN has not
   been emitted yet or if we are considering moving it to a different
   block.  */

alternative_mask
get_preferred_alternatives (rtx_insn *insn, basic_block bb)
{
  if (optimize_bb_for_speed_p (bb))
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
  else
    return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
}
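
/* A typical caller (a sketch; only the functions above are assumed)
   restricts constraint matching to the preferred alternatives and falls
   back to the merely enabled ones if nothing matches:

     alternative_mask alts = get_preferred_alternatives (insn);
     if (!constrain_operands (reload_completed, alts))
       constrain_operands (reload_completed, get_enabled_alternatives (insn));
*/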

/* Assert that the cached boolean attributes for INSN are still accurate.
   The backend is required to define these attributes in a way that only
   depends on the current target (rather than operands, compiler phase,
   etc.).  */

bool
check_bool_attrs (rtx_insn *insn)
{
  int code = INSN_CODE (insn);
  if (code >= 0)
    for (int i = 0; i <= BA_LAST; ++i)
      {
        enum bool_attr attr = (enum bool_attr) i;
        if (this_target_recog->x_bool_attr_masks[code][attr])
          gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
                      == get_bool_attr_mask_uncached (insn, attr));
      }
  return true;
}

/* Like extract_insn, but save the insn extracted and don't extract again
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx_insn *insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do an uncached extract_insn, constrain_operands and complain about
   failures.  This should be used when extracting a pre-existing
   constrained instruction if the caller wants to know which alternative
   was chosen.  */
void
extract_constrain_insn (rtx_insn *insn)
{
  extract_insn (insn);
  if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}

/* Do a cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx_insn *insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed,
                              get_enabled_alternatives (insn)))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands on INSN and complain about failures.  */
int
constrain_operands_cached (rtx_insn *insn, int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict, get_enabled_alternatives (insn));
  else
    return 1;
}
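
/* Typical usage (a sketch): after extract_constrain_insn_cached (insn),
   which_alternative identifies the matched alternative and
   recog_data.operand[] holds the operands, so alternative-dependent
   attributes can be computed without extracting the insn again.  */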

/* Analyze INSN and fill in recog_data.  */

void
extract_insn (rtx_insn *insn)
{
  int i;
  int icode;
  int noperands;
  rtx body = PATTERN (insn);

  recog_data.n_operands = 0;
  recog_data.n_alternatives = 0;
  recog_data.n_dups = 0;
  recog_data.is_asm = false;

  switch (GET_CODE (body))
    {
    case USE:
    case CLOBBER:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case VAR_LOCATION:
    case DEBUG_MARKER:
      return;

    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        goto asm_insn;
      else
        goto normal_insn;
    case PARALLEL:
      if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
          || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
        goto asm_insn;
      else
        goto normal_insn;
    case ASM_OPERANDS:
    asm_insn:
      recog_data.n_operands = noperands = asm_noperands (body);
      if (noperands >= 0)
        {
          /* This insn is an `asm' with operands.  */

          /* expand_asm_operands makes sure there aren't too many operands.  */
          gcc_assert (noperands <= MAX_RECOG_OPERANDS);

          /* Now get the operand values and constraints out of the insn.  */
          decode_asm_operands (body, recog_data.operand,
                               recog_data.operand_loc,
                               recog_data.constraints,
                               recog_data.operand_mode, NULL);
          memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
          if (noperands > 0)
            {
              const char *p = recog_data.constraints[0];
              recog_data.n_alternatives = 1;
              while (*p)
                recog_data.n_alternatives += (*p++ == ',');
            }
          recog_data.is_asm = true;
          break;
        }
      fatal_insn_not_found (insn);

    default:
    normal_insn:
      /* Ordinary insn: recognize it, get the operands via insn_extract
         and get the constraints.  */

      icode = recog_memoized (insn);
      if (icode < 0)
        fatal_insn_not_found (insn);

      recog_data.n_operands = noperands = insn_data[icode].n_operands;
      recog_data.n_alternatives = insn_data[icode].n_alternatives;
      recog_data.n_dups = insn_data[icode].n_dups;

      insn_extract (insn);

      for (i = 0; i < noperands; i++)
        {
          recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
          recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
          recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
          /* A VOIDmode match_operand gets its mode from the real operand.  */
          if (recog_data.operand_mode[i] == VOIDmode)
            recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
        }
    }
  for (i = 0; i < noperands; i++)
    recog_data.operand_type[i]
      = (recog_data.constraints[i][0] == '=' ? OP_OUT
         : recog_data.constraints[i][0] == '+' ? OP_INOUT
         : OP_IN);

  gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);

  recog_data.insn = NULL;
  which_alternative = -1;
}
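
/* For example, an asm whose first constraint string is "=r,m" has one
   comma and therefore two alternatives; an ordinary insn gets these
   counts directly from insn_data[icode].  An operand whose constraint
   starts with '=' is classified OP_OUT, with '+' OP_INOUT, and anything
   else OP_IN.  */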

/* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
   operands, N_ALTERNATIVES alternatives and constraint strings
   CONSTRAINTS.  OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
   and CONSTRAINTS has N_OPERANDS entries.  OPLOC should be passed in
   if the insn is an asm statement and preprocessing should take the
   asm operands into account, e.g. to determine whether they could be
   addresses in constraints that require addresses; it should then
   point to an array of pointers to each operand.  */

void
preprocess_constraints (int n_operands, int n_alternatives,
                        const char **constraints,
                        operand_alternative *op_alt_base,
                        rtx **oploc)
{
  for (int i = 0; i < n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = constraints[i];

      op_alt = op_alt_base;

      for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
        {
          op_alt[i].cl = NO_REGS;
          op_alt[i].constraint = p;
          op_alt[i].matches = -1;
          op_alt[i].matched = -1;

          if (*p == '\0' || *p == ',')
            {
              op_alt[i].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p;
              if (c == '#')
                do
                  c = *++p;
                while (c != ',' && c != '\0');
              if (c == ',' || c == '\0')
                {
                  p++;
                  break;
                }

              switch (c)
                {
                case '?':
                  op_alt[i].reject += 6;
                  break;
                case '!':
                  op_alt[i].reject += 600;
                  break;
                case '&':
                  op_alt[i].earlyclobber = 1;
                  break;

                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  {
                    char *end;
                    op_alt[i].matches = strtoul (p, &end, 10);
                    op_alt[op_alt[i].matches].matched = i;
                    p = end;
                  }
                  continue;

                case 'X':
                  op_alt[i].anything_ok = 1;
                  break;

                case 'g':
                  op_alt[i].cl =
                   reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
                  break;

                default:
                  enum constraint_num cn = lookup_constraint (p);
                  enum reg_class cl;
                  switch (get_constraint_type (cn))
                    {
                    case CT_REGISTER:
                      cl = reg_class_for_constraint (cn);
                      if (cl != NO_REGS)
                        op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
                      break;

                    case CT_CONST_INT:
                      break;

                    case CT_MEMORY:
                    case CT_SPECIAL_MEMORY:
                      op_alt[i].memory_ok = 1;
                      break;

                    case CT_ADDRESS:
                      if (oploc && !address_operand (*oploc[i], VOIDmode))
                        break;

                      op_alt[i].is_address = 1;
                      op_alt[i].cl
                        = (reg_class_subunion
                           [(int) op_alt[i].cl]
                           [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
                                                  ADDRESS, SCRATCH)]);
                      break;

                    case CT_FIXED_FORM:
                      break;
                    }
                  break;
                }
              p += CONSTRAINT_LEN (c, p);
            }
        }
    }
}
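
/* Worked example (illustrative; the resulting classes depend on the
   target): with n_operands == 2, n_alternatives == 2 and constraints
   { "r,m", "r,r" }, the entry for alternative A and operand OP lives at
   op_alt_base[A * 2 + OP].  Alternative 0 of operand 0 gets a register
   class from 'r' via CT_REGISTER, while alternative 1 of operand 0 gets
   memory_ok set from 'm' via CT_MEMORY.  */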

/* Return an array of operand_alternative structures for
   instruction ICODE.  */

const operand_alternative *
preprocess_insn_constraints (unsigned int icode)
{
  gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
  if (this_target_recog->x_op_alt[icode])
    return this_target_recog->x_op_alt[icode];

  int n_operands = insn_data[icode].n_operands;
  if (n_operands == 0)
    return 0;
  /* Always provide at least one alternative so that which_op_alt ()
     works correctly.  If the instruction has 0 alternatives (i.e. all
     constraint strings are empty) then each operand in this alternative
     will have anything_ok set.  */
  int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
  int n_entries = n_operands * n_alternatives;

  operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
  const char **constraints = XALLOCAVEC (const char *, n_operands);

  for (int i = 0; i < n_operands; ++i)
    constraints[i] = insn_data[icode].operand[i].constraint;
  preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
                          NULL);

  this_target_recog->x_op_alt[icode] = op_alt;
  return op_alt;
}

/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

void
preprocess_constraints (rtx_insn *insn)
{
  int icode = INSN_CODE (insn);
  if (icode >= 0)
    recog_op_alt = preprocess_insn_constraints (icode);
  else
    {
      int n_operands = recog_data.n_operands;
      int n_alternatives = recog_data.n_alternatives;
      int n_entries = n_operands * n_alternatives;
      memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
      preprocess_constraints (n_operands, n_alternatives,
                              recog_data.constraints, asm_op_alt,
                              NULL);
      recog_op_alt = asm_op_alt;
    }
}

/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they match any of the alternatives in ALTERNATIVES.

   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code, and by
   the routines that determine an insn's attributes.

   If STRICT is positive, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this_op, other;
};

int
constrain_operands (int strict, alternative_mask alternatives)
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  which_alternative = 0;
  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  do
    {
      int seen_earlyclobber_at = -1;
      int opno;
      int lose = 0;
      funny_match_index = 0;

      if (!TEST_BIT (alternatives, which_alternative))
        {
          int i;

          for (i = 0; i < recog_data.n_operands; i++)
            constraints[i] = skip_alternative (constraints[i]);

          which_alternative++;
          continue;
        }

      for (opno = 0; opno < recog_data.n_operands; opno++)
        {
          rtx op = recog_data.operand[opno];
          machine_mode mode = GET_MODE (op);
          const char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;
          int len;

          earlyclobber[opno] = 0;

          /* A unary operator may be accepted by the predicate, but it
             is irrelevant for matching constraints.  */
          if (UNARY_P (op))
            op = XEXP (op, 0);

          if (GET_CODE (op) == SUBREG)
            {
              if (REG_P (SUBREG_REG (op))
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
                                              GET_MODE (SUBREG_REG (op)),
                                              SUBREG_BYTE (op),
                                              GET_MODE (op));
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;

          do
            switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
              {
              case '\0':
                len = 0;
                break;
              case ',':
                c = '\0';
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                do
                  p++;
                while (*p && *p != ',');
                len = 0;
                break;

              case '&':
                earlyclobber[opno] = 1;
                if (seen_earlyclobber_at < 0)
                  seen_earlyclobber_at = opno;
                break;

              case '0': case '1': case '2': case '3': case '4':
              case '5': case '6': case '7': case '8': case '9':
                {
                  /* This operand must be the same as a previous one.
                     This kind of constraint is used for instructions such
                     as add when they take only two operands.

                     Note that the lower-numbered operand is passed first.

                     If we are not testing strictly, assume that this
                     constraint will be satisfied.  */

                  char *end;
                  int match;

                  match = strtoul (p, &end, 10);
                  p = end;

                  if (strict < 0)
                    val = 1;
                  else
                    {
                      rtx op1 = recog_data.operand[match];
                      rtx op2 = recog_data.operand[opno];

                      /* A unary operator may be accepted by the predicate,
                         but it is irrelevant for matching constraints.  */
                      if (UNARY_P (op1))
                        op1 = XEXP (op1, 0);
                      if (UNARY_P (op2))
                        op2 = XEXP (op2, 0);

                      val = operands_match_p (op1, op2);
                    }

                  matching_operands[opno] = match;
                  matching_operands[match] = opno;

                  if (val != 0)
                    win = 1;

                  /* If output is *x and input is *--x, arrange later
                     to change the output to *--x as well, since the
                     output op is the one that will be printed.  */
                  if (val == 2 && strict > 0)
                    {
                      funny_match[funny_match_index].this_op = opno;
                      funny_match[funny_match_index++].other = match;
                    }
                }
                len = 0;
                break;

              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  We also want to make sure we have a
                   valid mode.  */
                if ((GET_MODE (op) == VOIDmode
                     || SCALAR_INT_MODE_P (GET_MODE (op)))
                    && (strict <= 0
                        || (strict_memory_address_p
                             (recog_data.operand_mode[opno], op))))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  Well, except that reload
                   doesn't check the validity of its replacements, but
                   that should only matter when there's a bug.  */
              case 'g':
                /* Anything goes unless it is a REG and really has a hard reg
                   but the hard reg is not in the class GENERAL_REGS.  */
                if (REG_P (op))
                  {
                    if (strict < 0
                        || GENERAL_REGS == ALL_REGS
                        || (reload_in_progress
                            && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                        || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                      win = 1;
                  }
                else if (strict < 0 || general_operand (op, mode))
                  win = 1;
                break;

              default:
                {
                  enum constraint_num cn = lookup_constraint (p);
                  enum reg_class cl = reg_class_for_constraint (cn);
                  if (cl != NO_REGS)
                    {
                      if (strict < 0
                          || (strict == 0
                              && REG_P (op)
                              && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                          || (strict == 0 && GET_CODE (op) == SCRATCH)
                          || (REG_P (op)
                              && reg_fits_class_p (op, cl, offset, mode)))
                        win = 1;
                    }

                  else if (constraint_satisfied_p (op, cn))
                    win = 1;

                  else if (insn_extra_memory_constraint (cn)
                           /* Every memory operand can be reloaded to fit.  */
                           && ((strict < 0 && MEM_P (op))
                               /* Before reload, accept what reload can turn
                                  into a mem.  */
                               || (strict < 0 && CONSTANT_P (op))
                               /* Before reload, accept a pseudo,
                                  since LRA can turn it into a mem.  */
                               || (strict < 0 && targetm.lra_p () && REG_P (op)
                                   && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                               /* During reload, accept a pseudo.  */
                               || (reload_in_progress && REG_P (op)
                                   && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
                    win = 1;
                  else if (insn_extra_address_constraint (cn)
                           /* Every address operand can be reloaded to fit.  */
                           && strict < 0)
                    win = 1;
                  /* Cater to architectures like IA-64 that define extra memory
                     constraints without using define_memory_constraint.  */
                  else if (reload_in_progress
                           && REG_P (op)
                           && REGNO (op) >= FIRST_PSEUDO_REGISTER
                           && reg_renumber[REGNO (op)] < 0
                           && reg_equiv_mem (REGNO (op)) != 0
                           && constraint_satisfied_p
                              (reg_equiv_mem (REGNO (op)), cn))
                    win = 1;
                  break;
                }
              }
          while (p += len, c);

          constraints[opno] = p;
          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;

          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0 && seen_earlyclobber_at >= 0)
            for (eopno = seen_earlyclobber_at;
                 eopno < recog_data.n_operands;
                 eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && REG_P (recog_data.operand[eopno]))
                for (opno = 0; opno < recog_data.n_operands; opno++)
                  if ((MEM_P (recog_data.operand[opno])
                       || recog_data.operand_type[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *recog_data.constraints[opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && operands_match_p (recog_data.operand[opno],
                                                 recog_data.operand[eopno]))
                      && ! safe_from_earlyclobber (recog_data.operand[opno],
                                                   recog_data.operand[eopno]))
                    lose = 1;

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_data.operand[funny_match[funny_match_index].other]
                    = recog_data.operand[funny_match[funny_match_index].this_op];
                }

              /* For operands without < or > constraints reject side-effects.  */
              if (AUTO_INC_DEC && recog_data.is_asm)
                {
                  for (opno = 0; opno < recog_data.n_operands; opno++)
                    if (MEM_P (recog_data.operand[opno]))
                      switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
                        {
                        case PRE_INC:
                        case POST_INC:
                        case PRE_DEC:
                        case POST_DEC:
                        case PRE_MODIFY:
                        case POST_MODIFY:
                          if (strchr (recog_data.constraints[opno], '<') == NULL
                              && strchr (recog_data.constraints[opno], '>')
                                 == NULL)
                            return 0;
                          break;
                        default:
                          break;
                        }
                }

              return 1;
            }
        }

      which_alternative++;
    }
  while (which_alternative < recog_data.n_alternatives);

  which_alternative = -1;
  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1, alternatives);
  else
    return 0;
}
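
/* For instance, a constraint string "0" on operand 1 requires it to match
   operand 0 (as in a two-operand add).  The val == 2 case above covers an
   output of *x paired with an input of *--x: after reload, the funny_match
   fix-up copies the autoincrement form into the output operand, since the
   output is what the template prints.  */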

/* Return true iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CL when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CL.  */

bool
reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
                  machine_mode mode)
{
  unsigned int regno = REGNO (operand);

  if (cl == NO_REGS)
    return false;

  /* Regno must not be a pseudo register.  Offset may be negative.  */
  return (HARD_REGISTER_NUM_P (regno)
          && HARD_REGISTER_NUM_P (regno + offset)
          && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
                                regno + offset));
}

/* Split single instruction.  Helper function for split_all_insns and
   split_all_insns_noflow.  Return last insn in the sequence if successful,
   or NULL if unsuccessful.  */

static rtx_insn *
split_insn (rtx_insn *insn)
{
  /* Split insns here to get max fine-grain parallelism.  */
  rtx_insn *first = PREV_INSN (insn);
  rtx_insn *last = try_split (PATTERN (insn), insn, 1);
  rtx insn_set, last_set, note;

  if (last == insn)
    return NULL;

  /* If the original instruction was a single set that was known to be
     equivalent to a constant, see if we can say the same about the last
     instruction in the split sequence.  The two instructions must set
     the same destination.  */
  insn_set = single_set (insn);
  if (insn_set)
    {
      last_set = single_set (last);
      if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
        {
          note = find_reg_equal_equiv_note (insn);
          if (note && CONSTANT_P (XEXP (note, 0)))
            set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
          else if (CONSTANT_P (SET_SRC (insn_set)))
            set_unique_reg_note (last, REG_EQUAL,
                                 copy_rtx (SET_SRC (insn_set)));
        }
    }

  /* try_split returns the NOTE that INSN became.  */
  SET_INSN_DELETED (insn);

  /* ??? Coddle to md files that generate subregs in post-reload
     splitters instead of computing the proper hard register.  */
  if (reload_completed && first != last)
    {
      first = NEXT_INSN (first);
      for (;;)
        {
          if (INSN_P (first))
            cleanup_subreg_operands (first);
          if (first == last)
            break;
          first = NEXT_INSN (first);
        }
    }

  return last;
}
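
/* For example, if a define_split in the machine description breaks a
   double-word move into two word moves, try_split returns the last of the
   new insns; if the original move carried a REG_EQUAL note with a constant
   and the last new insn sets the same destination, the note is copied over
   so later passes keep the equivalence.  */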

/* Split all insns in the function.  */

void
split_all_insns (void)
{
  bool changed;
  bool need_cfg_cleanup = false;
  basic_block bb;

  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
  bitmap_clear (blocks);
  changed = false;

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      rtx_insn *insn, *next;
      bool finish = false;

      rtl_profile_for_bb (bb);
      for (insn = BB_HEAD (bb); !finish ; insn = next)
        {
          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          finish = (insn == BB_END (bb));

          /* If INSN has a REG_EH_REGION note and we split INSN, the
             resulting split may not have/need REG_EH_REGION notes.

             If that happens and INSN was the last reference to the
             given EH region, then the EH region will become unreachable.
             We cannot leave the unreachable blocks in the CFG as that
             will trigger a checking failure.

             So track if INSN has a REG_EH_REGION note.  If so and we
             split INSN, then trigger a CFG cleanup.  */
          rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
          if (INSN_P (insn))
            {
              rtx set = single_set (insn);

              /* Don't split no-op move insns.  These should silently
                 disappear later in final.  Splitting such insns would
                 break the code that handles LIBCALL blocks.  */
              if (set && set_noop_p (set))
                {
                  /* Nops get in the way while scheduling, so delete them
                     now if register allocation has already been done.  It
                     is too risky to try to do this before register
                     allocation, and there are unlikely to be very many
                     nops then anyways.  */
                  if (reload_completed)
                    delete_insn_and_edges (insn);
                  if (note)
                    need_cfg_cleanup = true;
                }
              else
                {
                  if (split_insn (insn))
                    {
                      bitmap_set_bit (blocks, bb->index);
                      changed = true;
                      if (note)
                        need_cfg_cleanup = true;
                    }
                }
            }
        }
    }

  default_rtl_profile ();
  if (changed)
    {
      find_many_sub_basic_blocks (blocks);

      /* Splitting could drop a REG_EH_REGION if it potentially
         trapped in its original form, but does not in its split
         form.  Consider a FLOAT_TRUNCATE which splits into a memory
         store/load pair and -fnon-call-exceptions.  */
      if (need_cfg_cleanup)
        cleanup_cfg (0);
    }

  checking_verify_flow_info ();
}

/* Same as split_all_insns, but do not expect CFG to be available.
   Used by machine dependent reorg passes.  */

unsigned int
split_all_insns_noflow (void)
{
  rtx_insn *next, *insn;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn))
        {
          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles LIBCALL blocks.  */
          rtx set = single_set (insn);
          if (set && set_noop_p (set))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyways.

                 ??? Should we use delete_insn when the CFG isn't valid?  */
              if (reload_completed)
                delete_insn_and_edges (insn);
            }
          else
            split_insn (insn);
        }
    }
  return 0;
}

struct peep2_insn_data
{
  rtx_insn *insn;
  regset live_before;
};

static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
static int peep2_current;

static bool peep2_do_rebuild_jump_labels;
static bool peep2_do_cleanup_cfg;

/* The number of instructions available to match a peep2.  */
int peep2_current_count;

/* A marker indicating the last insn of the block.  The live_before regset
   for this element is correct, indicating DF_LIVE_OUT for the block.  */
#define PEEP2_EOB invalid_insn_rtx

/* Wrap N to fit into the peep2_insn_data buffer.  */

static int
peep2_buf_position (int n)
{
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}
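
/* The buffer holds MAX_INSNS_PER_PEEP2 + 1 entries and is used circularly;
   e.g. if MAX_INSNS_PER_PEEP2 is 25, the buffer has 26 slots and
   peep2_buf_position (26 + 3) yields 3.  */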

/* Return the Nth non-note insn after `current', or return NULL_RTX if it
   does not exist.  Used by the recognizer to find the next insn to match
   in a multi-insn pattern.  */

rtx_insn *
peep2_next_insn (int n)
{
  gcc_assert (n <= peep2_current_count);

  n = peep2_buf_position (peep2_current + n);

  return peep2_insn_data[n].insn;
}

/* Return true if REGNO is dead before the Nth non-note insn
   after `current'.  */

int
peep2_regno_dead_p (int ofs, int regno)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
}

/* Similarly for a REG.  */

int
peep2_reg_dead_p (int ofs, rtx reg)
{
  gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);

  ofs = peep2_buf_position (peep2_current + ofs);

  gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);

  unsigned int end_regno = END_REGNO (reg);
  for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
    if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
      return 0;
  return 1;
}

/* Regno offset to be used in the register search.  */
static int search_ofs;

/* Try to find a hard register of mode MODE, matching the register class in
   CLASS_STR, which is available from the beginning of the insn at buffer
   position FROM and remains available until the end of the insn at buffer
   position TO.  Registers that already have bits set in REG_SET will not
   be considered.

   If an appropriate register is available, it will be returned and the
   corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
   returned.  */

rtx
peep2_find_free_register (int from, int to, const char *class_str,
                          machine_mode mode, HARD_REG_SET *reg_set)
{
  enum reg_class cl;
  HARD_REG_SET live;
  df_ref def;
  int i;

  gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
  gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);

  from = peep2_buf_position (peep2_current + from);
  to = peep2_buf_position (peep2_current + to);

  gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
  REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);

  while (from != to)
    {
      gcc_assert (peep2_insn_data[from].insn != NULL_RTX);

      /* Don't use registers set or clobbered by the insn.  */
      FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
        SET_HARD_REG_BIT (live, DF_REF_REGNO (def));

      from = peep2_buf_position (from + 1);
    }

  cl = reg_class_for_constraint (lookup_constraint (class_str));

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int raw_regno, regno, success, j;

      /* Distribute the free registers as much as possible.  */
      raw_regno = search_ofs + i;
      if (raw_regno >= FIRST_PSEUDO_REGISTER)
        raw_regno -= FIRST_PSEUDO_REGISTER;
#ifdef REG_ALLOC_ORDER
      regno = reg_alloc_order[raw_regno];
#else
      regno = raw_regno;
#endif

      /* Can it support the mode we need?  */
      if (!targetm.hard_regno_mode_ok (regno, mode))
        continue;

      success = 1;
      for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
        {
          /* Don't allocate fixed registers.  */
          if (fixed_regs[regno + j])
            {
              success = 0;
              break;
            }
          /* Don't allocate global registers.  */
          if (global_regs[regno + j])
            {
              success = 0;
              break;
            }
          /* Make sure the register is of the right class.  */
          if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
            {
              success = 0;
              break;
            }
          /* And that we don't create an extra save/restore.  */
          if (! crtl->abi->clobbers_full_reg_p (regno + j)
              && ! df_regs_ever_live_p (regno + j))
            {
              success = 0;
              break;
            }

          if (! targetm.hard_regno_scratch_ok (regno + j))
            {
              success = 0;
              break;
            }

          /* And we don't clobber traceback for noreturn functions.  */
          if ((regno + j == FRAME_POINTER_REGNUM
               || regno + j == HARD_FRAME_POINTER_REGNUM)
              && (! reload_completed || frame_pointer_needed))
            {
              success = 0;
              break;
            }

          if (TEST_HARD_REG_BIT (*reg_set, regno + j)
              || TEST_HARD_REG_BIT (live, regno + j))
            {
              success = 0;
              break;
            }
        }

      if (success)
        {
          add_to_hard_reg_set (reg_set, mode, regno);

          /* Start the next search with the next register.  */
          if (++raw_regno >= FIRST_PSEUDO_REGISTER)
            raw_regno = 0;
          search_ofs = raw_regno;

          return gen_rtx_REG (mode, regno);
        }
    }

  search_ofs = 0;
  return NULL_RTX;
}
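
/* This is what a (match_scratch ...) in a define_peephole2 turns into;
   roughly (a sketch, with illustrative values), the generated matcher does:

     rtx scratch = peep2_find_free_register (0, 1, "r", SImode, &live_set);
     if (scratch == NULL_RTX)
       ...the peephole fails to match...

   where live_set is a HARD_REG_SET the matcher maintains across the
   scratch operands of the pattern.  */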

/* Forget all currently tracked instructions, only remember current
   LIVE regset.  */

static void
peep2_reinit_state (regset live)
{
  int i;

  /* Indicate that all slots except the last hold invalid data.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
    peep2_insn_data[i].insn = NULL;
  peep2_current_count = 0;

  /* Indicate that the last slot contains live_after data.  */
  peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
  peep2_current = MAX_INSNS_PER_PEEP2;

  COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
}

/* While scanning basic block BB, we found a match of length MATCH_LEN,
   starting at INSN.  Perform the replacement, removing the old insns and
   replacing them with ATTEMPT.  Returns the last insn emitted, or NULL
   if the replacement is rejected.  */

static rtx_insn *
peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
{
  int i;
  rtx_insn *last, *before_try, *x;
  rtx eh_note, as_note;
  rtx_insn *old_insn;
  rtx_insn *new_insn;
  bool was_call = false;

  /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
     match more than one insn, or to be split into more than one insn.  */
  old_insn = peep2_insn_data[peep2_current].insn;
  if (RTX_FRAME_RELATED_P (old_insn))
    {
      bool any_note = false;
      rtx note;

      if (match_len != 0)
        return NULL;

      /* Look for one "active" insn.  I.e. ignore any "clobber" insns that
         may be in the stream for the purpose of register allocation.  */
      if (active_insn_p (attempt))
        new_insn = attempt;
      else
        new_insn = next_active_insn (attempt);
      if (next_active_insn (new_insn))
        return NULL;

      /* We have a 1-1 replacement.  Copy over any frame-related info.  */
      RTX_FRAME_RELATED_P (new_insn) = 1;

      /* Allow the backend to fill in a note during the split.  */
      for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
        switch (REG_NOTE_KIND (note))
          {
          case REG_FRAME_RELATED_EXPR:
          case REG_CFA_DEF_CFA:
          case REG_CFA_ADJUST_CFA:
          case REG_CFA_OFFSET:
          case REG_CFA_REGISTER:
          case REG_CFA_EXPRESSION:
          case REG_CFA_RESTORE:
          case REG_CFA_SET_VDRAP:
            any_note = true;
            break;
          default:
            break;
          }

      /* If the backend didn't supply a note, copy one over.  */
      if (!any_note)
        for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
          switch (REG_NOTE_KIND (note))
            {
            case REG_FRAME_RELATED_EXPR:
            case REG_CFA_DEF_CFA:
            case REG_CFA_ADJUST_CFA:
            case REG_CFA_OFFSET:
            case REG_CFA_REGISTER:
            case REG_CFA_EXPRESSION:
            case REG_CFA_RESTORE:
            case REG_CFA_SET_VDRAP:
              add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
              any_note = true;
              break;
            default:
              break;
            }

      /* If there still isn't a note, make sure the unwind info sees the
         same expression as before the split.  */
      if (!any_note)
        {
          rtx old_set, new_set;

          /* The old insn had better have been simple, or annotated.  */
          old_set = single_set (old_insn);
          gcc_assert (old_set != NULL);

          new_set = single_set (new_insn);
          if (!new_set || !rtx_equal_p (new_set, old_set))
            add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
        }

      /* Copy prologue/epilogue status.  This is required in order to keep
         proper placement of EPILOGUE_BEG and the DW_CFA_remember_state.  */
      maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
     cfg-related call notes.  */
  for (i = 0; i <= match_len; ++i)
    {
      int j;
      rtx note;

      j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;
      if (!CALL_P (old_insn))
        continue;
      was_call = true;

      new_insn = attempt;
      while (new_insn != NULL_RTX)
        {
          if (CALL_P (new_insn))
            break;
          new_insn = NEXT_INSN (new_insn);
        }

      gcc_assert (new_insn != NULL_RTX);

      CALL_INSN_FUNCTION_USAGE (new_insn)
        = CALL_INSN_FUNCTION_USAGE (old_insn);
      SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);

      for (note = REG_NOTES (old_insn);
           note;
           note = XEXP (note, 1))
        switch (REG_NOTE_KIND (note))
          {
          case REG_NORETURN:
          case REG_SETJMP:
          case REG_TM:
          case REG_CALL_NOCF_CHECK:
            add_reg_note (new_insn, REG_NOTE_KIND (note),
                          XEXP (note, 0));
            break;
          default:
            /* Discard all other reg notes.  */
            break;
          }

      /* Croak if there is another call in the sequence.  */
      while (++i <= match_len)
        {
          j = peep2_buf_position (peep2_current + i);
          old_insn = peep2_insn_data[j].insn;
          gcc_assert (!CALL_P (old_insn));
        }
      break;
    }

  /* If we matched any instruction that had a REG_ARGS_SIZE, then
     move those notes over to the new sequence.  */
  as_note = NULL;
  for (i = match_len; i >= 0; --i)
    {
      int j = peep2_buf_position (peep2_current + i);
      old_insn = peep2_insn_data[j].insn;

      as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
      if (as_note)
        break;
    }

  i = peep2_buf_position (peep2_current + match_len);
  eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);

  /* Replace the old sequence with the new.  */
  rtx_insn *peepinsn = peep2_insn_data[i].insn;
  last = emit_insn_after_setloc (attempt,
                                 peep2_insn_data[i].insn,
                                 INSN_LOCATION (peepinsn));
  if (JUMP_P (peepinsn) && JUMP_P (last))
    CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
  before_try = PREV_INSN (insn);
  delete_insn_chain (insn, peep2_insn_data[i].insn, false);

  /* Re-insert the EH_REGION notes.  */
  if (eh_note || (was_call && nonlocal_goto_handler_labels))
    {
      edge eh_edge;
      edge_iterator ei;

      FOR_EACH_EDGE (eh_edge, ei, bb->succs)
        if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
          break;

      if (eh_note)
        copy_reg_eh_region_note_backward (eh_note, last, before_try);

      if (eh_edge)
        for (x = last; x != before_try; x = PREV_INSN (x))
          if (x != BB_END (bb)
              && (can_throw_internal (x)
                  || can_nonlocal_goto (x)))
            {
              edge nfte, nehe;
              int flags;

              nfte = split_block (bb, x);
              flags = (eh_edge->flags
                       & (EDGE_EH | EDGE_ABNORMAL));
              if (CALL_P (x))
                flags |= EDGE_ABNORMAL_CALL;
              nehe = make_edge (nfte->src, eh_edge->dest,
                                flags);

              nehe->probability = eh_edge->probability;
              nfte->probability = nehe->probability.invert ();

              peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
              bb = nfte->src;
              eh_edge = nehe;
            }

      /* Converting possibly trapping insn to non-trapping is
         possible.  Zap dummy outgoing edges.  */
      peep2_do_cleanup_cfg |= purge_dead_edges (bb);
    }

  /* Re-insert the ARGS_SIZE notes.  */
  if (as_note)
    fixup_args_size_notes (before_try, last, get_args_size (as_note));

  /* If we generated a jump instruction, it won't have
     JUMP_LABEL set.  Recompute after we're done.  */
  for (x = last; x != before_try; x = PREV_INSN (x))
    if (JUMP_P (x))
      {
        peep2_do_rebuild_jump_labels = true;
        break;
      }

  return last;
}

/* After performing a replacement in basic block BB, fix up the life
   information in our buffer.  LAST is the last of the insns that we
   emitted as a replacement.  PREV is the insn before the start of
   the replacement.  MATCH_LEN is the number of instructions that were
   matched, and which now need to be replaced in the buffer.  */

static void
peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
                   rtx_insn *prev)
{
  int i = peep2_buf_position (peep2_current + match_len + 1);
  rtx_insn *x;
  regset_head live;

  INIT_REG_SET (&live);
  COPY_REG_SET (&live, peep2_insn_data[i].live_before);

  gcc_assert (peep2_current_count >= match_len + 1);
  peep2_current_count -= match_len + 1;

  x = last;
  do
    {
      if (INSN_P (x))
        {
          df_insn_rescan (x);
          if (peep2_current_count < MAX_INSNS_PER_PEEP2)
            {
              peep2_current_count++;
              if (--i < 0)
                i = MAX_INSNS_PER_PEEP2;
              peep2_insn_data[i].insn = x;
              df_simulate_one_insn_backwards (bb, x, &live);
              COPY_REG_SET (peep2_insn_data[i].live_before, &live);
            }
        }
      x = PREV_INSN (x);
    }
  while (x != prev);
  CLEAR_REG_SET (&live);

  peep2_current = i;
}

/* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
   Return true if we added it, false otherwise.  The caller will try to match
   peepholes against the buffer if we return false; otherwise it will try to
   add more instructions to the buffer.  */

static bool
peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
{
  int pos;

  /* Once we have filled the maximum number of insns the buffer can hold,
     allow the caller to match the insns against peepholes.  We wait until
     the buffer is full in case the target has similar peepholes of different
     length; we always want to match the longest if possible.  */
  if (peep2_current_count == MAX_INSNS_PER_PEEP2)
    return false;

  /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
     any other pattern, lest it change the semantics of the frame info.  */
  if (RTX_FRAME_RELATED_P (insn))
    {
      /* Let the buffer drain first.  */
      if (peep2_current_count > 0)
        return false;
      /* Now the insn will be the only thing in the buffer.  */
    }

  pos = peep2_buf_position (peep2_current + peep2_current_count);
  peep2_insn_data[pos].insn = insn;
  COPY_REG_SET (peep2_insn_data[pos].live_before, live);
  peep2_current_count++;

  df_simulate_one_insn_forwards (bb, insn, live);
  return true;
}

/* Perform the peephole2 optimization pass.  */

static void
peephole2_optimize (void)
{
  rtx_insn *insn;
  bitmap live;
  int i;
  basic_block bb;

  peep2_do_cleanup_cfg = false;
  peep2_do_rebuild_jump_labels = false;

  df_set_flags (DF_LR_RUN_DCE);
  df_note_add_problem ();
  df_analyze ();

  /* Initialize the regsets we're going to use.  */
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
  search_ofs = 0;
  live = BITMAP_ALLOC (&reg_obstack);

  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    {
      bool past_end = false;
      int pos;

      rtl_profile_for_bb (bb);

      /* Start up propagation.  */
      bitmap_copy (live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, live);
      peep2_reinit_state (live);

      insn = BB_HEAD (bb);
      for (;;)
        {
          rtx_insn *attempt, *head;
          int match_len;

          if (!past_end && !NONDEBUG_INSN_P (insn))
            {
            next_insn:
              insn = NEXT_INSN (insn);
              if (insn == NEXT_INSN (BB_END (bb)))
                past_end = true;
              continue;
            }
          if (!past_end && peep2_fill_buffer (bb, insn, live))
            goto next_insn;

          /* If we did not fill an empty buffer, it signals the end of the
             block.  */
          if (peep2_current_count == 0)
            break;

          /* The buffer filled to the current maximum, so try to match.  */

          pos = peep2_buf_position (peep2_current + peep2_current_count);
          peep2_insn_data[pos].insn = PEEP2_EOB;
          COPY_REG_SET (peep2_insn_data[pos].live_before, live);

          /* Match the peephole.  */
          head = peep2_insn_data[peep2_current].insn;
          attempt = peephole2_insns (PATTERN (head), head, &match_len);
          if (attempt != NULL)
            {
              rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
              if (last)
                {
                  peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
                  continue;
                }
            }

          /* No match: advance the buffer by one insn.  */
          peep2_current = peep2_buf_position (peep2_current + 1);
          peep2_current_count--;
        }
    }

  default_rtl_profile ();
  for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
    BITMAP_FREE (peep2_insn_data[i].live_before);
  BITMAP_FREE (live);
  if (peep2_do_rebuild_jump_labels)
    rebuild_jump_labels (get_insns ());
  if (peep2_do_cleanup_cfg)
    cleanup_cfg (CLEANUP_CFG_CHANGED);
}
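
/* The pass thus works one basic block at a time: liveness is simulated
   forwards while the buffer fills, and re-simulated backwards (in
   peep2_update_life) after each replacement, so a define_peephole2, for
   example one that fuses a compare with a following conditional branch,
   always sees up-to-date live_before sets when it calls
   peep2_reg_dead_p.  */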

/* Common predicates for use with define_bypass.  */

/* Helper function for store_data_bypass_p, handle just a single SET
   IN_SET.  */

static bool
store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
{
  if (!MEM_P (SET_DEST (in_set)))
    return false;

  rtx out_set = single_set (out_insn);
  if (out_set)
    return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));

  rtx out_pat = PATTERN (out_insn);
  if (GET_CODE (out_pat) != PARALLEL)
    return false;

  for (int i = 0; i < XVECLEN (out_pat, 0); i++)
    {
      rtx out_exp = XVECEXP (out_pat, 0, i);

      if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
        continue;

      gcc_assert (GET_CODE (out_exp) == SET);

      if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
        return false;
    }

  return true;
}

/* True if the dependency between OUT_INSN and IN_INSN is on the store
   data not the address operand(s) of the store.  IN_INSN and OUT_INSN
   must be either a single_set or a PARALLEL with SETs inside.  */

int
store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  rtx in_set = single_set (in_insn);
  if (in_set)
    return store_data_bypass_p_1 (out_insn, in_set);

  rtx in_pat = PATTERN (in_insn);
  if (GET_CODE (in_pat) != PARALLEL)
    return false;

  for (int i = 0; i < XVECLEN (in_pat, 0); i++)
    {
      rtx in_exp = XVECEXP (in_pat, 0, i);

      if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
        continue;

      gcc_assert (GET_CODE (in_exp) == SET);

      if (!store_data_bypass_p_1 (out_insn, in_exp))
        return false;
    }

  return true;
}
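
/* Example (sketch): if OUT_INSN is (set (reg 100) (plus ...)) and IN_INSN
   is (set (mem (reg 101)) (reg 100)), the dependency is on the stored
   data, so the function returns nonzero; if IN_INSN were instead
   (set (mem (reg 100)) (reg 102)), reg 100 would feed the address and the
   function would return zero.  */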

/* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
   condition, and not the THEN or ELSE branch.  OUT_INSN may be either a
   single set or multiple sets; IN_INSN should be single_set for truth, but
   for convenience of insn categorization may be any JUMP or CALL insn.  */

int
if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  rtx out_set, in_set;

  in_set = single_set (in_insn);
  if (! in_set)
    {
      gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
      return false;
    }

  if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
    return false;
  in_set = SET_SRC (in_set);

  out_set = single_set (out_insn);
  if (out_set)
    {
      if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
          || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
        return false;
    }
  else
    {
      rtx out_pat;
      int i;

      out_pat = PATTERN (out_insn);
      gcc_assert (GET_CODE (out_pat) == PARALLEL);

      for (i = 0; i < XVECLEN (out_pat, 0); i++)
        {
          rtx exp = XVECEXP (out_pat, 0, i);

          if (GET_CODE (exp) == CLOBBER)
            continue;

          gcc_assert (GET_CODE (exp) == SET);

          /* Check this SET's destination; OUT_SET is null in this branch,
             so it must not be dereferenced here.  */
          if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
              || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
            return false;
        }
    }

  return true;
}
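
/* Example (sketch): with OUT_INSN (set (reg 100) ...) and IN_INSN
   (set (reg 101) (if_then_else (eq (reg 100) (const_int 0))
                                (reg 102) (reg 103))),
   reg 100 is used only in the condition, so the function returns nonzero;
   it would return zero if reg 100 appeared in either arm.  */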

static unsigned int
rest_of_handle_peephole2 (void)
{
  if (HAVE_peephole2)
    peephole2_optimize ();

  return 0;
}

namespace {

const pass_data pass_data_peephole2 =
{
  RTL_PASS, /* type */
  "peephole2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_PEEPHOLE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_peephole2 : public rtl_opt_pass
{
public:
  pass_peephole2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_peephole2, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so we need
     a clone method.  */
  opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
  virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_peephole2 ();
    }

}; // class pass_peephole2

} // anon namespace

rtl_opt_pass *
make_pass_peephole2 (gcc::context *ctxt)
{
  return new pass_peephole2 (ctxt);
}

namespace {

const pass_data pass_data_split_all_insns =
{
  RTL_PASS, /* type */
  "split1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  PROP_rtl_split_insns, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_all_insns : public rtl_opt_pass
{
public:
  pass_split_all_insns (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_all_insns, ctxt)
  {}

  /* opt_pass methods: */
  /* The epiphany backend creates a second instance of this pass, so
     we need a clone method.  */
  opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_all_insns

} // anon namespace

rtl_opt_pass *
make_pass_split_all_insns (gcc::context *ctxt)
{
  return new pass_split_all_insns (ctxt);
}

namespace {

const pass_data pass_data_split_after_reload =
{
  RTL_PASS, /* type */
  "split2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_after_reload : public rtl_opt_pass
{
public:
  pass_split_after_reload (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_after_reload, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* If optimizing, then go ahead and split insns now.  */
      if (optimize > 0)
        return true;

#ifdef STACK_REGS
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_after_reload

} // anon namespace

rtl_opt_pass *
make_pass_split_after_reload (gcc::context *ctxt)
{
  return new pass_split_after_reload (ctxt);
}

namespace {

const pass_data pass_data_split_before_regstack =
{
  RTL_PASS, /* type */
  "split3", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_regstack : public rtl_opt_pass
{
public:
  pass_split_before_regstack (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      split_all_insns ();
      return 0;
    }

}; // class pass_split_before_regstack

bool
pass_split_before_regstack::gate (function *)
{
#if HAVE_ATTR_length && defined (STACK_REGS)
  /* If flow2 creates new instructions which need splitting
     and scheduling after reload is not done, they might not be
     split until final which doesn't allow splitting
     if HAVE_ATTR_length.  */
# ifdef INSN_SCHEDULING
  return (optimize && !flag_schedule_insns_after_reload);
# else
  return (optimize);
# endif
#else
  return 0;
#endif
}

} // anon namespace

rtl_opt_pass *
make_pass_split_before_regstack (gcc::context *ctxt)
{
  return new pass_split_before_regstack (ctxt);
}

static unsigned int
rest_of_handle_split_before_sched2 (void)
{
#ifdef INSN_SCHEDULING
  split_all_insns ();
#endif
  return 0;
}

namespace {

const pass_data pass_data_split_before_sched2 =
{
  RTL_PASS, /* type */
  "split4", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_before_sched2 : public rtl_opt_pass
{
public:
  pass_split_before_sched2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
#ifdef INSN_SCHEDULING
      return optimize > 0 && flag_schedule_insns_after_reload;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return rest_of_handle_split_before_sched2 ();
    }

}; // class pass_split_before_sched2

} // anon namespace

rtl_opt_pass *
make_pass_split_before_sched2 (gcc::context *ctxt)
{
  return new pass_split_before_sched2 (ctxt);
}

namespace {

const pass_data pass_data_split_for_shorten_branches =
{
  RTL_PASS, /* type */
  "split5", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_split_for_shorten_branches : public rtl_opt_pass
{
public:
  pass_split_for_shorten_branches (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      /* The placement of the splitting that we do for shorten_branches
         depends on whether regstack is used by the target or not.  */
#if HAVE_ATTR_length && !defined (STACK_REGS)
      return true;
#else
      return false;
#endif
    }

  virtual unsigned int execute (function *)
    {
      return split_all_insns_noflow ();
    }

}; // class pass_split_for_shorten_branches

} // anon namespace

rtl_opt_pass *
make_pass_split_for_shorten_branches (gcc::context *ctxt)
{
  return new pass_split_for_shorten_branches (ctxt);
}

/* (Re)initialize the target information after a change in target.  */

void
recog_init ()
{
  /* The information is zero-initialized, so we don't need to do anything
     first time round.  */
  if (!this_target_recog->x_initialized)
    {
      this_target_recog->x_initialized = true;
      return;
    }
  memset (this_target_recog->x_bool_attr_masks, 0,
          sizeof (this_target_recog->x_bool_attr_masks));
  for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
    if (this_target_recog->x_op_alt[i])
      {
        free (this_target_recog->x_op_alt[i]);
        this_target_recog->x_op_alt[i] = 0;
      }
}