gcc/recog.cc
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2024 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "cfghooks.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "insn-config.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "insn-attr.h"
37 #include "addresses.h"
38 #include "cfgrtl.h"
39 #include "cfgbuild.h"
40 #include "cfgcleanup.h"
41 #include "reload.h"
42 #include "tree-pass.h"
43 #include "function-abi.h"
45 #ifndef STACK_POP_CODE
46 #if STACK_GROWS_DOWNWARD
47 #define STACK_POP_CODE POST_INC
48 #else
49 #define STACK_POP_CODE POST_DEC
50 #endif
51 #endif
53 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
54 static void validate_replace_src_1 (rtx *, void *);
55 static rtx_insn *split_insn (rtx_insn *);
57 struct target_recog default_target_recog;
58 #if SWITCHABLE_TARGET
59 struct target_recog *this_target_recog = &default_target_recog;
60 #endif
62 /* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.cc and expmed.cc (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in reginfo.cc and final.cc and reload.cc.
68 init_recog and init_recog_no_volatile are responsible for setting this. */
70 int volatile_ok;
72 struct recog_data_d recog_data;
74 /* Contains a vector of operand_alternative structures, such that
75 operand OP of alternative A is at index A * n_operands + OP.
76 Set up by preprocess_constraints. */
77 const operand_alternative *recog_op_alt;
79 /* Used to provide recog_op_alt for asms. */
80 static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
81 * MAX_RECOG_ALTERNATIVES];
83 /* On return from `constrain_operands', indicate which alternative
84 was satisfied. */
86 int which_alternative;
88 /* Nonzero after end of reload pass.
89 Set to 1 or 0 by toplev.cc.
90 Controls the significance of (SUBREG (MEM)). */
92 int reload_completed;
94 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
95 int epilogue_completed;
97 /* Initialize data used by the function `recog'.
98 This must be called once in the compilation of a function
99 before any insn recognition may be done in the function. */
101 void
102 init_recog_no_volatile (void)
104 volatile_ok = 0;
107 void
108 init_recog (void)
110 volatile_ok = 1;
114 /* Return true if labels in asm operands BODY are LABEL_REFs. */
116 static bool
117 asm_labels_ok (rtx body)
119 rtx asmop;
120 int i;
122 asmop = extract_asm_operands (body);
123 if (asmop == NULL_RTX)
124 return true;
126 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
127 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
128 return false;
130 return true;
133 /* Check that X is an insn-body for an `asm' with operands
134 and that the operands mentioned in it are legitimate. */
136 bool
137 check_asm_operands (rtx x)
139 int noperands;
140 rtx *operands;
141 const char **constraints;
142 int i;
144 if (!asm_labels_ok (x))
145 return false;
147 /* Post-reload, be more strict with things. */
148 if (reload_completed)
150 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
151 rtx_insn *insn = make_insn_raw (x);
152 extract_insn (insn);
153 constrain_operands (1, get_enabled_alternatives (insn));
154 return which_alternative >= 0;
157 noperands = asm_noperands (x);
158 if (noperands < 0)
159 return false;
160 if (noperands == 0)
161 return true;
163 operands = XALLOCAVEC (rtx, noperands);
164 constraints = XALLOCAVEC (const char *, noperands);
166 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
168 for (i = 0; i < noperands; i++)
170 const char *c = constraints[i];
171 if (c[0] == '%')
172 c++;
173 if (! asm_operand_ok (operands[i], c, constraints))
174 return false;
177 return true;
180 /* Static data for the next two routines. */
182 struct change_t
184 rtx object;
185 int old_code;
186 int old_len;
187 bool unshare;
188 rtx *loc;
189 rtx old;
192 static change_t *changes;
193 static int changes_allocated;
195 static int num_changes = 0;
196 static int temporarily_undone_changes = 0;
198 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
199 at which NEW_RTX will be placed. If NEW_LEN is >= 0, XVECLEN (NEW_RTX, 0)
200 will also be changed to NEW_LEN, which is no greater than the current
201 XVECLEN. If OBJECT is zero, no validation is done, the change is
202 simply made.
204 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
205 will be called with the address and mode as parameters. If OBJECT is
206 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
207 the change in place.
209 IN_GROUP is nonzero if this is part of a group of changes that must be
210 performed as a group. In that case, the changes will be stored. The
211 function `apply_change_group' will validate and apply the changes.
213 If IN_GROUP is zero, this is a single change. Try to recognize the insn
214 or validate the memory reference with the change applied. If the result
215 is not valid for the machine, suppress the change and return false.
216 Otherwise, perform the change and return true. */
218 static bool
219 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
220 bool unshare, int new_len = -1)
222 gcc_assert (temporarily_undone_changes == 0);
223 rtx old = *loc;
225 /* Single-element parallels aren't valid and won't match anything.
226 Replace them with the single element. */
227 if (new_len == 1 && GET_CODE (new_rtx) == PARALLEL)
229 new_rtx = XVECEXP (new_rtx, 0, 0);
230 new_len = -1;
233 if ((old == new_rtx || rtx_equal_p (old, new_rtx))
234 && (new_len < 0 || XVECLEN (new_rtx, 0) == new_len))
235 return true;
237 gcc_assert ((in_group != 0 || num_changes == 0)
238 && (new_len < 0 || new_rtx == *loc));
240 *loc = new_rtx;
242 /* Save the information describing this change. */
243 if (num_changes >= changes_allocated)
245 if (changes_allocated == 0)
246 /* This value allows for repeated substitutions inside complex
247 indexed addresses, or changes in up to 5 insns. */
248 changes_allocated = MAX_RECOG_OPERANDS * 5;
249 else
250 changes_allocated *= 2;
252 changes = XRESIZEVEC (change_t, changes, changes_allocated);
255 changes[num_changes].object = object;
256 changes[num_changes].loc = loc;
257 changes[num_changes].old = old;
258 changes[num_changes].old_len = (new_len >= 0 ? XVECLEN (new_rtx, 0) : -1);
259 changes[num_changes].unshare = unshare;
261 if (new_len >= 0)
262 XVECLEN (new_rtx, 0) = new_len;
264 if (object && !MEM_P (object))
266 /* Set INSN_CODE to force rerecognition of insn. Save old code in
267 case invalid. */
268 changes[num_changes].old_code = INSN_CODE (object);
269 INSN_CODE (object) = -1;
272 num_changes++;
274 /* If we are making a group of changes, return true. Otherwise, validate
275 the change group we made. */
277 if (in_group)
278 return true;
279 else
280 return apply_change_group ();
283 /* Wrapper for validate_change_1 that omits the UNSHARE argument,
284 defaulting UNSHARE to false. */
286 bool
287 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
289 return validate_change_1 (object, loc, new_rtx, in_group, false);
292 /* Wrapper for validate_change_1 that omits the UNSHARE argument,
293 defaulting UNSHARE to true. */
295 bool
296 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
298 return validate_change_1 (object, loc, new_rtx, in_group, true);
301 /* Change XVECLEN (*LOC, 0) to NEW_LEN. OBJECT, IN_GROUP and the return
302 value are as for validate_change_1. */
304 bool
305 validate_change_xveclen (rtx object, rtx *loc, int new_len, bool in_group)
307 return validate_change_1 (object, loc, *loc, in_group, false, new_len);
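
/* A hypothetical usage sketch (the example_* helper and NEW0/NEW1 are
   illustrative only): queue two operand replacements as a single group so
   that apply_change_group either commits both or rolls both back.  */

static bool
example_replace_both_operands (rtx_insn *insn, rtx new0, rtx new1)
{
  rtx set = single_set (insn);
  if (!set || !BINARY_P (SET_SRC (set)))
    return false;
  rtx src = SET_SRC (set);
  /* IN_GROUP is true, so nothing is re-recognized yet.  */
  validate_change (insn, &XEXP (src, 0), new0, true);
  validate_change (insn, &XEXP (src, 1), new1, true);
  /* Re-recognize INSN; on failure both changes are undone.  */
  return apply_change_group ();
}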
310 /* Keep X canonicalized if some changes have made it non-canonical; only
311 modifies the operands of X, not (for example) its code. Simplifications
312 are not the job of this routine.
314 Return true if anything was changed. */
315 bool
316 canonicalize_change_group (rtx_insn *insn, rtx x)
318 if (COMMUTATIVE_P (x)
319 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
321 /* Oops, the caller has made X no longer canonical.
322 Let's redo the changes in the correct order. */
323 rtx tem = XEXP (x, 0);
324 validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
325 validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
326 return true;
328 else
329 return false;
332 /* Check if REG_INC argument in *data overlaps a stored REG. */
334 static void
335 check_invalid_inc_dec (rtx reg, const_rtx, void *data)
337 rtx *pinc = (rtx *) data;
338 if (*pinc == NULL_RTX || MEM_P (reg))
339 return;
340 if (reg_overlap_mentioned_p (reg, *pinc))
341 *pinc = NULL_RTX;
344 /* This subroutine of apply_change_group verifies whether the changes to INSN
345 were valid; i.e. whether INSN can still be recognized.
347 If IN_GROUP is true, clobbers that have to be added in order to
348 match the instruction will be added to the current change group.
349 Otherwise the changes take effect immediately. */
351 bool
352 insn_invalid_p (rtx_insn *insn, bool in_group)
354 rtx pat = PATTERN (insn);
355 int num_clobbers = 0;
356 /* If we are before reload and the pattern is a SET, see if we can add
357 clobbers. */
358 int icode = recog (pat, insn,
359 (GET_CODE (pat) == SET
360 && ! reload_completed
361 && ! reload_in_progress)
362 ? &num_clobbers : 0);
363 bool is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
366 /* If this is an asm and the operands aren't legal, then fail. Likewise if
367 this is not an asm and the insn wasn't recognized. */
368 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
369 || (!is_asm && icode < 0))
370 return true;
372 /* If we have to add CLOBBERs, fail if we have to add ones that reference
373 hard registers since our callers can't know if they are live or not.
374 Otherwise, add them. */
375 if (num_clobbers > 0)
377 rtx newpat;
379 if (added_clobbers_hard_reg_p (icode))
380 return true;
382 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
383 XVECEXP (newpat, 0, 0) = pat;
384 add_clobbers (newpat, icode);
385 if (in_group)
386 validate_change (insn, &PATTERN (insn), newpat, 1);
387 else
388 PATTERN (insn) = pat = newpat;
391 /* After reload, verify that all constraints are satisfied. */
392 if (reload_completed)
394 extract_insn (insn);
396 if (! constrain_operands (1, get_preferred_alternatives (insn)))
397 return true;
400 /* Punt if REG_INC argument overlaps some stored REG. */
401 for (rtx link = FIND_REG_INC_NOTE (insn, NULL_RTX);
402 link; link = XEXP (link, 1))
403 if (REG_NOTE_KIND (link) == REG_INC)
405 rtx reg = XEXP (link, 0);
406 note_stores (insn, check_invalid_inc_dec, &reg);
407 if (reg == NULL_RTX)
408 return true;
411 INSN_CODE (insn) = icode;
412 return false;
415 /* Return number of changes made and not validated yet. */
416 int
417 num_changes_pending (void)
419 return num_changes;
422 /* Tentatively apply the changes numbered NUM and up.
423 Return true if all changes are valid, false otherwise. */
425 bool
426 verify_changes (int num)
428 int i;
429 rtx last_validated = NULL_RTX;
431 /* The changes have been applied and all INSN_CODEs have been reset to force
432 rerecognition.
434 The changes are valid if we aren't given an object, or if we are
435 given a MEM and it still is a valid address, or if this is an insn
436 and it is recognized. In the latter case, if reload has completed,
437 we also require that the operands meet the constraints for
438 the insn. */
440 for (i = num; i < num_changes; i++)
442 rtx object = changes[i].object;
444 /* If there is no object to test or if it is the same as the one we
445 already tested, ignore it. */
446 if (object == 0 || object == last_validated)
447 continue;
449 if (MEM_P (object))
451 if (! memory_address_addr_space_p (GET_MODE (object),
452 XEXP (object, 0),
453 MEM_ADDR_SPACE (object)))
454 break;
456 else if (/* changes[i].old might be zero, e.g. when putting a
457 REG_FRAME_RELATED_EXPR into a previously empty list. */
458 changes[i].old
459 && REG_P (changes[i].old)
460 && asm_noperands (PATTERN (object)) > 0
461 && register_asm_p (changes[i].old))
463 /* Don't allow changes of hard register operands to inline
464 assemblies if they have been defined as register asm ("x"). */
465 break;
467 else if (DEBUG_INSN_P (object))
468 continue;
469 else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
471 rtx pat = PATTERN (object);
473 /* Perhaps we couldn't recognize the insn because there were
474 extra CLOBBERs at the end. If so, try to re-recognize
475 without the last CLOBBER (later iterations will cause each of
476 them to be eliminated, in turn). But don't do this if we
477 have an ASM_OPERAND. */
478 if (GET_CODE (pat) == PARALLEL
479 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
480 && asm_noperands (PATTERN (object)) < 0)
482 rtx newpat;
484 if (XVECLEN (pat, 0) == 2)
485 newpat = XVECEXP (pat, 0, 0);
486 else
488 int j;
490 newpat
491 = gen_rtx_PARALLEL (VOIDmode,
492 rtvec_alloc (XVECLEN (pat, 0) - 1));
493 for (j = 0; j < XVECLEN (newpat, 0); j++)
494 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
497 /* Add a new change to this group to replace the pattern
498 with this new pattern. Then consider this change
499 as having succeeded. The change we added will
500 cause the entire call to fail if things remain invalid.
502 Note that this can lose if a later change than the one
503 we are processing specified &XVECEXP (PATTERN (object), 0, X)
504 but this shouldn't occur. */
506 validate_change (object, &PATTERN (object), newpat, 1);
507 continue;
509 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
510 || GET_CODE (pat) == VAR_LOCATION)
511 /* If this insn is a CLOBBER or USE, it is always valid, but is
512 never recognized. */
513 continue;
514 else
515 break;
517 last_validated = object;
520 return (i == num_changes);
523 /* A group of changes has previously been issued with validate_change
524 and verified with verify_changes. Call df_insn_rescan for each of
525 the insns changed and clear num_changes. */
527 void
528 confirm_change_group (void)
530 int i;
531 rtx last_object = NULL;
533 gcc_assert (temporarily_undone_changes == 0);
534 for (i = 0; i < num_changes; i++)
536 rtx object = changes[i].object;
538 if (changes[i].unshare)
539 *changes[i].loc = copy_rtx (*changes[i].loc);
541 /* Avoid unnecessary rescanning when multiple changes to the same
542 instruction are made. */
543 if (object)
545 if (object != last_object && last_object && INSN_P (last_object))
546 df_insn_rescan (as_a <rtx_insn *> (last_object));
547 last_object = object;
551 if (last_object && INSN_P (last_object))
552 df_insn_rescan (as_a <rtx_insn *> (last_object));
553 num_changes = 0;
556 /* Apply a group of changes previously issued with `validate_change'.
557 If all changes are valid, call confirm_change_group and return true,
558 otherwise, call cancel_changes and return false. */
560 bool
561 apply_change_group (void)
563 if (verify_changes (0))
565 confirm_change_group ();
566 return true;
568 else
570 cancel_changes (0);
571 return false;
576 /* Return the number of changes so far in the current group. */
578 int
579 num_validated_changes (void)
581 return num_changes;
584 /* Retract the changes numbered NUM and up. */
586 void
587 cancel_changes (int num)
589 gcc_assert (temporarily_undone_changes == 0);
590 int i;
592 /* Back out all the changes. Do this in the opposite order in which
593 they were made. */
594 for (i = num_changes - 1; i >= num; i--)
596 if (changes[i].old_len >= 0)
597 XVECLEN (*changes[i].loc, 0) = changes[i].old_len;
598 else
599 *changes[i].loc = changes[i].old;
600 if (changes[i].object && !MEM_P (changes[i].object))
601 INSN_CODE (changes[i].object) = changes[i].old_code;
603 num_changes = num;
606 /* Swap the status of change NUM from being applied to not being applied,
607 or vice versa. */
609 static void
610 swap_change (int num)
612 if (changes[num].old_len >= 0)
613 std::swap (XVECLEN (*changes[num].loc, 0), changes[num].old_len);
614 else
615 std::swap (*changes[num].loc, changes[num].old);
616 if (changes[num].object && !MEM_P (changes[num].object))
617 std::swap (INSN_CODE (changes[num].object), changes[num].old_code);
620 /* Temporarily undo all the changes numbered NUM and up, with a view
621 to reapplying them later. The next call to the changes machinery
622 must be:
624 redo_changes (NUM)
626 otherwise things will end up in an invalid state. */
628 void
629 temporarily_undo_changes (int num)
631 gcc_assert (temporarily_undone_changes == 0 && num <= num_changes);
632 for (int i = num_changes - 1; i >= num; i--)
633 swap_change (i);
634 temporarily_undone_changes = num_changes - num;
637 /* Redo the changes that were temporarily undone by:
639 temporarily_undo_changes (NUM). */
641 void
642 redo_changes (int num)
644 gcc_assert (temporarily_undone_changes == num_changes - num);
645 for (int i = num; i < num_changes; ++i)
646 swap_change (i);
647 temporarily_undone_changes = 0;
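
/* A hypothetical usage sketch (illustrative only) of the required pairing:
   peek at INSN with the pending changes from FIRST_CHANGE onwards undone,
   then restore them before touching the changes machinery again.  */

static void
example_inspect_without_pending_changes (rtx_insn *insn, int first_change)
{
  temporarily_undo_changes (first_change);
  /* INSN now shows its pre-change form; only inspect it here.  */
  gcc_checking_assert (INSN_P (insn));
  redo_changes (first_change);
}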
650 /* Reduce conditional compilation elsewhere. */
651 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
652 rtx. */
654 static void
655 simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
656 machine_mode op0_mode)
658 rtx x = *loc;
659 enum rtx_code code = GET_CODE (x);
660 rtx new_rtx = NULL_RTX;
661 scalar_int_mode is_mode;
663 if (SWAPPABLE_OPERANDS_P (x)
664 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
666 validate_unshare_change (object, loc,
667 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
668 : swap_condition (code),
669 GET_MODE (x), XEXP (x, 1),
670 XEXP (x, 0)), 1);
671 x = *loc;
672 code = GET_CODE (x);
675 /* Canonicalize arithmetics with all constant operands. */
676 switch (GET_RTX_CLASS (code))
678 case RTX_UNARY:
679 if (CONSTANT_P (XEXP (x, 0)))
680 new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
681 op0_mode);
682 break;
683 case RTX_COMM_ARITH:
684 case RTX_BIN_ARITH:
685 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
686 new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
687 XEXP (x, 1));
688 break;
689 case RTX_COMPARE:
690 case RTX_COMM_COMPARE:
691 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
692 new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
693 XEXP (x, 0), XEXP (x, 1));
694 break;
695 default:
696 break;
698 if (new_rtx)
700 validate_change (object, loc, new_rtx, 1);
701 return;
704 switch (code)
706 case PLUS:
707 /* If we have a PLUS whose second operand is now a CONST_INT, use
708 simplify_gen_binary to try to simplify it.
709 ??? We may want later to remove this, once simplification is
710 separated from this function. */
711 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
712 validate_change (object, loc,
713 simplify_gen_binary
714 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
715 break;
716 case MINUS:
717 if (CONST_SCALAR_INT_P (XEXP (x, 1)))
718 validate_change (object, loc,
719 simplify_gen_binary
720 (PLUS, GET_MODE (x), XEXP (x, 0),
721 simplify_gen_unary (NEG,
722 GET_MODE (x), XEXP (x, 1),
723 GET_MODE (x))), 1);
724 break;
725 case ZERO_EXTEND:
726 case SIGN_EXTEND:
727 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
729 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
730 op0_mode);
731 /* If any of the above failed, substitute in something that
732 we know won't be recognized. */
733 if (!new_rtx)
734 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
735 validate_change (object, loc, new_rtx, 1);
737 break;
738 case SUBREG:
739 /* All subregs possible to simplify should be simplified. */
740 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
741 SUBREG_BYTE (x));
743 /* Subregs of VOIDmode operands are incorrect. */
744 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
745 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
746 if (new_rtx)
747 validate_change (object, loc, new_rtx, 1);
748 break;
749 case ZERO_EXTRACT:
750 case SIGN_EXTRACT:
751 /* If we are replacing a register with memory, try to change the memory
752 to be the mode required for memory in extract operations (this isn't
753 likely to be an insertion operation; if it was, nothing bad will
754 happen, we might just fail in some cases). */
756 if (MEM_P (XEXP (x, 0))
757 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
758 && CONST_INT_P (XEXP (x, 1))
759 && CONST_INT_P (XEXP (x, 2))
760 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
761 MEM_ADDR_SPACE (XEXP (x, 0)))
762 && !MEM_VOLATILE_P (XEXP (x, 0)))
764 int pos = INTVAL (XEXP (x, 2));
765 machine_mode new_mode = is_mode;
766 if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
767 new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
768 else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
769 new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
770 scalar_int_mode wanted_mode = (new_mode == VOIDmode
771 ? word_mode
772 : as_a <scalar_int_mode> (new_mode));
774 /* If we have a narrower mode, we can do something. */
775 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
777 int offset = pos / BITS_PER_UNIT;
778 rtx newmem;
780 /* If the bytes and bits are counted differently, we
781 must adjust the offset. */
782 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
783 offset =
784 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
785 offset);
787 gcc_assert (GET_MODE_PRECISION (wanted_mode)
788 == GET_MODE_BITSIZE (wanted_mode));
789 pos %= GET_MODE_BITSIZE (wanted_mode);
791 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
793 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
794 validate_change (object, &XEXP (x, 0), newmem, 1);
798 break;
800 default:
801 break;
805 /* Replace every occurrence of FROM in X with TO. Mark each change with
806 validate_change passing OBJECT. */
808 static void
809 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
810 bool simplify)
812 int i, j;
813 const char *fmt;
814 rtx x = *loc;
815 enum rtx_code code;
816 machine_mode op0_mode = VOIDmode;
817 int prev_changes = num_changes;
819 if (!x)
820 return;
822 code = GET_CODE (x);
823 fmt = GET_RTX_FORMAT (code);
824 if (fmt[0] == 'e')
825 op0_mode = GET_MODE (XEXP (x, 0));
827 /* X matches FROM if it is the same rtx or they are both referring to the
828 same register in the same mode. Avoid calling rtx_equal_p unless the
829 operands look similar. */
831 if (x == from
832 || (REG_P (x) && REG_P (from)
833 && GET_MODE (x) == GET_MODE (from)
834 && REGNO (x) == REGNO (from))
835 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
836 && rtx_equal_p (x, from)))
838 validate_unshare_change (object, loc, to, 1);
839 return;
842 /* Call ourselves recursively to perform the replacements.
843 We must not replace inside an already replaced expression; otherwise we
844 get infinite recursion for replacements like (reg X)->(subreg (reg X)),
845 so we must special-case shared ASM_OPERANDS. */
847 if (GET_CODE (x) == PARALLEL)
849 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
851 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
852 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
854 /* Verify that operands are really shared. */
855 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
856 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
857 (x, 0, j))));
858 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
859 from, to, object, simplify);
861 else
862 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
863 simplify);
866 else
867 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
869 if (fmt[i] == 'e')
870 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
871 else if (fmt[i] == 'E')
872 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
873 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
874 simplify);
877 /* If we didn't substitute, there is nothing more to do. */
878 if (num_changes == prev_changes)
879 return;
881 /* ??? The regmove is no more, so is this aberration still necessary? */
882 /* Allow substituted expression to have different mode. This is used by
883 regmove to change mode of pseudo register. */
884 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
885 op0_mode = GET_MODE (XEXP (x, 0));
887 /* Do changes needed to keep rtx consistent. Don't do any other
888 simplifications, as it is not our job. */
889 if (simplify)
890 simplify_while_replacing (loc, to, object, op0_mode);
893 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
894 with TO. After all changes have been made, validate by seeing
895 if INSN is still valid. */
897 bool
898 validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
900 validate_replace_rtx_1 (loc, from, to, insn, true);
901 return apply_change_group ();
904 /* Try replacing every occurrence of FROM in INSN with TO. After all
905 changes have been made, validate by seeing if INSN is still valid. */
907 bool
908 validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
910 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
911 return apply_change_group ();
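
/* A hypothetical usage sketch (illustrative only): rewrite every use of REG
   in INSN to the zero constant of REG's mode, keeping the result only if
   INSN is still recognized.  validate_replace_rtx runs apply_change_group
   internally.  */

static bool
example_replace_reg_with_zero (rtx_insn *insn, rtx reg)
{
  return validate_replace_rtx (reg, CONST0_RTX (GET_MODE (reg)), insn);
}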
914 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
915 is a part of INSN. After all changes have been made, validate by seeing if
916 INSN is still valid.
917 validate_replace_rtx (from, to, insn) is equivalent to
918 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
920 bool
921 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
923 validate_replace_rtx_1 (where, from, to, insn, true);
924 return apply_change_group ();
927 /* Same as above, but do not simplify rtx afterwards. */
928 bool
929 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
930 rtx_insn *insn)
932 validate_replace_rtx_1 (where, from, to, insn, false);
933 return apply_change_group ();
937 /* Try replacing every occurrence of FROM in INSN with TO. This also
938 will replace in REG_EQUAL and REG_EQUIV notes. */
940 void
941 validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
943 rtx note;
944 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
945 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
946 if (REG_NOTE_KIND (note) == REG_EQUAL
947 || REG_NOTE_KIND (note) == REG_EQUIV)
948 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
951 /* Function called by note_uses to replace used subexpressions. */
952 struct validate_replace_src_data
954 rtx from; /* Old RTX */
955 rtx to; /* New RTX */
956 rtx_insn *insn; /* Insn in which substitution is occurring. */
959 static void
960 validate_replace_src_1 (rtx *x, void *data)
962 struct validate_replace_src_data *d
963 = (struct validate_replace_src_data *) data;
965 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
968 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
969 SET_DESTs. */
971 void
972 validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
974 struct validate_replace_src_data d;
976 d.from = from;
977 d.to = to;
978 d.insn = insn;
979 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
982 /* Try to simplify INSN.
983 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
984 pattern and return true if something was simplified. */
986 bool
987 validate_simplify_insn (rtx_insn *insn)
989 int i;
990 rtx pat = NULL;
991 rtx newpat = NULL;
993 pat = PATTERN (insn);
995 if (GET_CODE (pat) == SET)
997 newpat = simplify_rtx (SET_SRC (pat));
998 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
999 validate_change (insn, &SET_SRC (pat), newpat, 1);
1000 newpat = simplify_rtx (SET_DEST (pat));
1001 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
1002 validate_change (insn, &SET_DEST (pat), newpat, 1);
1004 else if (GET_CODE (pat) == PARALLEL)
1005 for (i = 0; i < XVECLEN (pat, 0); i++)
1007 rtx s = XVECEXP (pat, 0, i);
1009 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
1011 newpat = simplify_rtx (SET_SRC (s));
1012 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
1013 validate_change (insn, &SET_SRC (s), newpat, 1);
1014 newpat = simplify_rtx (SET_DEST (s));
1015 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
1016 validate_change (insn, &SET_DEST (s), newpat, 1);
1019 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
1022 /* Try to process the address of memory expression MEM. Return true on
1023 success; leave the caller to clean up on failure. */
1025 bool
1026 insn_propagation::apply_to_mem_1 (rtx mem)
1028 auto old_num_changes = num_validated_changes ();
1029 mem_depth += 1;
1030 bool res = apply_to_rvalue_1 (&XEXP (mem, 0));
1031 mem_depth -= 1;
1032 if (!res)
1033 return false;
1035 if (old_num_changes != num_validated_changes ()
1036 && should_check_mems
1037 && !check_mem (old_num_changes, mem))
1038 return false;
1040 return true;
1043 /* Try to process the rvalue expression at *LOC. Return true on success;
1044 leave the caller to clean up on failure. */
1046 bool
1047 insn_propagation::apply_to_rvalue_1 (rtx *loc)
1049 rtx x = *loc;
1050 enum rtx_code code = GET_CODE (x);
1051 machine_mode mode = GET_MODE (x);
1053 auto old_num_changes = num_validated_changes ();
1054 if (from && GET_CODE (x) == GET_CODE (from) && rtx_equal_p (x, from))
1056 /* Don't replace register asms in asm statements; we mustn't
1057 change the user's register allocation. */
1058 if (REG_P (x)
1059 && HARD_REGISTER_P (x)
1060 && register_asm_p (x)
1061 && asm_noperands (PATTERN (insn)) > 0)
1062 return false;
1064 if (should_unshare)
1065 validate_unshare_change (insn, loc, to, 1);
1066 else
1067 validate_change (insn, loc, to, 1);
1068 if (mem_depth && !REG_P (to) && !CONSTANT_P (to))
1070 /* We're substituting into an address, but TO will have the
1071 form expected outside an address. Canonicalize it if
1072 necessary. */
1073 insn_propagation subprop (insn);
1074 subprop.mem_depth += 1;
1075 if (!subprop.apply_to_rvalue (loc))
1076 gcc_unreachable ();
1077 if (should_unshare
1078 && num_validated_changes () != old_num_changes + 1)
1080 /* TO is owned by someone else, so create a copy and
1081 return TO to its original form. */
1082 rtx to = copy_rtx (*loc);
1083 cancel_changes (old_num_changes);
1084 validate_change (insn, loc, to, 1);
1087 num_replacements += 1;
1088 should_unshare = true;
1089 result_flags |= UNSIMPLIFIED;
1090 return true;
1093 /* Recursively apply the substitution and see if we can simplify
1094 the result. This specifically shouldn't use simplify_gen_* for
1095 speculative simplifications, since we want to avoid generating new
1096 expressions where possible. */
1097 auto old_result_flags = result_flags;
1098 rtx newx = NULL_RTX;
1099 bool recurse_p = false;
1100 switch (GET_RTX_CLASS (code))
1102 case RTX_UNARY:
1104 machine_mode op0_mode = GET_MODE (XEXP (x, 0));
1105 if (!apply_to_rvalue_1 (&XEXP (x, 0)))
1106 return false;
1107 if (from && old_num_changes == num_validated_changes ())
1108 return true;
1110 newx = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
1111 break;
1114 case RTX_BIN_ARITH:
1115 case RTX_COMM_ARITH:
1117 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1118 || !apply_to_rvalue_1 (&XEXP (x, 1)))
1119 return false;
1120 if (from && old_num_changes == num_validated_changes ())
1121 return true;
1123 if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
1124 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
1125 newx = simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
1126 else
1127 newx = simplify_binary_operation (code, mode,
1128 XEXP (x, 0), XEXP (x, 1));
1129 break;
1132 case RTX_COMPARE:
1133 case RTX_COMM_COMPARE:
1135 machine_mode op_mode = (GET_MODE (XEXP (x, 0)) != VOIDmode
1136 ? GET_MODE (XEXP (x, 0))
1137 : GET_MODE (XEXP (x, 1)));
1138 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1139 || !apply_to_rvalue_1 (&XEXP (x, 1)))
1140 return false;
1141 if (from && old_num_changes == num_validated_changes ())
1142 return true;
1144 newx = simplify_relational_operation (code, mode, op_mode,
1145 XEXP (x, 0), XEXP (x, 1));
1146 break;
1149 case RTX_TERNARY:
1150 case RTX_BITFIELD_OPS:
1152 machine_mode op0_mode = GET_MODE (XEXP (x, 0));
1153 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1154 || !apply_to_rvalue_1 (&XEXP (x, 1))
1155 || !apply_to_rvalue_1 (&XEXP (x, 2)))
1156 return false;
1157 if (from && old_num_changes == num_validated_changes ())
1158 return true;
1160 newx = simplify_ternary_operation (code, mode, op0_mode,
1161 XEXP (x, 0), XEXP (x, 1),
1162 XEXP (x, 2));
1163 break;
1166 case RTX_EXTRA:
1167 if (code == SUBREG)
1169 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
1170 if (!apply_to_rvalue_1 (&SUBREG_REG (x)))
1171 return false;
1172 if (from && old_num_changes == num_validated_changes ())
1173 return true;
1175 rtx inner = SUBREG_REG (x);
1176 newx = simplify_subreg (mode, inner, inner_mode, SUBREG_BYTE (x));
1177 /* Reject the same cases that simplify_gen_subreg would. */
1178 if (!newx
1179 && (GET_CODE (inner) == SUBREG
1180 || GET_CODE (inner) == CONCAT
1181 || GET_MODE (inner) == VOIDmode
1182 || !validate_subreg (mode, inner_mode,
1183 inner, SUBREG_BYTE (x))))
1185 failure_reason = "would create an invalid subreg";
1186 return false;
1188 break;
1190 else
1191 recurse_p = true;
1192 break;
1194 case RTX_OBJ:
1195 if (code == LO_SUM)
1197 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1198 || !apply_to_rvalue_1 (&XEXP (x, 1)))
1199 return false;
1200 if (from && old_num_changes == num_validated_changes ())
1201 return true;
1203 /* (lo_sum (high x) y) -> y where x and y have the same base. */
1204 rtx op0 = XEXP (x, 0);
1205 rtx op1 = XEXP (x, 1);
1206 if (GET_CODE (op0) == HIGH)
1208 rtx base0, base1, offset0, offset1;
1209 split_const (XEXP (op0, 0), &base0, &offset0);
1210 split_const (op1, &base1, &offset1);
1211 if (rtx_equal_p (base0, base1))
1212 newx = op1;
1215 else if (code == REG)
1217 if (from && REG_P (from) && reg_overlap_mentioned_p (x, from))
1219 failure_reason = "inexact register overlap";
1220 return false;
1223 else if (code == MEM)
1224 return apply_to_mem_1 (x);
1225 else
1226 recurse_p = true;
1227 break;
1229 case RTX_CONST_OBJ:
1230 break;
1232 case RTX_AUTOINC:
1233 if (from && reg_overlap_mentioned_p (XEXP (x, 0), from))
1235 failure_reason = "is subject to autoinc";
1236 return false;
1238 recurse_p = true;
1239 break;
1241 case RTX_MATCH:
1242 case RTX_INSN:
1243 gcc_unreachable ();
1246 if (recurse_p)
1248 const char *fmt = GET_RTX_FORMAT (code);
1249 for (int i = 0; fmt[i]; i++)
1250 switch (fmt[i])
1252 case 'E':
1253 for (int j = 0; j < XVECLEN (x, i); j++)
1254 if (!apply_to_rvalue_1 (&XVECEXP (x, i, j)))
1255 return false;
1256 break;
1258 case 'e':
1259 if (XEXP (x, i) && !apply_to_rvalue_1 (&XEXP (x, i)))
1260 return false;
1261 break;
1264 else if (newx && !rtx_equal_p (x, newx))
1266 /* All substitutions made by OLD_NUM_CHANGES onwards have been
1267 simplified. */
1268 result_flags = ((result_flags & ~UNSIMPLIFIED)
1269 | (old_result_flags & UNSIMPLIFIED));
1271 if (should_note_simplifications)
1272 note_simplification (old_num_changes, old_result_flags, x, newx);
1274 /* There's no longer any point unsharing the substitutions made
1275 for subexpressions, since we'll just copy this one instead. */
1276 bool unshare = false;
1277 for (int i = old_num_changes; i < num_changes; ++i)
1279 unshare |= changes[i].unshare;
1280 changes[i].unshare = false;
1282 if (unshare)
1283 validate_unshare_change (insn, loc, newx, 1);
1284 else
1285 validate_change (insn, loc, newx, 1);
1288 return true;
1291 /* Try to process the lvalue expression at *LOC. Return true on success;
1292 leave the caller to clean up on failure. */
1294 bool
1295 insn_propagation::apply_to_lvalue_1 (rtx dest)
1297 rtx old_dest = dest;
1298 while (GET_CODE (dest) == SUBREG
1299 || GET_CODE (dest) == ZERO_EXTRACT
1300 || GET_CODE (dest) == STRICT_LOW_PART)
1302 if (GET_CODE (dest) == ZERO_EXTRACT
1303 && (!apply_to_rvalue_1 (&XEXP (dest, 1))
1304 || !apply_to_rvalue_1 (&XEXP (dest, 2))))
1305 return false;
1306 dest = XEXP (dest, 0);
1309 if (MEM_P (dest))
1310 return apply_to_mem_1 (dest);
1312 /* Check whether the substitution is safe in the presence of this lvalue. */
1313 if (!from
1314 || dest == old_dest
1315 || !REG_P (dest)
1316 || !reg_overlap_mentioned_p (dest, from))
1317 return true;
1319 if (SUBREG_P (old_dest)
1320 && SUBREG_REG (old_dest) == dest
1321 && !read_modify_subreg_p (old_dest))
1322 return true;
1324 failure_reason = "is part of a read-write destination";
1325 return false;
1328 /* Try to process the instruction pattern at *LOC. Return true on success;
1329 leave the caller to clean up on failure. */
1331 bool
1332 insn_propagation::apply_to_pattern_1 (rtx *loc)
1334 rtx body = *loc;
1335 switch (GET_CODE (body))
1337 case COND_EXEC:
1338 return (apply_to_rvalue_1 (&COND_EXEC_TEST (body))
1339 && apply_to_pattern_1 (&COND_EXEC_CODE (body)));
1341 case PARALLEL:
1342 for (int i = 0; i < XVECLEN (body, 0); ++i)
1344 rtx *subloc = &XVECEXP (body, 0, i);
1345 if (GET_CODE (*subloc) == SET)
1347 if (!apply_to_lvalue_1 (SET_DEST (*subloc)))
1348 return false;
1349 /* ASM_OPERANDS are shared between SETs in the same PARALLEL.
1350 Only process them on the first iteration. */
1351 if ((i == 0 || GET_CODE (SET_SRC (*subloc)) != ASM_OPERANDS)
1352 && !apply_to_rvalue_1 (&SET_SRC (*subloc)))
1353 return false;
1355 else
1357 if (!apply_to_pattern_1 (subloc))
1358 return false;
1361 return true;
1363 case ASM_OPERANDS:
1364 for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (body); i < len; ++i)
1365 if (!apply_to_rvalue_1 (&ASM_OPERANDS_INPUT (body, i)))
1366 return false;
1367 return true;
1369 case CLOBBER:
1370 return apply_to_lvalue_1 (XEXP (body, 0));
1372 case SET:
1373 return (apply_to_lvalue_1 (SET_DEST (body))
1374 && apply_to_rvalue_1 (&SET_SRC (body)));
1376 default:
1377 /* All the other possibilities never store and can use a normal
1378 rtx walk. This includes:
1380 - USE
1381 - TRAP_IF
1382 - PREFETCH
1383 - UNSPEC
1384 - UNSPEC_VOLATILE. */
1385 return apply_to_rvalue_1 (loc);
1389 /* Apply this insn_propagation object's simplification or substitution
1390 to the instruction pattern at LOC. */
1392 bool
1393 insn_propagation::apply_to_pattern (rtx *loc)
1395 unsigned int num_changes = num_validated_changes ();
1396 bool res = apply_to_pattern_1 (loc);
1397 if (!res)
1398 cancel_changes (num_changes);
1399 return res;
1402 /* Apply this insn_propagation object's simplification or substitution
1403 to the rvalue expression at LOC. */
1405 bool
1406 insn_propagation::apply_to_rvalue (rtx *loc)
1408 unsigned int num_changes = num_validated_changes ();
1409 bool res = apply_to_rvalue_1 (loc);
1410 if (!res)
1411 cancel_changes (num_changes);
1412 return res;
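
/* A hypothetical usage sketch (illustrative only, assuming the
   (insn, from, to) constructor that recog.h declares for insn_propagation):
   substitute SRC for DEST throughout USE_INSN's pattern.  apply_to_pattern
   cancels its own changes on failure; on success the queued changes still
   need to be committed.  */

static bool
example_propagate_into_insn (rtx_insn *use_insn, rtx dest, rtx src)
{
  insn_propagation prop (use_insn, dest, src);
  if (!prop.apply_to_pattern (&PATTERN (use_insn)))
    return false;
  return apply_change_group ();
}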
1415 /* Check whether INSN matches a specific alternative of an .md pattern. */
1417 bool
1418 valid_insn_p (rtx_insn *insn)
1420 recog_memoized (insn);
1421 if (INSN_CODE (insn) < 0)
1422 return false;
1423 extract_insn (insn);
1424 /* We don't know whether the insn will be in code that is optimized
1425 for size or speed, so consider all enabled alternatives. */
1426 if (!constrain_operands (1, get_enabled_alternatives (insn)))
1427 return false;
1428 return true;
1431 /* Return true if OP is a valid general operand for machine mode MODE.
1432 This is either a register reference, a memory reference,
1433 or a constant. In the case of a memory reference, the address
1434 is checked for general validity for the target machine.
1436 Register and memory references must have mode MODE in order to be valid,
1437 but some constants have no machine mode and are valid for any mode.
1439 If MODE is VOIDmode, OP is checked for validity for whatever mode
1440 it has.
1442 The main use of this function is as a predicate in match_operand
1443 expressions in the machine description. */
1445 bool
1446 general_operand (rtx op, machine_mode mode)
1448 enum rtx_code code = GET_CODE (op);
1450 if (mode == VOIDmode)
1451 mode = GET_MODE (op);
1453 /* Don't accept CONST_INT or anything similar
1454 if the caller wants something floating. */
1455 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1456 && GET_MODE_CLASS (mode) != MODE_INT
1457 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1458 return false;
1460 if (CONST_INT_P (op)
1461 && mode != VOIDmode
1462 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1463 return false;
1465 if (CONSTANT_P (op))
1466 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1467 || mode == VOIDmode)
1468 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1469 && targetm.legitimate_constant_p (mode == VOIDmode
1470 ? GET_MODE (op)
1471 : mode, op));
1473 /* Except for certain constants with VOIDmode, already checked for,
1474 OP's mode must match MODE if MODE specifies a mode. */
1476 if (GET_MODE (op) != mode)
1477 return false;
1479 if (code == SUBREG)
1481 rtx sub = SUBREG_REG (op);
1483 #ifdef INSN_SCHEDULING
1484 /* On machines that have insn scheduling, we want all memory
1485 references to be explicit, so outlaw paradoxical SUBREGs.
1486 However, we must allow them after reload so that they can
1487 get cleaned up by cleanup_subreg_operands. */
1488 if (!reload_completed && MEM_P (sub)
1489 && paradoxical_subreg_p (op))
1490 return false;
1491 #endif
1492 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1493 may result in an incorrect reference. We should simplify all valid
1494 subregs of MEM anyway. But allow this after reload because we
1495 might be called from cleanup_subreg_operands.
1497 ??? This is a kludge. */
1498 if (!reload_completed
1499 && maybe_ne (SUBREG_BYTE (op), 0)
1500 && MEM_P (sub))
1501 return false;
1503 if (REG_P (sub)
1504 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1505 && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1506 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1507 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1508 /* LRA can generate some invalid SUBREGS just for matched
1509 operand reload presentation. LRA needs to treat them as
1510 valid. */
1511 && ! LRA_SUBREG_P (op))
1512 return false;
1514 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1515 create such rtl, and we must reject it. */
1516 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1517 /* LRA can use subreg to store a floating point value in an
1518 integer mode. Although the floating point and the
1519 integer modes need the same number of hard registers, the
1520 size of floating point mode can be less than the integer
1521 mode. */
1522 && ! lra_in_progress
1523 && paradoxical_subreg_p (op))
1524 return false;
1526 op = sub;
1527 code = GET_CODE (op);
1530 if (code == REG)
1531 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1532 || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1534 if (code == MEM)
1536 rtx y = XEXP (op, 0);
1538 if (! volatile_ok && MEM_VOLATILE_P (op))
1539 return false;
1541 /* Use the mem's mode, since it will be reloaded thus. LRA can
1542 generate move insns with invalid addresses, which are made valid
1543 and calculated efficiently by LRA through numerous further
1544 transformations. */
1545 if (lra_in_progress
1546 || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1547 return true;
1550 return false;
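
/* A hypothetical usage sketch (illustrative only; assumes a caller that can
   use explow.h's copy_to_mode_reg): the usual C-level pattern around the
   predicate above, falling back to a fresh pseudo when X is not already a
   valid general operand for MODE.  */

static rtx
example_force_general_operand (rtx x, machine_mode mode)
{
  if (general_operand (x, mode))
    return x;
  /* copy_to_mode_reg emits a move of X into a new pseudo of MODE.  */
  return copy_to_mode_reg (mode, x);
}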
1553 /* Return true if OP is a valid memory address for a memory reference
1554 of mode MODE.
1556 The main use of this function is as a predicate in match_operand
1557 expressions in the machine description. */
1559 bool
1560 address_operand (rtx op, machine_mode mode)
1562 /* Wrong mode for an address expr. */
1563 if (GET_MODE (op) != VOIDmode
1564 && ! SCALAR_INT_MODE_P (GET_MODE (op)))
1565 return false;
1567 return memory_address_p (mode, op);
1570 /* Return true if OP is a register reference of mode MODE.
1571 If MODE is VOIDmode, accept a register in any mode.
1573 The main use of this function is as a predicate in match_operand
1574 expressions in the machine description. */
1576 bool
1577 register_operand (rtx op, machine_mode mode)
1579 if (GET_CODE (op) == SUBREG)
1581 rtx sub = SUBREG_REG (op);
1583 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1584 because it is guaranteed to be reloaded into one.
1585 Just make sure the MEM is valid in itself.
1586 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1587 but currently it does result from (SUBREG (REG)...) where the
1588 reg went on the stack.) */
1589 if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1590 return false;
1592 else if (!REG_P (op))
1593 return false;
1594 return general_operand (op, mode);
1597 /* Return true for a register in Pmode; ignore the tested mode. */
1599 bool
1600 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1602 return register_operand (op, Pmode);
1605 /* Return true if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1606 or a hard register. */
1608 bool
1609 scratch_operand (rtx op, machine_mode mode)
1611 if (GET_MODE (op) != mode && mode != VOIDmode)
1612 return false;
1614 return (GET_CODE (op) == SCRATCH
1615 || (REG_P (op)
1616 && (lra_in_progress
1617 || (REGNO (op) < FIRST_PSEUDO_REGISTER
1618 && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1621 /* Return true if OP is a valid immediate operand for mode MODE.
1623 The main use of this function is as a predicate in match_operand
1624 expressions in the machine description. */
1626 bool
1627 immediate_operand (rtx op, machine_mode mode)
1629 /* Don't accept CONST_INT or anything similar
1630 if the caller wants something floating. */
1631 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1632 && GET_MODE_CLASS (mode) != MODE_INT
1633 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1634 return false;
1636 if (CONST_INT_P (op)
1637 && mode != VOIDmode
1638 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1639 return false;
1641 return (CONSTANT_P (op)
1642 && (GET_MODE (op) == mode || mode == VOIDmode
1643 || GET_MODE (op) == VOIDmode)
1644 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1645 && targetm.legitimate_constant_p (mode == VOIDmode
1646 ? GET_MODE (op)
1647 : mode, op));
1650 /* Return true if OP is an operand that is a CONST_INT of mode MODE. */
1652 bool
1653 const_int_operand (rtx op, machine_mode mode)
1655 if (!CONST_INT_P (op))
1656 return false;
1658 if (mode != VOIDmode
1659 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1660 return false;
1662 return true;
1665 #if TARGET_SUPPORTS_WIDE_INT
1666 /* Return true if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1667 of mode MODE. */
1668 bool
1669 const_scalar_int_operand (rtx op, machine_mode mode)
1671 if (!CONST_SCALAR_INT_P (op))
1672 return false;
1674 if (CONST_INT_P (op))
1675 return const_int_operand (op, mode);
1677 if (mode != VOIDmode)
1679 scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
1680 int prec = GET_MODE_PRECISION (int_mode);
1681 int bitsize = GET_MODE_BITSIZE (int_mode);
1683 if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1684 return false;
1686 if (prec == bitsize)
1687 return true;
1688 else
1690 /* Multiword partial int. */
1691 HOST_WIDE_INT x
1692 = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1693 return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1696 return true;
1699 /* Return true if OP is an operand that is a constant integer or constant
1700 floating-point number of MODE. */
1702 bool
1703 const_double_operand (rtx op, machine_mode mode)
1705 return (GET_CODE (op) == CONST_DOUBLE)
1706 && (GET_MODE (op) == mode || mode == VOIDmode);
1708 #else
1709 /* Return true if OP is an operand that is a constant integer or constant
1710 floating-point number of MODE. */
1712 bool
1713 const_double_operand (rtx op, machine_mode mode)
1715 /* Don't accept CONST_INT or anything similar
1716 if the caller wants something floating. */
1717 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1718 && GET_MODE_CLASS (mode) != MODE_INT
1719 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1720 return false;
1722 return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1723 && (mode == VOIDmode || GET_MODE (op) == mode
1724 || GET_MODE (op) == VOIDmode));
1726 #endif
1727 /* Return true if OP is a general operand that is not an immediate
1728 operand of mode MODE. */
1730 bool
1731 nonimmediate_operand (rtx op, machine_mode mode)
1733 return (general_operand (op, mode) && ! CONSTANT_P (op));
1736 /* Return true if OP is a register reference or
1737 immediate value of mode MODE. */
1739 bool
1740 nonmemory_operand (rtx op, machine_mode mode)
1742 if (CONSTANT_P (op))
1743 return immediate_operand (op, mode);
1744 return register_operand (op, mode);
1747 /* Return true if OP is a valid operand that stands for pushing a
1748 value of mode MODE onto the stack.
1750 The main use of this function is as a predicate in match_operand
1751 expressions in the machine description. */
1753 bool
1754 push_operand (rtx op, machine_mode mode)
1756 if (!MEM_P (op))
1757 return false;
1759 if (mode != VOIDmode && GET_MODE (op) != mode)
1760 return false;
1762 poly_int64 rounded_size = GET_MODE_SIZE (mode);
1764 #ifdef PUSH_ROUNDING
1765 rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
1766 #endif
1768 op = XEXP (op, 0);
1770 if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
1772 if (GET_CODE (op) != STACK_PUSH_CODE)
1773 return false;
1775 else
1777 poly_int64 offset;
1778 if (GET_CODE (op) != PRE_MODIFY
1779 || GET_CODE (XEXP (op, 1)) != PLUS
1780 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1781 || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
1782 || (STACK_GROWS_DOWNWARD
1783 ? maybe_ne (offset, -rounded_size)
1784 : maybe_ne (offset, rounded_size)))
1785 return false;
1788 return XEXP (op, 0) == stack_pointer_rtx;
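
/* For illustration (target-dependent, not exhaustive): on a downward-growing
   stack with no PUSH_ROUNDING the predicate above accepts forms such as
     (mem:SI (pre_dec:P (reg sp)))
   while targets that round the push size use the PRE_MODIFY form
     (mem:SI (pre_modify:P (reg sp)
                           (plus:P (reg sp) (const_int -ROUNDED_SIZE)))).  */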
1791 /* Return true if OP is a valid operand that stands for popping a
1792 value of mode MODE off the stack.
1794 The main use of this function is as a predicate in match_operand
1795 expressions in the machine description. */
1797 bool
1798 pop_operand (rtx op, machine_mode mode)
1800 if (!MEM_P (op))
1801 return false;
1803 if (mode != VOIDmode && GET_MODE (op) != mode)
1804 return false;
1806 op = XEXP (op, 0);
1808 if (GET_CODE (op) != STACK_POP_CODE)
1809 return false;
1811 return XEXP (op, 0) == stack_pointer_rtx;
1814 /* Return true if ADDR is a valid memory address
1815 for mode MODE in address space AS. */
1817 bool
1818 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED, rtx addr,
1819 addr_space_t as, code_helper ch ATTRIBUTE_UNUSED)
1821 #ifdef GO_IF_LEGITIMATE_ADDRESS
1822 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1823 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1824 return false;
1826 win:
1827 return true;
1828 #else
1829 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as, ch);
1830 #endif
1833 /* Return true if OP is a valid memory reference with mode MODE,
1834 including a valid address.
1836 The main use of this function is as a predicate in match_operand
1837 expressions in the machine description. */
1839 bool
1840 memory_operand (rtx op, machine_mode mode)
1842 rtx inner;
1844 if (! reload_completed)
1845 /* Note that no SUBREG is a memory operand before end of reload pass,
1846 because (SUBREG (MEM...)) forces reloading into a register. */
1847 return MEM_P (op) && general_operand (op, mode);
1849 if (mode != VOIDmode && GET_MODE (op) != mode)
1850 return false;
1852 inner = op;
1853 if (GET_CODE (inner) == SUBREG)
1854 inner = SUBREG_REG (inner);
1856 return (MEM_P (inner) && general_operand (op, mode));
1859 /* Return true if OP is a valid indirect memory reference with mode MODE;
1860 that is, a memory reference whose address is a general_operand. */
1862 bool
1863 indirect_operand (rtx op, machine_mode mode)
1865 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1866 if (! reload_completed
1867 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1869 if (mode != VOIDmode && GET_MODE (op) != mode)
1870 return false;
1872 /* The only way that we can have a general_operand as the resulting
1873 address is if OFFSET is zero and the address already is an operand
1874 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1875 operand. */
1876 poly_int64 offset;
1877 rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
1878 return (known_eq (offset + SUBREG_BYTE (op), 0)
1879 && general_operand (addr, Pmode));
1882 return (MEM_P (op)
1883 && memory_operand (op, mode)
1884 && general_operand (XEXP (op, 0), Pmode));
1887 /* Return true if this is an ordered comparison operator (not including
1888 ORDERED and UNORDERED). */
1890 bool
1891 ordered_comparison_operator (rtx op, machine_mode mode)
1893 if (mode != VOIDmode && GET_MODE (op) != mode)
1894 return false;
1895 switch (GET_CODE (op))
1897 case EQ:
1898 case NE:
1899 case LT:
1900 case LTU:
1901 case LE:
1902 case LEU:
1903 case GT:
1904 case GTU:
1905 case GE:
1906 case GEU:
1907 return true;
1908 default:
1909 return false;
1913 /* Return true if this is a comparison operator. This allows the use of
1914 MATCH_OPERATOR to recognize all the branch insns. */
1916 bool
1917 comparison_operator (rtx op, machine_mode mode)
1919 return ((mode == VOIDmode || GET_MODE (op) == mode)
1920 && COMPARISON_P (op));
1923 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1925 rtx
1926 extract_asm_operands (rtx body)
1928 rtx tmp;
1929 switch (GET_CODE (body))
1931 case ASM_OPERANDS:
1932 return body;
1934 case SET:
1935 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1936 tmp = SET_SRC (body);
1937 if (GET_CODE (tmp) == ASM_OPERANDS)
1938 return tmp;
1939 break;
1941 case PARALLEL:
1942 tmp = XVECEXP (body, 0, 0);
1943 if (GET_CODE (tmp) == ASM_OPERANDS)
1944 return tmp;
1945 if (GET_CODE (tmp) == SET)
1947 tmp = SET_SRC (tmp);
1948 if (GET_CODE (tmp) == ASM_OPERANDS)
1949 return tmp;
1951 break;
1953 default:
1954 break;
1956 return NULL;
1959 /* If BODY is an insn body that uses ASM_OPERANDS,
1960 return the number of operands (both input and output) in the insn.
1961 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1962 return 0.
1963 Otherwise return -1. */
1965 int
1966 asm_noperands (const_rtx body)
1968 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1969 int i, n_sets = 0;
1971 if (asm_op == NULL)
1973 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1974 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1976 /* body is [(asm_input ...) (clobber (reg ...))...]. */
1977 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1978 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1979 return -1;
1980 return 0;
1982 return -1;
1985 if (GET_CODE (body) == SET)
1986 n_sets = 1;
1987 else if (GET_CODE (body) == PARALLEL)
1989 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1991 /* Multiple output operands, or 1 output plus some clobbers:
1992 body is
1993 [(set OUTPUT (asm_operands ...))...
1994 (use (reg ...))...
1995 (clobber (reg ...))...]. */
1996 /* Count backwards through USEs and CLOBBERs to determine
1997 number of SETs. */
1998 for (i = XVECLEN (body, 0); i > 0; i--)
2000 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
2001 break;
2002 if (GET_CODE (XVECEXP (body, 0, i - 1)) != USE
2003 && GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
2004 return -1;
2007 /* N_SETS is now number of output operands. */
2008 n_sets = i;
2010 /* Verify that all the SETs we have
2011 came from a single original asm_operands insn
2012 (so that invalid combinations are blocked). */
2013 for (i = 0; i < n_sets; i++)
2015 rtx elt = XVECEXP (body, 0, i);
2016 if (GET_CODE (elt) != SET)
2017 return -1;
2018 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
2019 return -1;
2020 /* If these ASM_OPERANDS rtx's came from different original insns
2021 then they aren't allowed together. */
2022 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
2023 != ASM_OPERANDS_INPUT_VEC (asm_op))
2024 return -1;
2027 else
2029 /* 0 outputs, but some clobbers:
2030 body is [(asm_operands ...)
2031 (use (reg ...))...
2032 (clobber (reg ...))...]. */
2033 /* Make sure all the other parallel things really are USEs or clobbers. */
2034 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
2035 if (GET_CODE (XVECEXP (body, 0, i)) != USE
2036 && GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
2037 return -1;
2041 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
2042 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
2045 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
2046 copy its operands (both input and output) into the vector OPERANDS,
2047 the locations of the operands within the insn into the vector OPERAND_LOCS,
2048 and the constraints for the operands into CONSTRAINTS.
2049 Write the modes of the operands into MODES.
2050 Write the location info into LOC.
2051 Return the assembler-template.
2052 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
2053 return the basic assembly string.
2055 If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
2056 we don't store that info. */
2058 const char *
2059 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
2060 const char **constraints, machine_mode *modes,
2061 location_t *loc)
2063 int nbase = 0, n, i;
2064 rtx asmop;
2066 switch (GET_CODE (body))
2068 case ASM_OPERANDS:
2069 /* Zero output asm: BODY is (asm_operands ...). */
2070 asmop = body;
2071 break;
2073 case SET:
2074 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
2075 asmop = SET_SRC (body);
2077 /* The output is in the SET.
2078 Its constraint is in the ASM_OPERANDS itself. */
2079 if (operands)
2080 operands[0] = SET_DEST (body);
2081 if (operand_locs)
2082 operand_locs[0] = &SET_DEST (body);
2083 if (constraints)
2084 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
2085 if (modes)
2086 modes[0] = GET_MODE (SET_DEST (body));
2087 nbase = 1;
2088 break;
2090 case PARALLEL:
2092 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
2094 asmop = XVECEXP (body, 0, 0);
2095 if (GET_CODE (asmop) == SET)
2097 asmop = SET_SRC (asmop);
2099 /* At least one output, plus some CLOBBERs. The outputs are in
2100 the SETs. Their constraints are in the ASM_OPERANDS itself. */
2101 for (i = 0; i < nparallel; i++)
2103 if (GET_CODE (XVECEXP (body, 0, i)) == USE
2104 || GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
2105 break; /* Past last SET */
2106 gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
2107 if (operands)
2108 operands[i] = SET_DEST (XVECEXP (body, 0, i));
2109 if (operand_locs)
2110 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
2111 if (constraints)
2112 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
2113 if (modes)
2114 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
2116 nbase = i;
2118 else if (GET_CODE (asmop) == ASM_INPUT)
2120 if (loc)
2121 *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
2122 return XSTR (asmop, 0);
2124 break;
2127 default:
2128 gcc_unreachable ();
2131 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
2132 for (i = 0; i < n; i++)
2134 if (operand_locs)
2135 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
2136 if (operands)
2137 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
2138 if (constraints)
2139 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
2140 if (modes)
2141 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
2143 nbase += n;
2145 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
2146 for (i = 0; i < n; i++)
2148 if (operand_locs)
2149 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
2150 if (operands)
2151 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
2152 if (constraints)
2153 constraints[nbase + i] = "";
2154 if (modes)
2155 modes[nbase + i] = Pmode;
2158 if (loc)
2159 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
2161 return ASM_OPERANDS_TEMPLATE (asmop);
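/* Illustrative note (an editorial sketch, not part of the original
   source): the operands come back with all outputs first, then all
   inputs, then all labels.  For

     asm ("add %0,%1,%2" : "=r" (x) : "r" (y), "r" (z));

   OPERANDS[0] is the rtx for x, OPERANDS[1] and OPERANDS[2] are the
   rtx's for y and z, and CONSTRAINTS holds "=r", "r" and "r".  */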
2164 /* Parse inline assembly string STRING and determine which operands are
2165 referenced by % markers. For the first NOPERANDS operands, set USED[I]
2166 to true if operand I is referenced.
2168 This is intended to distinguish barrier-like asms such as:
2170 asm ("" : "=m" (...));
2172 from real references such as:
2174 asm ("sw\t$0, %0" : "=m" (...)); */
2176 void
2177 get_referenced_operands (const char *string, bool *used,
2178 unsigned int noperands)
2180 memset (used, 0, sizeof (bool) * noperands);
2181 const char *p = string;
2182 while (*p)
2183 switch (*p)
2185 case '%':
2186 p += 1;
2187 /* A letter followed by a digit indicates an operand number. */
2188 if (ISALPHA (p[0]) && ISDIGIT (p[1]))
2189 p += 1;
2190 if (ISDIGIT (*p))
2192 char *endptr;
2193 unsigned long opnum = strtoul (p, &endptr, 10);
2194 if (endptr != p && opnum < noperands)
2195 used[opnum] = true;
2196 p = endptr;
2198 else
2199 p += 1;
2200 break;
2202 default:
2203 p++;
2204 break;
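/* Illustrative example (an editorial sketch, not part of the original
   source): for the template "beq\t%1, %2, %l0" with three operands,
   USED[0], USED[1] and USED[2] all become true, whereas the empty
   template "" leaves every USED[I] false.  */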
2208 /* Check if an asm_operand matches its constraints.
2209 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
2211 int
2212 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
2214 int result = 0;
2215 bool incdec_ok = false;
2217 /* Use constrain_operands after reload. */
2218 gcc_assert (!reload_completed);
2220 /* Empty constraint string is the same as "X,...,X", i.e. X for as
2221 many alternatives as required to match the other operands. */
2222 if (*constraint == '\0')
2223 result = 1;
2225 while (*constraint)
2227 enum constraint_num cn;
2228 char c = *constraint;
2229 int len;
2230 switch (c)
2232 case ',':
2233 constraint++;
2234 continue;
2236 case '0': case '1': case '2': case '3': case '4':
2237 case '5': case '6': case '7': case '8': case '9':
2238 /* If caller provided constraints pointer, look up
2239 the matching constraint. Otherwise, our caller should have
2240 given us the proper matching constraint, but we can't
2241 actually fail the check if they didn't. Indicate that
2242 results are inconclusive. */
2243 if (constraints)
2245 char *end;
2246 unsigned long match;
2248 match = strtoul (constraint, &end, 10);
2249 if (!result)
2250 result = asm_operand_ok (op, constraints[match], NULL);
2251 constraint = (const char *) end;
2253 else
2256 constraint++;
2257 while (ISDIGIT (*constraint));
2258 if (! result)
2259 result = -1;
2261 continue;
2263 /* The rest of the compiler assumes that reloading the address
2264 of a MEM into a register will make it fit an 'o' constraint.
2265 That is, if it sees a MEM operand for an 'o' constraint,
2266 it assumes that (mem (base-reg)) will fit.
2268 That assumption fails on targets that don't have offsettable
2269 addresses at all. We therefore need to treat 'o' asm
2270 constraints as a special case and only accept operands that
2271 are already offsettable, thus proving that at least one
2272 offsettable address exists. */
2273 case 'o': /* offsettable */
2274 if (offsettable_nonstrict_memref_p (op))
2275 result = 1;
2276 break;
2278 case 'g':
2279 if (general_operand (op, VOIDmode))
2280 result = 1;
2281 break;
2283 case '<':
2284 case '>':
2285 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
2286 to exist, excepting those that expand_call created. Further,
2287 on some machines which do not have generalized auto inc/dec,
2288 an inc/dec is not a memory_operand.
2290 Match any memory and hope things are resolved after reload. */
2291 incdec_ok = true;
2292 /* FALLTHRU */
2293 default:
2294 cn = lookup_constraint (constraint);
2295 rtx mem = NULL;
2296 switch (get_constraint_type (cn))
2298 case CT_REGISTER:
2299 if (!result
2300 && reg_class_for_constraint (cn) != NO_REGS
2301 && GET_MODE (op) != BLKmode
2302 && register_operand (op, VOIDmode))
2303 result = 1;
2304 break;
2306 case CT_CONST_INT:
2307 if (!result
2308 && CONST_INT_P (op)
2309 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2310 result = 1;
2311 break;
2313 case CT_MEMORY:
2314 case CT_RELAXED_MEMORY:
2315 mem = op;
2316 /* Fall through. */
2317 case CT_SPECIAL_MEMORY:
2318 /* Every memory operand can be reloaded to fit. */
2319 if (!mem)
2320 mem = extract_mem_from_operand (op);
2321 result = result || memory_operand (mem, VOIDmode);
2322 break;
2324 case CT_ADDRESS:
2325 /* Every address operand can be reloaded to fit. */
2326 result = result || address_operand (op, VOIDmode);
2327 break;
2329 case CT_FIXED_FORM:
2330 result = result || constraint_satisfied_p (op, cn);
2331 break;
2333 break;
2335 len = CONSTRAINT_LEN (c, constraint);
2337 constraint++;
2338 while (--len && *constraint && *constraint != ',');
2339 if (len)
2340 return 0;
2343 /* For operands without < or > constraints reject side-effects. */
2344 if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
2345 switch (GET_CODE (XEXP (op, 0)))
2347 case PRE_INC:
2348 case POST_INC:
2349 case PRE_DEC:
2350 case POST_DEC:
2351 case PRE_MODIFY:
2352 case POST_MODIFY:
2353 return 0;
2354 default:
2355 break;
2358 return result;
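/* Illustrative example (an editorial sketch with assumed operands, not
   part of the original source): for the constraint "o", an operand such
   as (mem (plus (reg) (const_int 4))) is accepted because it is already
   offsettable, while (mem (post_inc (reg))) is not -- it fails the
   offsettable test, and without a '<' or '>' constraint its side effect
   would be rejected by the final check anyway.  */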
2361 /* Given an rtx *P, if it is a sum containing an integer constant term,
2362 return the location (type rtx *) of the pointer to that constant term.
2363 Otherwise, return a null pointer. */
2365 rtx *
2366 find_constant_term_loc (rtx *p)
2368 rtx *tem;
2369 enum rtx_code code = GET_CODE (*p);
2371 /* If *P IS such a constant term, P is its location. */
2373 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
2374 || code == CONST)
2375 return p;
2377 /* Otherwise, if not a sum, it has no constant term. */
2379 if (GET_CODE (*p) != PLUS)
2380 return 0;
2382 /* If one of the summands is constant, return its location. */
2384 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2385 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2386 return p;
2388 /* Otherwise, check each summand for containing a constant term. */
2390 if (XEXP (*p, 0) != 0)
2392 tem = find_constant_term_loc (&XEXP (*p, 0));
2393 if (tem != 0)
2394 return tem;
2397 if (XEXP (*p, 1) != 0)
2399 tem = find_constant_term_loc (&XEXP (*p, 1));
2400 if (tem != 0)
2401 return tem;
2404 return 0;
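/* Illustrative example (an editorial sketch, not part of the original
   source): for *P = (plus (reg R) (const_int 8)) the function returns
   &XEXP (*P, 1), the location of the (const_int 8) term; for a plain
   (reg R) it returns a null pointer.  */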
2407 /* Return true if OP is a memory reference whose address contains
2408 no side effects and remains valid after the addition of a positive
2409 integer less than the size of the object being referenced.
2411 We assume that the original address is valid and do not check it.
2413 This uses strict_memory_address_p as a subroutine, so
2414 don't use it before reload. */
2416 bool
2417 offsettable_memref_p (rtx op)
2419 return ((MEM_P (op))
2420 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
2421 MEM_ADDR_SPACE (op)));
2424 /* Similar, but don't require a strictly valid mem ref:
2425 consider pseudo-regs valid as index or base regs. */
2427 bool
2428 offsettable_nonstrict_memref_p (rtx op)
2430 return ((MEM_P (op))
2431 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
2432 MEM_ADDR_SPACE (op)));
2435 /* Return true if Y is a memory address which contains no side effects
2436 and would remain valid for address space AS after the addition of
2437 a positive integer less than the size of that mode.
2439 We assume that the original address is valid and do not check it.
2440 We do check that it is valid for narrower modes.
2442 If STRICTP is nonzero, we require a strictly valid address,
2443 for the sake of use in reload.cc. */
2445 bool
2446 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
2447 addr_space_t as)
2449 enum rtx_code ycode = GET_CODE (y);
2450 rtx z;
2451 rtx y1 = y;
2452 rtx *y2;
2453 bool (*addressp) (machine_mode, rtx, addr_space_t, code_helper) =
2454 (strictp ? strict_memory_address_addr_space_p
2455 : memory_address_addr_space_p);
2456 poly_int64 mode_sz = GET_MODE_SIZE (mode);
2458 if (CONSTANT_ADDRESS_P (y))
2459 return true;
2461 /* Adjusting an offsettable address involves changing to a narrower mode.
2462 Make sure that's OK. */
2464 if (mode_dependent_address_p (y, as))
2465 return false;
2467 machine_mode address_mode = GET_MODE (y);
2468 if (address_mode == VOIDmode)
2469 address_mode = targetm.addr_space.address_mode (as);
2470 #ifdef POINTERS_EXTEND_UNSIGNED
2471 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2472 #endif
2474 /* ??? How much offset does an offsettable BLKmode reference need?
2475 Clearly that depends on the situation in which it's being used.
2476 However, the current situation in which we test 0xffffffff is
2477 less than ideal. Caveat user. */
2478 if (known_eq (mode_sz, 0))
2479 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2481 /* If the expression contains a constant term,
2482 see if it remains valid when max possible offset is added. */
2484 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2486 bool good;
2488 y1 = *y2;
2489 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2490 /* Use QImode because an odd displacement may be automatically invalid
2491 for any wider mode. But it should be valid for a single byte. */
2492 good = (*addressp) (QImode, y, as, ERROR_MARK);
2494 /* In any case, restore old contents of memory. */
2495 *y2 = y1;
2496 return good;
2499 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2500 return false;
2502 /* The offset added here is chosen as the maximum offset that
2503 any instruction could need to add when operating on something
2504 of the specified mode. We assume that if Y and Y+c are
2505 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2506 go inside a LO_SUM here, so we do so as well. */
2507 if (GET_CODE (y) == LO_SUM
2508 && mode != BLKmode
2509 && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
2510 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2511 plus_constant (address_mode, XEXP (y, 1),
2512 mode_sz - 1));
2513 #ifdef POINTERS_EXTEND_UNSIGNED
2514 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2515 else if (POINTERS_EXTEND_UNSIGNED > 0
2516 && GET_CODE (y) == ZERO_EXTEND
2517 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2518 z = gen_rtx_ZERO_EXTEND (address_mode,
2519 plus_constant (pointer_mode, XEXP (y, 0),
2520 mode_sz - 1));
2521 #endif
2522 else
2523 z = plus_constant (address_mode, y, mode_sz - 1);
2525 /* Use QImode because an odd displacement may be automatically invalid
2526 for any wider mode. But it should be valid for a single byte. */
2527 return (*addressp) (QImode, z, as, ERROR_MARK);
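/* Illustrative example (an editorial sketch with assumed numbers, not
   part of the original source): when checking a 4-byte SImode reference
   to (plus (reg R) (const_int 20)), the code above effectively asks the
   target whether (plus (reg R) (const_int 23)) -- the original address
   plus MODE_SZ - 1 -- is still a valid QImode address.  */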
2530 /* Return true if ADDR is an address-expression whose effect depends
2531 on the mode of the memory reference it is used in.
2533 ADDRSPACE is the address space associated with the address.
2535 Autoincrement addressing is a typical example of mode-dependence
2536 because the amount of the increment depends on the mode. */
2538 bool
2539 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2541 /* Auto-increment addressing with anything other than post_modify
2542 or pre_modify always introduces a mode dependency. Catch such
2543 cases now instead of deferring to the target. */
2544 if (GET_CODE (addr) == PRE_INC
2545 || GET_CODE (addr) == POST_INC
2546 || GET_CODE (addr) == PRE_DEC
2547 || GET_CODE (addr) == POST_DEC)
2548 return true;
2550 return targetm.mode_dependent_address_p (addr, addrspace);
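/* Illustrative example (an editorial sketch, not part of the original
   source): (post_inc (reg R)) is mode-dependent because the amount
   added to R equals the size of the mode being accessed, whereas
   (plus (reg R) (const_int 4)) normally is not, unless the target hook
   says otherwise.  */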
2553 /* Return true if boolean attribute ATTR is supported. */
2555 static bool
2556 have_bool_attr (bool_attr attr)
2558 switch (attr)
2560 case BA_ENABLED:
2561 return HAVE_ATTR_enabled;
2562 case BA_PREFERRED_FOR_SIZE:
2563 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2564 case BA_PREFERRED_FOR_SPEED:
2565 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2567 gcc_unreachable ();
2570 /* Return the value of ATTR for instruction INSN. */
2572 static bool
2573 get_bool_attr (rtx_insn *insn, bool_attr attr)
2575 switch (attr)
2577 case BA_ENABLED:
2578 return get_attr_enabled (insn);
2579 case BA_PREFERRED_FOR_SIZE:
2580 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2581 case BA_PREFERRED_FOR_SPEED:
2582 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2584 gcc_unreachable ();
2587 /* Like get_bool_attr_mask, but don't use the cache. */
2589 static alternative_mask
2590 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2592 /* Temporarily install enough information for get_attr_<foo> to assume
2593 that the insn operands are already cached. As above, the attribute
2594 mustn't depend on the values of operands, so we don't provide their
2595 real values here. */
2596 rtx_insn *old_insn = recog_data.insn;
2597 int old_alternative = which_alternative;
2599 recog_data.insn = insn;
2600 alternative_mask mask = ALL_ALTERNATIVES;
2601 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2602 for (int i = 0; i < n_alternatives; i++)
2604 which_alternative = i;
2605 if (!get_bool_attr (insn, attr))
2606 mask &= ~ALTERNATIVE_BIT (i);
2609 recog_data.insn = old_insn;
2610 which_alternative = old_alternative;
2611 return mask;
2614 /* Return the mask of operand alternatives that are allowed for INSN
2615 by boolean attribute ATTR. This mask depends only on INSN and on
2616 the current target; it does not depend on things like the values of
2617 operands. */
2619 static alternative_mask
2620 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2622 /* Quick exit for asms and for targets that don't use these attributes. */
2623 int code = INSN_CODE (insn);
2624 if (code < 0 || !have_bool_attr (attr))
2625 return ALL_ALTERNATIVES;
2627 /* Calling get_attr_<foo> can be expensive, so cache the mask
2628 for speed. */
2629 if (!this_target_recog->x_bool_attr_masks[code][attr])
2630 this_target_recog->x_bool_attr_masks[code][attr]
2631 = get_bool_attr_mask_uncached (insn, attr);
2632 return this_target_recog->x_bool_attr_masks[code][attr];
2635 /* Return the set of alternatives of INSN that are allowed by the current
2636 target. */
2638 alternative_mask
2639 get_enabled_alternatives (rtx_insn *insn)
2641 return get_bool_attr_mask (insn, BA_ENABLED);
2644 /* Return the set of alternatives of INSN that are allowed by the current
2645 target and are preferred for the current size/speed optimization
2646 choice. */
2648 alternative_mask
2649 get_preferred_alternatives (rtx_insn *insn)
2651 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2652 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2653 else
2654 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2657 /* Return the set of alternatives of INSN that are allowed by the current
2658 target and are preferred for the size/speed optimization choice
2659 associated with BB. Passing a separate BB is useful if INSN has not
2660 been emitted yet or if we are considering moving it to a different
2661 block. */
2663 alternative_mask
2664 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2666 if (optimize_bb_for_speed_p (bb))
2667 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2668 else
2669 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2672 /* Assert that the cached boolean attributes for INSN are still accurate.
2673 The backend is required to define these attributes in a way that only
2674 depends on the current target (rather than operands, compiler phase,
2675 etc.). */
2677 bool
2678 check_bool_attrs (rtx_insn *insn)
2680 int code = INSN_CODE (insn);
2681 if (code >= 0)
2682 for (int i = 0; i <= BA_LAST; ++i)
2684 enum bool_attr attr = (enum bool_attr) i;
2685 if (this_target_recog->x_bool_attr_masks[code][attr])
2686 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2687 == get_bool_attr_mask_uncached (insn, attr));
2689 return true;
2692 /* Like extract_insn, but saves the extracted insn and doesn't extract it
2693 again when called again for the same insn, expecting that recog_data still
2694 contains the valid information. This is used primarily by the gen_attr
2695 infrastructure, which often extracts the same insn again and again. */
2696 void
2697 extract_insn_cached (rtx_insn *insn)
2699 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2700 return;
2701 extract_insn (insn);
2702 recog_data.insn = insn;
2705 /* Do uncached extract_insn, constrain_operands and complain about failures.
2706 This should be used when extracting a pre-existing constrained instruction
2707 if the caller wants to know which alternative was chosen. */
2708 void
2709 extract_constrain_insn (rtx_insn *insn)
2711 extract_insn (insn);
2712 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2713 fatal_insn_not_found (insn);
2716 /* Do cached extract_insn, constrain_operands and complain about failures.
2717 Used by insn_attrtab. */
2718 void
2719 extract_constrain_insn_cached (rtx_insn *insn)
2721 extract_insn_cached (insn);
2722 if (which_alternative == -1
2723 && !constrain_operands (reload_completed,
2724 get_enabled_alternatives (insn)))
2725 fatal_insn_not_found (insn);
2728 /* Do cached constrain_operands on INSN and complain about failures. */
2729 bool
2730 constrain_operands_cached (rtx_insn *insn, int strict)
2732 if (which_alternative == -1)
2733 return constrain_operands (strict, get_enabled_alternatives (insn));
2734 else
2735 return true;
2738 /* Analyze INSN and fill in recog_data. */
2740 void
2741 extract_insn (rtx_insn *insn)
2743 int i;
2744 int icode;
2745 int noperands;
2746 rtx body = PATTERN (insn);
2748 recog_data.n_operands = 0;
2749 recog_data.n_alternatives = 0;
2750 recog_data.n_dups = 0;
2751 recog_data.is_asm = false;
2753 switch (GET_CODE (body))
2755 case USE:
2756 case CLOBBER:
2757 case ASM_INPUT:
2758 case ADDR_VEC:
2759 case ADDR_DIFF_VEC:
2760 case VAR_LOCATION:
2761 case DEBUG_MARKER:
2762 return;
2764 case SET:
2765 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2766 goto asm_insn;
2767 else
2768 goto normal_insn;
2769 case PARALLEL:
2770 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2771 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2772 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2773 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2774 goto asm_insn;
2775 else
2776 goto normal_insn;
2777 case ASM_OPERANDS:
2778 asm_insn:
2779 recog_data.n_operands = noperands = asm_noperands (body);
2780 if (noperands >= 0)
2782 /* This insn is an `asm' with operands. */
2784 /* expand_asm_operands makes sure there aren't too many operands. */
2785 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2787 /* Now get the operand values and constraints out of the insn. */
2788 decode_asm_operands (body, recog_data.operand,
2789 recog_data.operand_loc,
2790 recog_data.constraints,
2791 recog_data.operand_mode, NULL);
2792 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2793 if (noperands > 0)
2795 const char *p = recog_data.constraints[0];
2796 recog_data.n_alternatives = 1;
2797 while (*p)
2798 recog_data.n_alternatives += (*p++ == ',');
2800 recog_data.is_asm = true;
2801 break;
2803 fatal_insn_not_found (insn);
2805 default:
2806 normal_insn:
2807 /* Ordinary insn: recognize it, get the operands via insn_extract
2808 and get the constraints. */
2810 icode = recog_memoized (insn);
2811 if (icode < 0)
2812 fatal_insn_not_found (insn);
2814 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2815 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2816 recog_data.n_dups = insn_data[icode].n_dups;
2818 insn_extract (insn);
2820 for (i = 0; i < noperands; i++)
2822 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2823 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2824 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2825 /* VOIDmode match_operands get their mode from the real operand. */
2826 if (recog_data.operand_mode[i] == VOIDmode)
2827 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2830 for (i = 0; i < noperands; i++)
2831 recog_data.operand_type[i]
2832 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2833 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2834 : OP_IN);
2836 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2838 recog_data.insn = NULL;
2839 which_alternative = -1;
2842 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2843 operands, N_ALTERNATIVES alternatives and constraint strings
2844 CONSTRAINTS. OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2845 and CONSTRAINTS has N_OPERANDS entries. OPLOC should be passed in
2846 if the insn is an asm statement and preprocessing should take the
2847 asm operands into account, e.g. to determine whether they could be
2848 addresses in constraints that require addresses; it should then
2849 point to an array of pointers to each operand. */
2851 void
2852 preprocess_constraints (int n_operands, int n_alternatives,
2853 const char **constraints,
2854 operand_alternative *op_alt_base,
2855 rtx **oploc)
2857 for (int i = 0; i < n_operands; i++)
2859 int j;
2860 struct operand_alternative *op_alt;
2861 const char *p = constraints[i];
2863 op_alt = op_alt_base;
2865 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2867 op_alt[i].cl = NO_REGS;
2868 op_alt[i].register_filters = 0;
2869 op_alt[i].constraint = p;
2870 op_alt[i].matches = -1;
2871 op_alt[i].matched = -1;
2873 if (*p == '\0' || *p == ',')
2875 op_alt[i].anything_ok = 1;
2876 continue;
2879 for (;;)
2881 char c = *p;
2882 if (c == '#')
2884 c = *++p;
2885 while (c != ',' && c != '\0');
2886 if (c == ',' || c == '\0')
2888 p++;
2889 break;
2892 switch (c)
2894 case '?':
2895 op_alt[i].reject += 6;
2896 break;
2897 case '!':
2898 op_alt[i].reject += 600;
2899 break;
2900 case '&':
2901 op_alt[i].earlyclobber = 1;
2902 break;
2904 case '0': case '1': case '2': case '3': case '4':
2905 case '5': case '6': case '7': case '8': case '9':
2907 char *end;
2908 op_alt[i].matches = strtoul (p, &end, 10);
2909 op_alt[op_alt[i].matches].matched = i;
2910 p = end;
2912 continue;
2914 case 'X':
2915 op_alt[i].anything_ok = 1;
2916 break;
2918 case 'g':
2919 op_alt[i].cl =
2920 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2921 break;
2923 default:
2924 enum constraint_num cn = lookup_constraint (p);
2925 enum reg_class cl;
2926 switch (get_constraint_type (cn))
2928 case CT_REGISTER:
2929 cl = reg_class_for_constraint (cn);
2930 if (cl != NO_REGS)
2932 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2933 auto filter_id = get_register_filter_id (cn);
2934 if (filter_id >= 0)
2935 op_alt[i].register_filters |= 1U << filter_id;
2937 break;
2939 case CT_CONST_INT:
2940 break;
2942 case CT_MEMORY:
2943 case CT_SPECIAL_MEMORY:
2944 case CT_RELAXED_MEMORY:
2945 op_alt[i].memory_ok = 1;
2946 break;
2948 case CT_ADDRESS:
2949 if (oploc && !address_operand (*oploc[i], VOIDmode))
2950 break;
2952 op_alt[i].is_address = 1;
2953 op_alt[i].cl
2954 = (reg_class_subunion
2955 [(int) op_alt[i].cl]
2956 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2957 ADDRESS, SCRATCH)]);
2958 break;
2960 case CT_FIXED_FORM:
2961 break;
2963 break;
2965 p += CONSTRAINT_LEN (c, p);
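/* Illustrative example (an editorial sketch with an assumed constraint
   string, not part of the original source): for a two-alternative
   operand with the constraint "=r,m", alternative 0 records
   cl = GENERAL_REGS for that operand while alternative 1 records
   memory_ok = 1; the leading '=' only marks the operand as an output
   and adds nothing to the per-alternative class data.  */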
2971 /* Return an array of operand_alternative structures for
2972 instruction ICODE. */
2974 const operand_alternative *
2975 preprocess_insn_constraints (unsigned int icode)
2977 gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2978 if (this_target_recog->x_op_alt[icode])
2979 return this_target_recog->x_op_alt[icode];
2981 int n_operands = insn_data[icode].n_operands;
2982 if (n_operands == 0)
2983 return 0;
2984 /* Always provide at least one alternative so that which_op_alt ()
2985 works correctly. If the instruction has 0 alternatives (i.e. all
2986 constraint strings are empty) then each operand in this alternative
2987 will have anything_ok set. */
2988 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2989 int n_entries = n_operands * n_alternatives;
2991 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2992 const char **constraints = XALLOCAVEC (const char *, n_operands);
2994 for (int i = 0; i < n_operands; ++i)
2995 constraints[i] = insn_data[icode].operand[i].constraint;
2996 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
2997 NULL);
2999 this_target_recog->x_op_alt[icode] = op_alt;
3000 return op_alt;
3003 /* After calling extract_insn, you can use this function to extract some
3004 information from the constraint strings into a more usable form.
3005 The collected data is stored in recog_op_alt. */
3007 void
3008 preprocess_constraints (rtx_insn *insn)
3010 int icode = INSN_CODE (insn);
3011 if (icode >= 0)
3012 recog_op_alt = preprocess_insn_constraints (icode);
3013 else
3015 int n_operands = recog_data.n_operands;
3016 int n_alternatives = recog_data.n_alternatives;
3017 int n_entries = n_operands * n_alternatives;
3018 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
3019 preprocess_constraints (n_operands, n_alternatives,
3020 recog_data.constraints, asm_op_alt,
3021 NULL);
3022 recog_op_alt = asm_op_alt;
3026 /* Check the operands of an insn against the insn's operand constraints
3027 and return 1 if they match any of the alternatives in ALTERNATIVES.
3029 The information about the insn's operands, constraints, operand modes
3030 etc. is obtained from the global variables set up by extract_insn.
3032 WHICH_ALTERNATIVE is set to a number which indicates which
3033 alternative of constraints was matched: 0 for the first alternative,
3034 1 for the next, etc.
3036 In addition, when two operands are required to match
3037 and it happens that the output operand is (reg) while the
3038 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
3039 make the output operand look like the input.
3040 This is because the output operand is the one the template will print.
3042 This is used in final, just before printing the assembler code and by
3043 the routines that determine an insn's attribute.
3045 If STRICT is a positive value, it means that we have been
3046 called after reload has been completed. In that case, we must
3047 do all checks strictly. If it is zero, it means that we have been called
3048 before reload has completed. In that case, we first try to see if we can
3049 find an alternative that matches strictly. If not, we try again, this
3050 time assuming that reload will fix up the insn. This provides a "best
3051 guess" for the alternative and is used to compute attributes of insns prior
3052 to reload. A negative value of STRICT is used for this internal call. */
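/* Editorial usage sketch (an assumed caller, not part of the original
   source): a typical caller does

     extract_insn (insn);
     if (constrain_operands (reload_completed,
			     get_enabled_alternatives (insn)))
       ... which_alternative now holds the matched alternative ...

   which mirrors what extract_constrain_insn above does.  */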
3054 struct funny_match
3055 {
3056 int this_op, other;
3057 };
3059 bool
3060 constrain_operands (int strict, alternative_mask alternatives)
3062 const char *constraints[MAX_RECOG_OPERANDS];
3063 int matching_operands[MAX_RECOG_OPERANDS];
3064 int earlyclobber[MAX_RECOG_OPERANDS];
3065 int c;
3067 struct funny_match funny_match[MAX_RECOG_OPERANDS];
3068 int funny_match_index;
3070 which_alternative = 0;
3071 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
3072 return true;
3074 for (c = 0; c < recog_data.n_operands; c++)
3075 constraints[c] = recog_data.constraints[c];
3079 int seen_earlyclobber_at = -1;
3080 int opno;
3081 bool lose = false;
3082 funny_match_index = 0;
3084 if (!TEST_BIT (alternatives, which_alternative))
3086 int i;
3088 for (i = 0; i < recog_data.n_operands; i++)
3089 constraints[i] = skip_alternative (constraints[i]);
3091 which_alternative++;
3092 continue;
3095 for (opno = 0; opno < recog_data.n_operands; opno++)
3096 matching_operands[opno] = -1;
3098 for (opno = 0; opno < recog_data.n_operands; opno++)
3100 rtx op = recog_data.operand[opno];
3101 machine_mode mode = GET_MODE (op);
3102 const char *p = constraints[opno];
3103 int offset = 0;
3104 bool win = false;
3105 int val;
3106 int len;
3108 earlyclobber[opno] = 0;
3110 if (GET_CODE (op) == SUBREG)
3112 if (REG_P (SUBREG_REG (op))
3113 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
3114 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
3115 GET_MODE (SUBREG_REG (op)),
3116 SUBREG_BYTE (op),
3117 GET_MODE (op));
3118 op = SUBREG_REG (op);
3121 /* An empty constraint or empty alternative
3122 allows anything which matched the pattern. */
3123 if (*p == 0 || *p == ',')
3124 win = true;
3127 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
3129 case '\0':
3130 len = 0;
3131 break;
3132 case ',':
3133 c = '\0';
3134 break;
3136 case '#':
3137 /* Ignore rest of this alternative as far as
3138 constraint checking is concerned. */
3140 p++;
3141 while (*p && *p != ',');
3142 len = 0;
3143 break;
3145 case '&':
3146 earlyclobber[opno] = 1;
3147 if (seen_earlyclobber_at < 0)
3148 seen_earlyclobber_at = opno;
3149 break;
3151 case '0': case '1': case '2': case '3': case '4':
3152 case '5': case '6': case '7': case '8': case '9':
3154 /* This operand must be the same as a previous one.
3155 This kind of constraint is used for instructions such
3156 as add when they take only two operands.
3158 Note that the lower-numbered operand is passed first.
3160 If we are not testing strictly, assume that this
3161 constraint will be satisfied. */
3163 char *end;
3164 int match;
3166 match = strtoul (p, &end, 10);
3167 p = end;
3169 if (strict < 0)
3170 val = 1;
3171 else
3173 rtx op1 = recog_data.operand[match];
3174 rtx op2 = recog_data.operand[opno];
3175 val = operands_match_p (op1, op2);
3178 matching_operands[opno] = match;
3179 matching_operands[match] = opno;
3181 if (val != 0)
3182 win = true;
3184 /* If output is *x and input is *--x, arrange later
3185 to change the output to *--x as well, since the
3186 output op is the one that will be printed. */
3187 if (val == 2 && strict > 0)
3189 funny_match[funny_match_index].this_op = opno;
3190 funny_match[funny_match_index++].other = match;
3193 len = 0;
3194 break;
3196 case 'p':
3197 /* p is used for address_operands. When we are called by
3198 gen_reload, no one will have checked that the address is
3199 strictly valid, i.e., that all pseudos requiring hard regs
3200 have gotten them. We also want to make sure we have a
3201 valid mode. */
3203 auto mem_mode = (recog_data.is_asm
3204 ? VOIDmode
3205 : recog_data.operand_mode[opno]);
3206 if ((GET_MODE (op) == VOIDmode
3207 || SCALAR_INT_MODE_P (GET_MODE (op)))
3208 && (strict <= 0
3209 || strict_memory_address_p (mem_mode, op)))
3210 win = true;
3211 break;
3214 /* No need to check general_operand again;
3215 it was done in insn-recog.cc. Well, except that reload
3216 doesn't check the validity of its replacements, but
3217 that should only matter when there's a bug. */
3218 case 'g':
3219 /* Anything goes unless it is a REG and really has a hard reg
3220 but the hard reg is not in the class GENERAL_REGS. */
3221 if (REG_P (op))
3223 if (strict < 0
3224 || GENERAL_REGS == ALL_REGS
3225 || (reload_in_progress
3226 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
3227 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
3228 win = true;
3230 else if (strict < 0 || general_operand (op, mode))
3231 win = true;
3232 break;
3234 default:
3236 enum constraint_num cn = lookup_constraint (p);
3237 enum reg_class cl = reg_class_for_constraint (cn);
3238 if (cl != NO_REGS)
3240 auto *filter = get_register_filter (cn);
3241 if (strict < 0
3242 || (strict == 0
3243 && REG_P (op)
3244 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
3245 || (strict == 0 && GET_CODE (op) == SCRATCH)
3246 || (REG_P (op)
3247 && reg_fits_class_p (op, cl, offset, mode)
3248 && (!filter
3249 || TEST_HARD_REG_BIT (*filter,
3250 REGNO (op) + offset))))
3251 win = true;
3254 else if (constraint_satisfied_p (op, cn))
3255 win = true;
3257 else if ((insn_extra_memory_constraint (cn)
3258 || insn_extra_relaxed_memory_constraint (cn))
3259 /* Every memory operand can be reloaded to fit. */
3260 && ((strict < 0 && MEM_P (op))
3261 /* Before reload, accept what reload can turn
3262 into a mem. */
3263 || (strict < 0 && CONSTANT_P (op))
3264 /* Before reload, accept a pseudo or hard register,
3265 since LRA can turn it into a mem. */
3266 || (strict < 0 && targetm.lra_p () && REG_P (op))
3267 /* During reload, accept a pseudo */
3268 || (reload_in_progress && REG_P (op)
3269 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
3270 win = true;
3271 else if (insn_extra_address_constraint (cn)
3272 /* Every address operand can be reloaded to fit. */
3273 && strict < 0)
3274 win = true;
3275 /* Cater to architectures like IA-64 that define extra memory
3276 constraints without using define_memory_constraint. */
3277 else if (reload_in_progress
3278 && REG_P (op)
3279 && REGNO (op) >= FIRST_PSEUDO_REGISTER
3280 && reg_renumber[REGNO (op)] < 0
3281 && reg_equiv_mem (REGNO (op)) != 0
3282 && constraint_satisfied_p
3283 (reg_equiv_mem (REGNO (op)), cn))
3284 win = true;
3285 break;
3288 while (p += len, c);
3290 constraints[opno] = p;
3291 /* If this operand did not win somehow,
3292 this alternative loses. */
3293 if (! win)
3294 lose = true;
3296 /* This alternative won; the operands are ok.
3297 Change whichever operands this alternative says to change. */
3298 if (! lose)
3300 int opno, eopno;
3302 /* See if any earlyclobber operand conflicts with some other
3303 operand. */
3305 if (strict > 0 && seen_earlyclobber_at >= 0)
3306 for (eopno = seen_earlyclobber_at;
3307 eopno < recog_data.n_operands;
3308 eopno++)
3309 /* Ignore earlyclobber operands now in memory,
3310 because we would often report failure when we have
3311 two memory operands, one of which was formerly a REG. */
3312 if (earlyclobber[eopno]
3313 && REG_P (recog_data.operand[eopno]))
3314 for (opno = 0; opno < recog_data.n_operands; opno++)
3315 if ((MEM_P (recog_data.operand[opno])
3316 || recog_data.operand_type[opno] != OP_OUT)
3317 && opno != eopno
3318 /* Ignore things like match_operator operands. */
3319 && *recog_data.constraints[opno] != 0
3320 && ! (matching_operands[opno] == eopno
3321 && operands_match_p (recog_data.operand[opno],
3322 recog_data.operand[eopno]))
3323 && ! safe_from_earlyclobber (recog_data.operand[opno],
3324 recog_data.operand[eopno]))
3325 lose = true;
3327 if (! lose)
3329 while (--funny_match_index >= 0)
3331 recog_data.operand[funny_match[funny_match_index].other]
3332 = recog_data.operand[funny_match[funny_match_index].this_op];
3335 /* For operands without < or > constraints reject side-effects. */
3336 if (AUTO_INC_DEC && recog_data.is_asm)
3338 for (opno = 0; opno < recog_data.n_operands; opno++)
3339 if (MEM_P (recog_data.operand[opno]))
3340 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
3342 case PRE_INC:
3343 case POST_INC:
3344 case PRE_DEC:
3345 case POST_DEC:
3346 case PRE_MODIFY:
3347 case POST_MODIFY:
3348 if (strchr (recog_data.constraints[opno], '<') == NULL
3349 && strchr (recog_data.constraints[opno], '>')
3350 == NULL)
3351 return false;
3352 break;
3353 default:
3354 break;
3358 return true;
3362 which_alternative++;
3364 while (which_alternative < recog_data.n_alternatives);
3366 which_alternative = -1;
3367 /* If we are about to reject this, but we are not to test strictly,
3368 try a very loose test. Only return failure if it fails also. */
3369 if (strict == 0)
3370 return constrain_operands (-1, alternatives);
3371 else
3372 return false;
3375 /* Return true iff OPERAND (assumed to be a REG rtx)
3376 is a hard reg in class CLASS when its regno is offset by OFFSET
3377 and changed to mode MODE.
3378 If REG occupies multiple hard regs, all of them must be in CLASS. */
3380 bool
3381 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
3382 machine_mode mode)
3384 unsigned int regno = REGNO (operand);
3386 if (cl == NO_REGS)
3387 return false;
3389 /* Regno must not be a pseudo register. Offset may be negative. */
3390 return (HARD_REGISTER_NUM_P (regno)
3391 && HARD_REGISTER_NUM_P (regno + offset)
3392 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
3393 regno + offset));
3396 /* Split single instruction. Helper function for split_all_insns and
3397 split_all_insns_noflow. Return last insn in the sequence if successful,
3398 or NULL if unsuccessful. */
3400 static rtx_insn *
3401 split_insn (rtx_insn *insn)
3403 /* Split insns here to get max fine-grain parallelism. */
3404 rtx_insn *first = PREV_INSN (insn);
3405 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
3406 rtx insn_set, last_set, note;
3408 if (last == insn)
3409 return NULL;
3411 /* If the original instruction was a single set that was known to be
3412 equivalent to a constant, see if we can say the same about the last
3413 instruction in the split sequence. The two instructions must set
3414 the same destination. */
3415 insn_set = single_set (insn);
3416 if (insn_set)
3418 last_set = single_set (last);
3419 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
3421 note = find_reg_equal_equiv_note (insn);
3422 if (note && CONSTANT_P (XEXP (note, 0)))
3423 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
3424 else if (CONSTANT_P (SET_SRC (insn_set)))
3425 set_unique_reg_note (last, REG_EQUAL,
3426 copy_rtx (SET_SRC (insn_set)));
3430 /* try_split returns the NOTE that INSN became. */
3431 SET_INSN_DELETED (insn);
3433 /* ??? Coddle to md files that generate subregs in post-reload
3434 splitters instead of computing the proper hard register. */
3435 if (reload_completed && first != last)
3437 first = NEXT_INSN (first);
3438 for (;;)
3440 if (INSN_P (first))
3441 cleanup_subreg_operands (first);
3442 if (first == last)
3443 break;
3444 first = NEXT_INSN (first);
3448 return last;
3451 /* Split all insns in the function. */
3453 void
3454 split_all_insns (void)
3456 bool changed;
3457 bool need_cfg_cleanup = false;
3458 basic_block bb;
3460 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
3461 bitmap_clear (blocks);
3462 changed = false;
3464 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3466 rtx_insn *insn, *next;
3467 bool finish = false;
3469 rtl_profile_for_bb (bb);
3470 for (insn = BB_HEAD (bb); !finish ; insn = next)
3472 /* Can't use `next_real_insn' because that might go across
3473 CODE_LABELS and short-out basic blocks. */
3474 next = NEXT_INSN (insn);
3475 finish = (insn == BB_END (bb));
3477 /* If INSN has a REG_EH_REGION note and we split INSN, the
3478 resulting split may not have/need REG_EH_REGION notes.
3480 If that happens and INSN was the last reference to the
3481 given EH region, then the EH region will become unreachable.
3482 We cannot leave the unreachable blocks in the CFG as that
3483 will trigger a checking failure.
3485 So track if INSN has a REG_EH_REGION note. If so and we
3486 split INSN, then trigger a CFG cleanup. */
3487 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3488 if (INSN_P (insn))
3490 rtx set = single_set (insn);
3492 /* Don't split no-op move insns. These should silently
3493 disappear later in final. Splitting such insns would
3494 break the code that handles LIBCALL blocks. */
3495 if (set && set_noop_p (set))
3497 /* Nops get in the way while scheduling, so delete them
3498 now if register allocation has already been done. It
3499 is too risky to try to do this before register
3500 allocation, and there are unlikely to be very many
3501 nops then anyways. */
3502 if (reload_completed)
3503 delete_insn_and_edges (insn);
3504 if (note)
3505 need_cfg_cleanup = true;
3507 else
3509 if (split_insn (insn))
3511 bitmap_set_bit (blocks, bb->index);
3512 changed = true;
3513 if (note)
3514 need_cfg_cleanup = true;
3521 default_rtl_profile ();
3522 if (changed)
3524 find_many_sub_basic_blocks (blocks);
3526 /* Splitting could drop a REG_EH_REGION note if the insn potentially
3527 trapped in its original form, but does not in its split
3528 form. Consider a FLOAT_TRUNCATE which splits into a memory
3529 store/load pair under -fnon-call-exceptions. */
3530 if (need_cfg_cleanup)
3531 cleanup_cfg (0);
3534 checking_verify_flow_info ();
3537 /* Same as split_all_insns, but do not expect CFG to be available.
3538 Used by machine dependent reorg passes. */
3540 void
3541 split_all_insns_noflow (void)
3543 rtx_insn *next, *insn;
3545 for (insn = get_insns (); insn; insn = next)
3547 next = NEXT_INSN (insn);
3548 if (INSN_P (insn))
3550 /* Don't split no-op move insns. These should silently
3551 disappear later in final. Splitting such insns would
3552 break the code that handles LIBCALL blocks. */
3553 rtx set = single_set (insn);
3554 if (set && set_noop_p (set))
3556 /* Nops get in the way while scheduling, so delete them
3557 now if register allocation has already been done. It
3558 is too risky to try to do this before register
3559 allocation, and there are unlikely to be very many
3560 nops then anyways.
3562 ??? Should we use delete_insn when the CFG isn't valid? */
3563 if (reload_completed)
3564 delete_insn_and_edges (insn);
3566 else
3567 split_insn (insn);
3572 struct peep2_insn_data
3573 {
3574 rtx_insn *insn;
3575 regset live_before;
3576 };
3578 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3579 static int peep2_current;
3581 static bool peep2_do_rebuild_jump_labels;
3582 static bool peep2_do_cleanup_cfg;
3584 /* The number of instructions available to match a peep2. */
3585 int peep2_current_count;
3587 /* A marker indicating the last insn of the block. The live_before regset
3588 for this element is correct, indicating DF_LIVE_OUT for the block. */
3589 #define PEEP2_EOB invalid_insn_rtx
3591 /* Wrap N to fit into the peep2_insn_data buffer. */
3593 static int
3594 peep2_buf_position (int n)
3596 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3597 n -= MAX_INSNS_PER_PEEP2 + 1;
3598 return n;
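/* Illustrative example (assuming the usual MAX_INSNS_PER_PEEP2 value of
   5, which is defined elsewhere): positions 0 through 5 map to
   themselves, while position 7 wraps around to 7 - 6 = 1, so the
   peep2_insn_data buffer is used circularly.  */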
3601 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3602 does not exist. Used by the recognizer to find the next insn to match
3603 in a multi-insn pattern. */
3605 rtx_insn *
3606 peep2_next_insn (int n)
3608 gcc_assert (n <= peep2_current_count);
3610 n = peep2_buf_position (peep2_current + n);
3612 return peep2_insn_data[n].insn;
3615 /* Return true if REGNO is dead before the Nth non-note insn
3616 after `current'. */
3618 bool
3619 peep2_regno_dead_p (int ofs, int regno)
3621 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3623 ofs = peep2_buf_position (peep2_current + ofs);
3625 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3627 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3630 /* Similarly for a REG. */
3632 bool
3633 peep2_reg_dead_p (int ofs, rtx reg)
3635 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3637 ofs = peep2_buf_position (peep2_current + ofs);
3639 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3641 unsigned int end_regno = END_REGNO (reg);
3642 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3643 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3644 return false;
3645 return true;
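/* Editorial sketch (an assumed define_peephole2 condition, not part of
   the original source): a target peephole typically guards a rewrite
   with something like

     peep2_reg_dead_p (2, operands[0])

   requiring operands[0] to be dead before the second non-note insn
   after the current one.  */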
3648 /* Regno offset to be used in the register search. */
3649 static int search_ofs;
3651 /* Try to find a hard register of mode MODE, matching the register class in
3652 CLASS_STR, which is available at the beginning of the insn at buffer
3653 position FROM and remains available until the insn at buffer position TO
3654 (both counted from the current peephole position). If FROM equals TO,
3655 the only condition is that the register must be available before that insn.
3656 Registers that already have bits set in REG_SET will not be considered.
3658 If an appropriate register is available, it will be returned and the
3659 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3660 returned. */
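/* Editorial note (a sketch of the expected caller, not part of the
   original source): code generated for define_peephole2 uses this
   routine to satisfy (match_scratch ...) operands, e.g.

     peep2_find_free_register (0, 1, "r", SImode, &regs_allocated)

   asks for a general register that is free from the first matched insn
   through the second; "regs_allocated" stands for a caller-provided
   HARD_REG_SET accumulating registers already handed out.  */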
3662 rtx
3663 peep2_find_free_register (int from, int to, const char *class_str,
3664 machine_mode mode, HARD_REG_SET *reg_set)
3666 enum reg_class cl;
3667 HARD_REG_SET live;
3668 df_ref def;
3669 int i;
3671 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3672 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3674 from = peep2_buf_position (peep2_current + from);
3675 to = peep2_buf_position (peep2_current + to);
3677 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3678 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3680 while (from != to)
3682 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3684 /* Don't use registers set or clobbered by the insn. */
3685 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3686 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3688 from = peep2_buf_position (from + 1);
3691 cl = reg_class_for_constraint (lookup_constraint (class_str));
3693 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3695 int raw_regno, regno, j;
3696 bool success;
3698 /* Distribute the free registers as much as possible. */
3699 raw_regno = search_ofs + i;
3700 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3701 raw_regno -= FIRST_PSEUDO_REGISTER;
3702 #ifdef REG_ALLOC_ORDER
3703 regno = reg_alloc_order[raw_regno];
3704 #else
3705 regno = raw_regno;
3706 #endif
3708 /* Can it support the mode we need? */
3709 if (!targetm.hard_regno_mode_ok (regno, mode))
3710 continue;
3712 success = true;
3713 for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3715 /* Don't allocate fixed registers. */
3716 if (fixed_regs[regno + j])
3718 success = false;
3719 break;
3721 /* Don't allocate global registers. */
3722 if (global_regs[regno + j])
3724 success = false;
3725 break;
3727 /* Make sure the register is of the right class. */
3728 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3730 success = false;
3731 break;
3733 /* And that we don't create an extra save/restore. */
3734 if (! crtl->abi->clobbers_full_reg_p (regno + j)
3735 && ! df_regs_ever_live_p (regno + j))
3737 success = false;
3738 break;
3741 if (! targetm.hard_regno_scratch_ok (regno + j))
3743 success = false;
3744 break;
3747 /* And we don't clobber traceback for noreturn functions. */
3748 if ((regno + j == FRAME_POINTER_REGNUM
3749 || regno + j == HARD_FRAME_POINTER_REGNUM)
3750 && (! reload_completed || frame_pointer_needed))
3752 success = false;
3753 break;
3756 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3757 || TEST_HARD_REG_BIT (live, regno + j))
3759 success = false;
3760 break;
3764 if (success)
3766 add_to_hard_reg_set (reg_set, mode, regno);
3768 /* Start the next search with the next register. */
3769 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3770 raw_regno = 0;
3771 search_ofs = raw_regno;
3773 return gen_rtx_REG (mode, regno);
3777 search_ofs = 0;
3778 return NULL_RTX;
3781 /* Forget all currently tracked instructions; only remember the current
3782 LIVE regset. */
3784 static void
3785 peep2_reinit_state (regset live)
3787 int i;
3789 /* Indicate that all slots except the last hold invalid data. */
3790 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3791 peep2_insn_data[i].insn = NULL;
3792 peep2_current_count = 0;
3794 /* Indicate that the last slot contains live_after data. */
3795 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3796 peep2_current = MAX_INSNS_PER_PEEP2;
3798 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3801 /* Copies frame related info of an insn (OLD_INSN) to the single
3802 insn (NEW_INSN) that was obtained by splitting OLD_INSN. */
3804 void
3805 copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn)
3807 bool any_note = false;
3808 rtx note;
3810 if (!RTX_FRAME_RELATED_P (old_insn))
3811 return;
3813 RTX_FRAME_RELATED_P (new_insn) = 1;
3815 /* Allow the backend to fill in a note during the split. */
3816 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3817 switch (REG_NOTE_KIND (note))
3819 case REG_FRAME_RELATED_EXPR:
3820 case REG_CFA_DEF_CFA:
3821 case REG_CFA_ADJUST_CFA:
3822 case REG_CFA_OFFSET:
3823 case REG_CFA_REGISTER:
3824 case REG_CFA_EXPRESSION:
3825 case REG_CFA_RESTORE:
3826 case REG_CFA_SET_VDRAP:
3827 any_note = true;
3828 break;
3829 default:
3830 break;
3833 /* If the backend didn't supply a note, copy one over. */
3834 if (!any_note)
3835 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3836 switch (REG_NOTE_KIND (note))
3838 case REG_FRAME_RELATED_EXPR:
3839 case REG_CFA_DEF_CFA:
3840 case REG_CFA_ADJUST_CFA:
3841 case REG_CFA_OFFSET:
3842 case REG_CFA_REGISTER:
3843 case REG_CFA_EXPRESSION:
3844 case REG_CFA_RESTORE:
3845 case REG_CFA_SET_VDRAP:
3846 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3847 any_note = true;
3848 break;
3849 default:
3850 break;
3853 /* If there still isn't a note, make sure the unwind info sees the
3854 same expression as before the split. */
3855 if (!any_note)
3857 rtx old_set, new_set;
3859 /* The old insn had better have been simple, or annotated. */
3860 old_set = single_set (old_insn);
3861 gcc_assert (old_set != NULL);
3863 new_set = single_set (new_insn);
3864 if (!new_set || !rtx_equal_p (new_set, old_set))
3865 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3868 /* Copy prologue/epilogue status. This is required in order to keep
3869 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3870 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3873 /* While scanning basic block BB, we found a match of length MATCH_LEN + 1,
3874 starting at INSN. Perform the replacement, removing the old insns and
3875 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3876 if the replacement is rejected. */
3878 static rtx_insn *
3879 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3881 int i;
3882 rtx_insn *last, *before_try, *x;
3883 rtx eh_note, as_note;
3884 rtx_insn *old_insn;
3885 rtx_insn *new_insn;
3886 bool was_call = false;
3888 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3889 match more than one insn, or to be split into more than one insn. */
3890 old_insn = peep2_insn_data[peep2_current].insn;
3891 if (RTX_FRAME_RELATED_P (old_insn))
3893 if (match_len != 0)
3894 return NULL;
3896 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3897 may be in the stream for the purpose of register allocation. */
3898 if (active_insn_p (attempt))
3899 new_insn = attempt;
3900 else
3901 new_insn = next_active_insn (attempt);
3902 if (next_active_insn (new_insn))
3903 return NULL;
3905 /* We have a 1-1 replacement. Copy over any frame-related info. */
3906 copy_frame_info_to_split_insn (old_insn, new_insn);
3909 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3910 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3911 cfg-related call notes. */
3912 for (i = 0; i <= match_len; ++i)
3914 int j;
3915 rtx note;
3917 j = peep2_buf_position (peep2_current + i);
3918 old_insn = peep2_insn_data[j].insn;
3919 if (!CALL_P (old_insn))
3920 continue;
3921 was_call = true;
3923 new_insn = attempt;
3924 while (new_insn != NULL_RTX)
3926 if (CALL_P (new_insn))
3927 break;
3928 new_insn = NEXT_INSN (new_insn);
3931 gcc_assert (new_insn != NULL_RTX);
3933 CALL_INSN_FUNCTION_USAGE (new_insn)
3934 = CALL_INSN_FUNCTION_USAGE (old_insn);
3935 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3937 for (note = REG_NOTES (old_insn);
3938 note;
3939 note = XEXP (note, 1))
3940 switch (REG_NOTE_KIND (note))
3942 case REG_NORETURN:
3943 case REG_SETJMP:
3944 case REG_TM:
3945 case REG_CALL_NOCF_CHECK:
3946 add_reg_note (new_insn, REG_NOTE_KIND (note),
3947 XEXP (note, 0));
3948 break;
3949 default:
3950 /* Discard all other reg notes. */
3951 break;
3954 /* Croak if there is another call in the sequence. */
3955 while (++i <= match_len)
3957 j = peep2_buf_position (peep2_current + i);
3958 old_insn = peep2_insn_data[j].insn;
3959 gcc_assert (!CALL_P (old_insn));
3961 break;
3964 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3965 move those notes over to the new sequence. */
3966 as_note = NULL;
3967 for (i = match_len; i >= 0; --i)
3969 int j = peep2_buf_position (peep2_current + i);
3970 old_insn = peep2_insn_data[j].insn;
3972 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3973 if (as_note)
3974 break;
3977 i = peep2_buf_position (peep2_current + match_len);
3978 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3980 /* Replace the old sequence with the new. */
3981 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3982 last = emit_insn_after_setloc (attempt,
3983 peep2_insn_data[i].insn,
3984 INSN_LOCATION (peepinsn));
3985 if (JUMP_P (peepinsn) && JUMP_P (last))
3986 CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3987 before_try = PREV_INSN (insn);
3988 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3990 /* Re-insert the EH_REGION notes. */
3991 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3993 edge eh_edge;
3994 edge_iterator ei;
3996 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3997 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3998 break;
4000 if (eh_note)
4001 copy_reg_eh_region_note_backward (eh_note, last, before_try);
4003 if (eh_edge)
4004 for (x = last; x != before_try; x = PREV_INSN (x))
4005 if (x != BB_END (bb)
4006 && (can_throw_internal (x)
4007 || can_nonlocal_goto (x)))
4009 edge nfte, nehe;
4010 int flags;
4012 nfte = split_block (bb, x);
4013 flags = (eh_edge->flags
4014 & (EDGE_EH | EDGE_ABNORMAL));
4015 if (CALL_P (x))
4016 flags |= EDGE_ABNORMAL_CALL;
4017 nehe = make_edge (nfte->src, eh_edge->dest,
4018 flags);
4020 nehe->probability = eh_edge->probability;
4021 nfte->probability = nehe->probability.invert ();
4023 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
4024 bb = nfte->src;
4025 eh_edge = nehe;
4028 /* The replacement may have turned a possibly trapping insn into a
4029 non-trapping one, so some outgoing edges may now be dead; zap them. */
4030 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
4033 /* Re-insert the ARGS_SIZE notes. */
4034 if (as_note)
4035 fixup_args_size_notes (before_try, last, get_args_size (as_note));
4037 /* Scan the new insns for embedded side effects and add appropriate
4038 REG_INC notes. */
4039 if (AUTO_INC_DEC)
4040 for (x = last; x != before_try; x = PREV_INSN (x))
4041 if (NONDEBUG_INSN_P (x))
4042 add_auto_inc_notes (x, PATTERN (x));
4044 /* If we generated a jump instruction, it won't have
4045 JUMP_LABEL set. Recompute after we're done. */
4046 for (x = last; x != before_try; x = PREV_INSN (x))
4047 if (JUMP_P (x))
4049 peep2_do_rebuild_jump_labels = true;
4050 break;
4053 return last;
4056 /* After performing a replacement in basic block BB, fix up the life
4057 information in our buffer. LAST is the last of the insns that we
4058 emitted as a replacement. PREV is the insn before the start of
4059 the replacement. MATCH_LEN + 1 is the number of instructions that were
4060 matched, and which now need to be replaced in the buffer. */
4062 static void
4063 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
4064 rtx_insn *prev)
4066 int i = peep2_buf_position (peep2_current + match_len + 1);
4067 rtx_insn *x;
4068 regset_head live;
4070 INIT_REG_SET (&live);
4071 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
4073 gcc_assert (peep2_current_count >= match_len + 1);
4074 peep2_current_count -= match_len + 1;
4076 x = last;
4079 if (INSN_P (x))
4081 df_insn_rescan (x);
4082 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
4084 peep2_current_count++;
4085 if (--i < 0)
4086 i = MAX_INSNS_PER_PEEP2;
4087 peep2_insn_data[i].insn = x;
4088 df_simulate_one_insn_backwards (bb, x, &live);
4089 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
4092 x = PREV_INSN (x);
4094 while (x != prev);
4095 CLEAR_REG_SET (&live);
4097 peep2_current = i;
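/* A worked illustration of the bookkeeping above (all numbers are
   hypothetical): suppose PEEP2_CURRENT is 3 and MATCH_LEN is 1, so the
   matched insns occupied buffer slots 3 and 4, and the replacement
   sequence LAST..PREV contains three insns.  I starts at the slot just
   past the match (5).  Walking backwards from LAST, each new insn is
   rescanned and stored into slots 4, 3 and 2 in turn, with its
   live_before set recomputed by backward simulation, so PEEP2_CURRENT
   ends up at 2 and PEEP2_CURRENT_COUNT grows by one (three insns added,
   two removed).  */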
4100 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
4101 Return true if we added it, false otherwise. The caller will try to match
4102 peepholes against the buffer if we return false; otherwise it will try to
4103 add more instructions to the buffer. */
4105 static bool
4106 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
4108 int pos;
4110 /* Once we have filled the maximum number of insns the buffer can hold,
4111 allow the caller to match the insns against peepholes. We wait until
4112 the buffer is full in case the target has similar peepholes of different
4113 length; we always want to match the longest if possible. */
4114 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
4115 return false;
4117 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
4118 any other pattern, lest it change the semantics of the frame info. */
4119 if (RTX_FRAME_RELATED_P (insn))
4121 /* Let the buffer drain first. */
4122 if (peep2_current_count > 0)
4123 return false;
4124 /* Now the insn will be the only thing in the buffer. */
4127 pos = peep2_buf_position (peep2_current + peep2_current_count);
4128 peep2_insn_data[pos].insn = insn;
4129 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4130 peep2_current_count++;
4132 df_simulate_one_insn_forwards (bb, insn, live);
4133 return true;
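/* To illustrate the "fill before matching" rule above with a made-up
   target: if the backend provides both a two-insn and a three-insn
   define_peephole2 whose first two insns are the same, matching as soon
   as two insns are buffered would never give the three-insn pattern a
   chance to match at all.  Buffering up to MAX_INSNS_PER_PEEP2 insns (or
   to the end of the block) before calling peephole2_insns lets the
   longer, presumably more profitable, pattern be tried first.  */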
4136 /* Perform the peephole2 optimization pass. */
4138 static void
4139 peephole2_optimize (void)
4141 rtx_insn *insn;
4142 bitmap live;
4143 int i;
4144 basic_block bb;
4146 peep2_do_cleanup_cfg = false;
4147 peep2_do_rebuild_jump_labels = false;
4149 df_set_flags (DF_LR_RUN_DCE);
4150 df_note_add_problem ();
4151 df_analyze ();
4153 /* Initialize the regsets we're going to use. */
4154 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4155 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
4156 search_ofs = 0;
4157 live = BITMAP_ALLOC (&reg_obstack);
4159 FOR_EACH_BB_REVERSE_FN (bb, cfun)
4161 bool past_end = false;
4162 int pos;
4164 rtl_profile_for_bb (bb);
4166 /* Start up propagation. */
4167 bitmap_copy (live, DF_LR_IN (bb));
4168 df_simulate_initialize_forwards (bb, live);
4169 peep2_reinit_state (live);
4171 insn = BB_HEAD (bb);
4172 for (;;)
4174 rtx_insn *attempt, *head;
4175 int match_len;
4177 if (!past_end && !NONDEBUG_INSN_P (insn))
4179 next_insn:
4180 insn = NEXT_INSN (insn);
4181 if (insn == NEXT_INSN (BB_END (bb)))
4182 past_end = true;
4183 continue;
4185 if (!past_end && peep2_fill_buffer (bb, insn, live))
4186 goto next_insn;
4188 /* If we did not fill an empty buffer, it signals the end of the
4189 block. */
4190 if (peep2_current_count == 0)
4191 break;
4193 /* The buffer filled to the current maximum, so try to match. */
4195 pos = peep2_buf_position (peep2_current + peep2_current_count);
4196 peep2_insn_data[pos].insn = PEEP2_EOB;
4197 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4199 /* Match the peephole. */
4200 head = peep2_insn_data[peep2_current].insn;
4201 attempt = peephole2_insns (PATTERN (head), head, &match_len);
4202 if (attempt != NULL)
4204 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
4205 if (last)
4207 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
4208 continue;
4212 /* No match: advance the buffer by one insn. */
4213 peep2_current = peep2_buf_position (peep2_current + 1);
4214 peep2_current_count--;
4218 default_rtl_profile ();
4219 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4220 BITMAP_FREE (peep2_insn_data[i].live_before);
4221 BITMAP_FREE (live);
4222 if (peep2_do_rebuild_jump_labels)
4223 rebuild_jump_labels (get_insns ());
4224 if (peep2_do_cleanup_cfg)
4225 cleanup_cfg (CLEANUP_CFG_CHANGED);
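/* For reference, a sketch of the kind of pattern this pass applies.  The
   modes, operands and predicates below are a generic illustration, not a
   pattern taken from any particular backend:

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand")
             (match_operand:SI 1 "register_operand"))
        (set (match_dup 0)
             (plus:SI (match_dup 0)
                      (match_operand:SI 2 "immediate_operand")))]
       ""
       [(set (match_dup 0) (plus:SI (match_dup 1) (match_dup 2)))])

   On a successful match peephole2_insns returns the replacement sequence
   and sets *MATCH_LEN to 1, since two insns were matched; peep2_attempt
   then splices the single replacement insn into the stream.  */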
4228 /* Common predicates for use with define_bypass. */
4230 /* Helper function for store_data_bypass_p, handle just a single SET
4231 IN_SET. */
4233 static bool
4234 store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
4236 if (!MEM_P (SET_DEST (in_set)))
4237 return false;
4239 rtx out_set = single_set (out_insn);
4240 if (out_set)
4241 return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
4243 rtx out_pat = PATTERN (out_insn);
4244 if (GET_CODE (out_pat) != PARALLEL)
4245 return false;
4247 for (int i = 0; i < XVECLEN (out_pat, 0); i++)
4249 rtx out_exp = XVECEXP (out_pat, 0, i);
4251 if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
4252 continue;
4254 gcc_assert (GET_CODE (out_exp) == SET);
4256 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
4257 return false;
4260 return true;
4263 /* True if the dependency between OUT_INSN and IN_INSN is on the store
4264 data not the address operand(s) of the store. IN_INSN and OUT_INSN
4265 must be either a single_set or a PARALLEL with SETs inside. */
4267 bool
4268 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4270 rtx in_set = single_set (in_insn);
4271 if (in_set)
4272 return store_data_bypass_p_1 (out_insn, in_set);
4274 rtx in_pat = PATTERN (in_insn);
4275 if (GET_CODE (in_pat) != PARALLEL)
4276 return false;
4278 for (int i = 0; i < XVECLEN (in_pat, 0); i++)
4280 rtx in_exp = XVECEXP (in_pat, 0, i);
4282 if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
4283 continue;
4285 gcc_assert (GET_CODE (in_exp) == SET);
4287 if (!store_data_bypass_p_1 (out_insn, in_exp))
4288 return false;
4291 return true;
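/* In a machine description this predicate is typically named as the
   optional guard of a define_bypass.  A sketch, with made-up reservation
   names:

     (define_bypass 1 "hypo_load" "hypo_store" "store_data_bypass_p")

   gives a latency of 1 from "hypo_load" producers to "hypo_store"
   consumers, but only when the loaded value feeds the stored data rather
   than the store's address.  */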
4294 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
4295 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
4296 set or multiple sets; IN_INSN must be a single_set for this to return true,
4297 but for convenience of insn categorization it may be any JUMP or CALL insn. */
4299 bool
4300 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4302 rtx out_set, in_set;
4304 in_set = single_set (in_insn);
4305 if (! in_set)
4307 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
4308 return false;
4311 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
4312 return false;
4313 in_set = SET_SRC (in_set);
4315 out_set = single_set (out_insn);
4316 if (out_set)
4318 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
4319 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
4320 return false;
4322 else
4324 rtx out_pat;
4325 int i;
4327 out_pat = PATTERN (out_insn);
4328 gcc_assert (GET_CODE (out_pat) == PARALLEL);
4330 for (i = 0; i < XVECLEN (out_pat, 0); i++)
4332 rtx exp = XVECEXP (out_pat, 0, i);
4334 if (GET_CODE (exp) == CLOBBER)
4335 continue;
4337 gcc_assert (GET_CODE (exp) == SET);
4339 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
4340 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
4341 return false;
4345 return true;
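/* Likewise, a sketch of this predicate used as a define_bypass guard,
   again with made-up reservation names:

     (define_bypass 1 "hypo_alu" "hypo_branch" "if_test_bypass_p")

   applies the latency of 1 only when the ALU result is consumed by the
   IF_THEN_ELSE condition of the branch, not by either of its arms.  */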
4348 static unsigned int
4349 rest_of_handle_peephole2 (void)
4351 if (HAVE_peephole2)
4352 peephole2_optimize ();
4354 return 0;
4357 namespace {
4359 const pass_data pass_data_peephole2 =
4361 RTL_PASS, /* type */
4362 "peephole2", /* name */
4363 OPTGROUP_NONE, /* optinfo_flags */
4364 TV_PEEPHOLE2, /* tv_id */
4365 0, /* properties_required */
4366 0, /* properties_provided */
4367 0, /* properties_destroyed */
4368 0, /* todo_flags_start */
4369 TODO_df_finish, /* todo_flags_finish */
4372 class pass_peephole2 : public rtl_opt_pass
4374 public:
4375 pass_peephole2 (gcc::context *ctxt)
4376 : rtl_opt_pass (pass_data_peephole2, ctxt)
4379 /* opt_pass methods: */
4380 /* The epiphany backend creates a second instance of this pass, so we need
4381 a clone method. */
4382 opt_pass * clone () final override { return new pass_peephole2 (m_ctxt); }
4383 bool gate (function *) final override
4385 return (optimize > 0 && flag_peephole2);
4387 unsigned int execute (function *) final override
4389 return rest_of_handle_peephole2 ();
4392 }; // class pass_peephole2
4394 } // anon namespace
4396 rtl_opt_pass *
4397 make_pass_peephole2 (gcc::context *ctxt)
4399 return new pass_peephole2 (ctxt);
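/* The gate above corresponds to the -fpeephole2 option (flag_peephole2),
   which is normally enabled at -O2 and above; the pass runs after
   register allocation, so the matched patterns see hard registers.  */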
4402 namespace {
4404 const pass_data pass_data_split_all_insns =
4406 RTL_PASS, /* type */
4407 "split1", /* name */
4408 OPTGROUP_NONE, /* optinfo_flags */
4409 TV_NONE, /* tv_id */
4410 0, /* properties_required */
4411 PROP_rtl_split_insns, /* properties_provided */
4412 0, /* properties_destroyed */
4413 0, /* todo_flags_start */
4414 0, /* todo_flags_finish */
4417 class pass_split_all_insns : public rtl_opt_pass
4419 public:
4420 pass_split_all_insns (gcc::context *ctxt)
4421 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
4424 /* opt_pass methods: */
4425 /* The epiphany backend creates a second instance of this pass, so
4426 we need a clone method. */
4427 opt_pass * clone () final override
4429 return new pass_split_all_insns (m_ctxt);
4431 unsigned int execute (function *) final override
4433 split_all_insns ();
4434 return 0;
4437 }; // class pass_split_all_insns
4439 } // anon namespace
4441 rtl_opt_pass *
4442 make_pass_split_all_insns (gcc::context *ctxt)
4444 return new pass_split_all_insns (ctxt);
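/* split_all_insns, called by this pass and by the later split passes,
   applies the machine description's define_split (and
   define_insn_and_split) patterns.  As a generic sketch -- the modes and
   operands are illustrative, not from any particular port -- a doubleword
   register move might be split after reload like so:

     (define_split
       [(set (match_operand:DI 0 "register_operand")
             (match_operand:DI 1 "register_operand"))]
       "reload_completed"
       [(set (match_dup 2) (match_dup 3))
        (set (match_dup 4) (match_dup 5))]
     {
       operands[2] = gen_lowpart (SImode, operands[0]);
       operands[3] = gen_lowpart (SImode, operands[1]);
       operands[4] = gen_highpart (SImode, operands[0]);
       operands[5] = gen_highpart (SImode, operands[1]);
     })
*/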
4447 namespace {
4449 const pass_data pass_data_split_after_reload =
4451 RTL_PASS, /* type */
4452 "split2", /* name */
4453 OPTGROUP_NONE, /* optinfo_flags */
4454 TV_NONE, /* tv_id */
4455 0, /* properties_required */
4456 0, /* properties_provided */
4457 0, /* properties_destroyed */
4458 0, /* todo_flags_start */
4459 0, /* todo_flags_finish */
4462 class pass_split_after_reload : public rtl_opt_pass
4464 public:
4465 pass_split_after_reload (gcc::context *ctxt)
4466 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
4469 /* opt_pass methods: */
4470 bool gate (function *) final override
4472 /* If optimizing, then go ahead and split insns now. */
4473 return optimize > 0;
4476 unsigned int execute (function *) final override
4478 split_all_insns ();
4479 return 0;
4482 }; // class pass_split_after_reload
4484 } // anon namespace
4486 rtl_opt_pass *
4487 make_pass_split_after_reload (gcc::context *ctxt)
4489 return new pass_split_after_reload (ctxt);
4492 static bool
4493 enable_split_before_sched2 (void)
4495 #ifdef INSN_SCHEDULING
4496 return optimize > 0 && flag_schedule_insns_after_reload;
4497 #else
4498 return false;
4499 #endif
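/* flag_schedule_insns_after_reload is the -fschedule-insns2 option, so
   the "split3" pass below is only worthwhile when a second scheduling
   pass will actually see the split insns.  */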
4502 namespace {
4504 const pass_data pass_data_split_before_sched2 =
4506 RTL_PASS, /* type */
4507 "split3", /* name */
4508 OPTGROUP_NONE, /* optinfo_flags */
4509 TV_NONE, /* tv_id */
4510 0, /* properties_required */
4511 0, /* properties_provided */
4512 0, /* properties_destroyed */
4513 0, /* todo_flags_start */
4514 0, /* todo_flags_finish */
4517 class pass_split_before_sched2 : public rtl_opt_pass
4519 public:
4520 pass_split_before_sched2 (gcc::context *ctxt)
4521 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4524 /* opt_pass methods: */
4525 bool gate (function *) final override
4527 return enable_split_before_sched2 ();
4530 unsigned int execute (function *) final override
4532 split_all_insns ();
4533 return 0;
4536 }; // class pass_split_before_sched2
4538 } // anon namespace
4540 rtl_opt_pass *
4541 make_pass_split_before_sched2 (gcc::context *ctxt)
4543 return new pass_split_before_sched2 (ctxt);
4546 namespace {
4548 const pass_data pass_data_split_before_regstack =
4550 RTL_PASS, /* type */
4551 "split4", /* name */
4552 OPTGROUP_NONE, /* optinfo_flags */
4553 TV_NONE, /* tv_id */
4554 0, /* properties_required */
4555 0, /* properties_provided */
4556 0, /* properties_destroyed */
4557 0, /* todo_flags_start */
4558 0, /* todo_flags_finish */
4561 class pass_split_before_regstack : public rtl_opt_pass
4563 public:
4564 pass_split_before_regstack (gcc::context *ctxt)
4565 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4568 /* opt_pass methods: */
4569 bool gate (function *) final override;
4570 unsigned int execute (function *) final override
4572 split_all_insns ();
4573 return 0;
4576 }; // class pass_split_before_regstack
4578 bool
4579 pass_split_before_regstack::gate (function *)
4581 #if HAVE_ATTR_length && defined (STACK_REGS)
4582 /* If flow2 creates new instructions which need splitting
4583 and scheduling after reload is not done, they might not be
4584 split until final, which does not allow splitting
4585 when HAVE_ATTR_length is defined. Selective scheduling can result in
4586 further instructions that need splitting. */
4587 #ifdef INSN_SCHEDULING
4588 return !enable_split_before_sched2 () || flag_selective_scheduling2;
4589 #else
4590 return !enable_split_before_sched2 ();
4591 #endif
4592 #else
4593 return false;
4594 #endif
4597 } // anon namespace
4599 rtl_opt_pass *
4600 make_pass_split_before_regstack (gcc::context *ctxt)
4602 return new pass_split_before_regstack (ctxt);
4605 namespace {
4607 const pass_data pass_data_split_for_shorten_branches =
4609 RTL_PASS, /* type */
4610 "split5", /* name */
4611 OPTGROUP_NONE, /* optinfo_flags */
4612 TV_NONE, /* tv_id */
4613 0, /* properties_required */
4614 0, /* properties_provided */
4615 0, /* properties_destroyed */
4616 0, /* todo_flags_start */
4617 0, /* todo_flags_finish */
4620 class pass_split_for_shorten_branches : public rtl_opt_pass
4622 public:
4623 pass_split_for_shorten_branches (gcc::context *ctxt)
4624 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4627 /* opt_pass methods: */
4628 bool gate (function *) final override
4630 /* The placement of the splitting that we do for shorten_branches
4631 depends on whether regstack is used by the target or not. */
4632 #if HAVE_ATTR_length && !defined (STACK_REGS)
4633 return true;
4634 #else
4635 return false;
4636 #endif
4639 unsigned int execute (function *) final override
4641 split_all_insns_noflow ();
4642 return 0;
4645 }; // class pass_split_for_shorten_branches
4647 } // anon namespace
4649 rtl_opt_pass *
4650 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4652 return new pass_split_for_shorten_branches (ctxt);
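/* To summarize the gates above: "split3" runs before the second
   scheduling pass when that pass is enabled; "split4" runs before
   reg-stack conversion on STACK_REGS targets with HAVE_ATTR_length when
   "split3" did not run (or when selective scheduling may have created
   new insns to split); and "split5" covers the HAVE_ATTR_length,
   non-STACK_REGS case, using split_all_insns_noflow since liveness
   information is no longer maintained that late.  */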
4655 /* (Re)initialize the target information after a change in target. */
4657 void
4658 recog_init ()
4660 /* The information is zero-initialized, so we don't need to do anything
4661 first time round. */
4662 if (!this_target_recog->x_initialized)
4664 this_target_recog->x_initialized = true;
4665 return;
4667 memset (this_target_recog->x_bool_attr_masks, 0,
4668 sizeof (this_target_recog->x_bool_attr_masks));
4669 for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4670 if (this_target_recog->x_op_alt[i])
4672 free (this_target_recog->x_op_alt[i]);
4673 this_target_recog->x_op_alt[i] = 0;