gcc/recog.cc
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "cfghooks.h"
29 #include "df.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "insn-config.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "insn-attr.h"
37 #include "addresses.h"
38 #include "cfgrtl.h"
39 #include "cfgbuild.h"
40 #include "cfgcleanup.h"
41 #include "reload.h"
42 #include "tree-pass.h"
43 #include "function-abi.h"
45 #ifndef STACK_POP_CODE
46 #if STACK_GROWS_DOWNWARD
47 #define STACK_POP_CODE POST_INC
48 #else
49 #define STACK_POP_CODE POST_DEC
50 #endif
51 #endif
53 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx_insn *, bool);
54 static void validate_replace_src_1 (rtx *, void *);
55 static rtx_insn *split_insn (rtx_insn *);
57 struct target_recog default_target_recog;
58 #if SWITCHABLE_TARGET
59 struct target_recog *this_target_recog = &default_target_recog;
60 #endif
62 /* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.cc and expmed.cc (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in reginfo.cc and final.cc and reload.cc.
68 init_recog and init_recog_no_volatile are responsible for setting this. */
70 int volatile_ok;
72 struct recog_data_d recog_data;
74 /* Contains a vector of operand_alternative structures, such that
75 operand OP of alternative A is at index A * n_operands + OP.
76 Set up by preprocess_constraints. */
77 const operand_alternative *recog_op_alt;
79 /* Used to provide recog_op_alt for asms. */
80 static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
81 * MAX_RECOG_ALTERNATIVES];
83 /* On return from `constrain_operands', indicate which alternative
84 was satisfied. */
86 int which_alternative;
88 /* Nonzero after end of reload pass.
89 Set to 1 or 0 by toplev.cc.
90 Controls the significance of (SUBREG (MEM)). */
92 int reload_completed;
94 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
95 int epilogue_completed;
97 /* Initialize data used by the function `recog'.
98 This must be called once in the compilation of a function
99 before any insn recognition may be done in the function. */
101 void
102 init_recog_no_volatile (void)
104 volatile_ok = 0;
107 void
108 init_recog (void)
110 volatile_ok = 1;
114 /* Return true if labels in asm operands BODY are LABEL_REFs. */
116 static bool
117 asm_labels_ok (rtx body)
119 rtx asmop;
120 int i;
122 asmop = extract_asm_operands (body);
123 if (asmop == NULL_RTX)
124 return true;
126 for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
127 if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
128 return false;
130 return true;
133 /* Check that X is an insn-body for an `asm' with operands
134 and that the operands mentioned in it are legitimate. */
136 bool
137 check_asm_operands (rtx x)
139 int noperands;
140 rtx *operands;
141 const char **constraints;
142 int i;
144 if (!asm_labels_ok (x))
145 return false;
147 /* Post-reload, be more strict with things. */
148 if (reload_completed)
150 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
151 rtx_insn *insn = make_insn_raw (x);
152 extract_insn (insn);
153 constrain_operands (1, get_enabled_alternatives (insn));
154 return which_alternative >= 0;
157 noperands = asm_noperands (x);
158 if (noperands < 0)
159 return false;
160 if (noperands == 0)
161 return true;
163 operands = XALLOCAVEC (rtx, noperands);
164 constraints = XALLOCAVEC (const char *, noperands);
166 decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);
168 for (i = 0; i < noperands; i++)
170 const char *c = constraints[i];
171 if (c[0] == '%')
172 c++;
173 if (! asm_operand_ok (operands[i], c, constraints))
174 return false;
177 return true;
180 /* Static data for the next two routines. */
182 struct change_t
184 rtx object;
185 int old_code;
186 int old_len;
187 bool unshare;
188 rtx *loc;
189 rtx old;
192 static change_t *changes;
193 static int changes_allocated;
195 static int num_changes = 0;
196 static int temporarily_undone_changes = 0;
198 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
199 at which NEW_RTX will be placed. If NEW_LEN is >= 0, XVECLEN (NEW_RTX, 0)
200 will also be changed to NEW_LEN, which is no greater than the current
201 XVECLEN. If OBJECT is zero, no validation is done, the change is
202 simply made.
204 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
205 will be called with the address and mode as parameters. If OBJECT is
206 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
207 the change in place.
209 IN_GROUP is nonzero if this is part of a group of changes that must be
210 performed as a group. In that case, the changes will be stored. The
211 function `apply_change_group' will validate and apply the changes.
213 If IN_GROUP is zero, this is a single change. Try to recognize the insn
214 or validate the memory reference with the change applied. If the result
215 is not valid for the machine, suppress the change and return false.
216 Otherwise, perform the change and return true. */
218 static bool
219 validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group,
220 bool unshare, int new_len = -1)
222 gcc_assert (temporarily_undone_changes == 0);
223 rtx old = *loc;
225 /* Single-element parallels aren't valid and won't match anything.
226 Replace them with the single element. */
227 if (new_len == 1 && GET_CODE (new_rtx) == PARALLEL)
229 new_rtx = XVECEXP (new_rtx, 0, 0);
230 new_len = -1;
233 if ((old == new_rtx || rtx_equal_p (old, new_rtx))
234 && (new_len < 0 || XVECLEN (new_rtx, 0) == new_len))
235 return true;
237 gcc_assert ((in_group != 0 || num_changes == 0)
238 && (new_len < 0 || new_rtx == *loc));
240 *loc = new_rtx;
242 /* Save the information describing this change. */
243 if (num_changes >= changes_allocated)
245 if (changes_allocated == 0)
246 /* This value allows for repeated substitutions inside complex
247 indexed addresses, or changes in up to 5 insns. */
248 changes_allocated = MAX_RECOG_OPERANDS * 5;
249 else
250 changes_allocated *= 2;
252 changes = XRESIZEVEC (change_t, changes, changes_allocated);
255 changes[num_changes].object = object;
256 changes[num_changes].loc = loc;
257 changes[num_changes].old = old;
258 changes[num_changes].old_len = (new_len >= 0 ? XVECLEN (new_rtx, 0) : -1);
259 changes[num_changes].unshare = unshare;
261 if (new_len >= 0)
262 XVECLEN (new_rtx, 0) = new_len;
264 if (object && !MEM_P (object))
266 /* Set INSN_CODE to force rerecognition of insn. Save old code in
267 case invalid. */
268 changes[num_changes].old_code = INSN_CODE (object);
269 INSN_CODE (object) = -1;
272 num_changes++;
 274   /* If we are making a group of changes, return true.  Otherwise, validate the
275 change group we made. */
277 if (in_group)
278 return true;
279 else
280 return apply_change_group ();
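/* [Editorial sketch, not part of the original file.]  A minimal example of
   the non-group case described above, under the assumption that the caller
   already holds the insn; the helper name `try_replace_src' and the
   argument `new_src' are hypothetical.

     static bool
     try_replace_src (rtx_insn *insn, rtx new_src)
     {
       rtx set = single_set (insn);
       if (!set)
         return false;
       // With IN_GROUP == 0, validate_change re-recognizes INSN itself and
       // rolls the change back if the result is not a valid insn.
       return validate_change (insn, &SET_SRC (set), new_src, 0);
     }
*/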
 283 /* Wrapper for validate_change_1 that omits the UNSHARE argument,
 284    defaulting UNSHARE to false.  */
286 bool
287 validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
289 return validate_change_1 (object, loc, new_rtx, in_group, false);
 292 /* Wrapper for validate_change_1 that omits the UNSHARE argument,
 293    defaulting UNSHARE to true.  */
295 bool
296 validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
298 return validate_change_1 (object, loc, new_rtx, in_group, true);
301 /* Change XVECLEN (*LOC, 0) to NEW_LEN. OBJECT, IN_GROUP and the return
302 value are as for validate_change_1. */
304 bool
305 validate_change_xveclen (rtx object, rtx *loc, int new_len, bool in_group)
307 return validate_change_1 (object, loc, *loc, in_group, false, new_len);
310 /* Keep X canonicalized if some changes have made it non-canonical; only
311 modifies the operands of X, not (for example) its code. Simplifications
312 are not the job of this routine.
314 Return true if anything was changed. */
315 bool
316 canonicalize_change_group (rtx_insn *insn, rtx x)
318 if (COMMUTATIVE_P (x)
319 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
321 /* Oops, the caller has made X no longer canonical.
322 Let's redo the changes in the correct order. */
323 rtx tem = XEXP (x, 0);
324 validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
325 validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
326 return true;
328 else
329 return false;
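/* [Editorial sketch, not part of the original file.]  A hedged example of
   pairing a grouped replacement with canonicalize_change_group; the helper
   `replace_plus_op0' and its arguments are hypothetical.

     static bool
     replace_plus_op0 (rtx_insn *insn, rtx plus_expr, rtx new_op0)
     {
       // Queue the replacement without validating yet (IN_GROUP == 1).
       validate_change (insn, &XEXP (plus_expr, 0), new_op0, 1);
       // If the replacement made the PLUS non-canonical (say, a constant in
       // operand 0), queue the swap that restores canonical operand order.
       canonicalize_change_group (insn, plus_expr);
       // Validate and commit (or roll back) everything at once.
       return apply_change_group ();
     }
*/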
332 /* Check if REG_INC argument in *data overlaps a stored REG. */
334 static void
335 check_invalid_inc_dec (rtx reg, const_rtx, void *data)
337 rtx *pinc = (rtx *) data;
338 if (*pinc == NULL_RTX || MEM_P (reg))
339 return;
340 if (reg_overlap_mentioned_p (reg, *pinc))
341 *pinc = NULL_RTX;
344 /* This subroutine of apply_change_group verifies whether the changes to INSN
345 were valid; i.e. whether INSN can still be recognized.
 347    If IN_GROUP is true, clobbers that have to be added in order to
 348    match the instruction will be added to the current change group.
349 Otherwise the changes will take effect immediately. */
351 bool
352 insn_invalid_p (rtx_insn *insn, bool in_group)
354 rtx pat = PATTERN (insn);
355 int num_clobbers = 0;
356 /* If we are before reload and the pattern is a SET, see if we can add
357 clobbers. */
358 int icode = recog (pat, insn,
359 (GET_CODE (pat) == SET
360 && ! reload_completed
361 && ! reload_in_progress)
362 ? &num_clobbers : 0);
363 bool is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
 366   /* If this is an asm and the operands aren't legal, then fail.  Likewise if
367 this is not an asm and the insn wasn't recognized. */
368 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
369 || (!is_asm && icode < 0))
370 return true;
372 /* If we have to add CLOBBERs, fail if we have to add ones that reference
373 hard registers since our callers can't know if they are live or not.
374 Otherwise, add them. */
375 if (num_clobbers > 0)
377 rtx newpat;
379 if (added_clobbers_hard_reg_p (icode))
380 return true;
382 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
383 XVECEXP (newpat, 0, 0) = pat;
384 add_clobbers (newpat, icode);
385 if (in_group)
386 validate_change (insn, &PATTERN (insn), newpat, 1);
387 else
388 PATTERN (insn) = pat = newpat;
391 /* After reload, verify that all constraints are satisfied. */
392 if (reload_completed)
394 extract_insn (insn);
396 if (! constrain_operands (1, get_preferred_alternatives (insn)))
397 return true;
400 /* Punt if REG_INC argument overlaps some stored REG. */
401 for (rtx link = FIND_REG_INC_NOTE (insn, NULL_RTX);
402 link; link = XEXP (link, 1))
403 if (REG_NOTE_KIND (link) == REG_INC)
405 rtx reg = XEXP (link, 0);
406 note_stores (insn, check_invalid_inc_dec, &reg);
407 if (reg == NULL_RTX)
408 return true;
411 INSN_CODE (insn) = icode;
412 return false;
415 /* Return number of changes made and not validated yet. */
417 num_changes_pending (void)
419 return num_changes;
422 /* Tentatively apply the changes numbered NUM and up.
423 Return true if all changes are valid, false otherwise. */
425 bool
426 verify_changes (int num)
428 int i;
429 rtx last_validated = NULL_RTX;
431 /* The changes have been applied and all INSN_CODEs have been reset to force
432 rerecognition.
434 The changes are valid if we aren't given an object, or if we are
 435      given a MEM and it still is a valid address, or if this is an insn
436 and it is recognized. In the latter case, if reload has completed,
437 we also require that the operands meet the constraints for
438 the insn. */
440 for (i = num; i < num_changes; i++)
442 rtx object = changes[i].object;
444 /* If there is no object to test or if it is the same as the one we
445 already tested, ignore it. */
446 if (object == 0 || object == last_validated)
447 continue;
449 if (MEM_P (object))
451 if (! memory_address_addr_space_p (GET_MODE (object),
452 XEXP (object, 0),
453 MEM_ADDR_SPACE (object)))
454 break;
456 else if (/* changes[i].old might be zero, e.g. when putting a
457 REG_FRAME_RELATED_EXPR into a previously empty list. */
458 changes[i].old
459 && REG_P (changes[i].old)
460 && asm_noperands (PATTERN (object)) > 0
461 && register_asm_p (changes[i].old))
463 /* Don't allow changes of hard register operands to inline
464 assemblies if they have been defined as register asm ("x"). */
465 break;
467 else if (DEBUG_INSN_P (object))
468 continue;
469 else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
471 rtx pat = PATTERN (object);
473 /* Perhaps we couldn't recognize the insn because there were
474 extra CLOBBERs at the end. If so, try to re-recognize
475 without the last CLOBBER (later iterations will cause each of
476 them to be eliminated, in turn). But don't do this if we
477 have an ASM_OPERAND. */
478 if (GET_CODE (pat) == PARALLEL
479 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
480 && asm_noperands (PATTERN (object)) < 0)
482 rtx newpat;
484 if (XVECLEN (pat, 0) == 2)
485 newpat = XVECEXP (pat, 0, 0);
486 else
488 int j;
490 newpat
491 = gen_rtx_PARALLEL (VOIDmode,
492 rtvec_alloc (XVECLEN (pat, 0) - 1));
493 for (j = 0; j < XVECLEN (newpat, 0); j++)
494 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
497 /* Add a new change to this group to replace the pattern
498 with this new pattern. Then consider this change
499 as having succeeded. The change we added will
500 cause the entire call to fail if things remain invalid.
502 Note that this can lose if a later change than the one
503 we are processing specified &XVECEXP (PATTERN (object), 0, X)
504 but this shouldn't occur. */
506 validate_change (object, &PATTERN (object), newpat, 1);
507 continue;
509 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
510 || GET_CODE (pat) == VAR_LOCATION)
511 /* If this insn is a CLOBBER or USE, it is always valid, but is
512 never recognized. */
513 continue;
514 else
515 break;
517 last_validated = object;
520 return (i == num_changes);
523 /* A group of changes has previously been issued with validate_change
524 and verified with verify_changes. Call df_insn_rescan for each of
525 the insn changed and clear num_changes. */
527 void
528 confirm_change_group (void)
530 int i;
531 rtx last_object = NULL;
533 gcc_assert (temporarily_undone_changes == 0);
534 for (i = 0; i < num_changes; i++)
536 rtx object = changes[i].object;
538 if (changes[i].unshare)
539 *changes[i].loc = copy_rtx (*changes[i].loc);
 541       /* Avoid unnecessary rescanning when multiple changes to the same
 542          instruction are made.  */
543 if (object)
545 if (object != last_object && last_object && INSN_P (last_object))
546 df_insn_rescan (as_a <rtx_insn *> (last_object));
547 last_object = object;
551 if (last_object && INSN_P (last_object))
552 df_insn_rescan (as_a <rtx_insn *> (last_object));
553 num_changes = 0;
556 /* Apply a group of changes previously issued with `validate_change'.
557 If all changes are valid, call confirm_change_group and return true,
558 otherwise, call cancel_changes and return false. */
560 bool
561 apply_change_group (void)
563 if (verify_changes (0))
565 confirm_change_group ();
566 return true;
568 else
570 cancel_changes (0);
571 return false;
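/* [Editorial sketch, not part of the original file.]  A sketch of the
   grouped protocol implemented above: queue several dependent changes with
   IN_GROUP == 1, then let apply_change_group accept or reject them as a
   unit.  The function and variable names are hypothetical.

     static bool
     swap_set_src_and_dest (rtx_insn *insn)
     {
       rtx set = single_set (insn);
       if (!set || !REG_P (SET_DEST (set)) || !REG_P (SET_SRC (set)))
         return false;
       rtx dest = SET_DEST (set), src = SET_SRC (set);
       validate_change (insn, &SET_DEST (set), src, 1);
       validate_change (insn, &SET_SRC (set), dest, 1);
       // Either both changes survive or both are cancelled.
       return apply_change_group ();
     }
*/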
576 /* Return the number of changes so far in the current group. */
579 num_validated_changes (void)
581 return num_changes;
584 /* Retract the changes numbered NUM and up. */
586 void
587 cancel_changes (int num)
589 gcc_assert (temporarily_undone_changes == 0);
590 int i;
592 /* Back out all the changes. Do this in the opposite order in which
593 they were made. */
594 for (i = num_changes - 1; i >= num; i--)
596 if (changes[i].old_len >= 0)
597 XVECLEN (*changes[i].loc, 0) = changes[i].old_len;
598 else
599 *changes[i].loc = changes[i].old;
600 if (changes[i].object && !MEM_P (changes[i].object))
601 INSN_CODE (changes[i].object) = changes[i].old_code;
603 num_changes = num;
606 /* Swap the status of change NUM from being applied to not being applied,
607 or vice versa. */
609 static void
610 swap_change (int num)
612 if (changes[num].old_len >= 0)
613 std::swap (XVECLEN (*changes[num].loc, 0), changes[num].old_len);
614 else
615 std::swap (*changes[num].loc, changes[num].old);
616 if (changes[num].object && !MEM_P (changes[num].object))
617 std::swap (INSN_CODE (changes[num].object), changes[num].old_code);
620 /* Temporarily undo all the changes numbered NUM and up, with a view
621 to reapplying them later. The next call to the changes machinery
622 must be:
624 redo_changes (NUM)
626 otherwise things will end up in an invalid state. */
628 void
629 temporarily_undo_changes (int num)
631 gcc_assert (temporarily_undone_changes == 0 && num <= num_changes);
632 for (int i = num_changes - 1; i >= num; i--)
633 swap_change (i);
634 temporarily_undone_changes = num_changes - num;
637 /* Redo the changes that were temporarily undone by:
639 temporarily_undo_changes (NUM). */
641 void
642 redo_changes (int num)
644 gcc_assert (temporarily_undone_changes == num_changes - num);
645 for (int i = num; i < num_changes; ++i)
646 swap_change (i);
647 temporarily_undone_changes = 0;
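/* [Editorial sketch, not part of the original file.]  The undo/redo pair
   above must bracket each other exactly; a hedged example of inspecting an
   insn in its original form while a change group is still pending.  The
   helper name is hypothetical; insn_cost is the rtl.h cost query.

     static int
     cost_before_pending_changes (rtx_insn *insn, bool speed)
     {
       temporarily_undo_changes (0);   // view INSN as it was originally
       int cost = insn_cost (insn, speed);
       redo_changes (0);               // reapply the whole pending group
       return cost;
     }
*/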
650 /* Reduce conditional compilation elsewhere. */
651 /* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
652 rtx. */
654 static void
655 simplify_while_replacing (rtx *loc, rtx to, rtx_insn *object,
656 machine_mode op0_mode)
658 rtx x = *loc;
659 enum rtx_code code = GET_CODE (x);
660 rtx new_rtx = NULL_RTX;
661 scalar_int_mode is_mode;
663 if (SWAPPABLE_OPERANDS_P (x)
664 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
666 validate_unshare_change (object, loc,
667 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
668 : swap_condition (code),
669 GET_MODE (x), XEXP (x, 1),
670 XEXP (x, 0)), 1);
671 x = *loc;
672 code = GET_CODE (x);
 675   /* Canonicalize arithmetic with all-constant operands.  */
676 switch (GET_RTX_CLASS (code))
678 case RTX_UNARY:
679 if (CONSTANT_P (XEXP (x, 0)))
680 new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
681 op0_mode);
682 break;
683 case RTX_COMM_ARITH:
684 case RTX_BIN_ARITH:
685 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
686 new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
687 XEXP (x, 1));
688 break;
689 case RTX_COMPARE:
690 case RTX_COMM_COMPARE:
691 if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
692 new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
693 XEXP (x, 0), XEXP (x, 1));
694 break;
695 default:
696 break;
698 if (new_rtx)
700 validate_change (object, loc, new_rtx, 1);
701 return;
704 switch (code)
706 case PLUS:
707 /* If we have a PLUS whose second operand is now a CONST_INT, use
708 simplify_gen_binary to try to simplify it.
709 ??? We may want later to remove this, once simplification is
710 separated from this function. */
711 if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
712 validate_change (object, loc,
713 simplify_gen_binary
714 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
715 break;
716 case MINUS:
717 if (CONST_SCALAR_INT_P (XEXP (x, 1)))
718 validate_change (object, loc,
719 simplify_gen_binary
720 (PLUS, GET_MODE (x), XEXP (x, 0),
721 simplify_gen_unary (NEG,
722 GET_MODE (x), XEXP (x, 1),
723 GET_MODE (x))), 1);
724 break;
725 case ZERO_EXTEND:
726 case SIGN_EXTEND:
727 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
729 new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
730 op0_mode);
731 /* If any of the above failed, substitute in something that
732 we know won't be recognized. */
733 if (!new_rtx)
734 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
735 validate_change (object, loc, new_rtx, 1);
737 break;
738 case SUBREG:
739 /* All subregs possible to simplify should be simplified. */
740 new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
741 SUBREG_BYTE (x));
743 /* Subregs of VOIDmode operands are incorrect. */
744 if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
745 new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
746 if (new_rtx)
747 validate_change (object, loc, new_rtx, 1);
748 break;
749 case ZERO_EXTRACT:
750 case SIGN_EXTRACT:
751 /* If we are replacing a register with memory, try to change the memory
752 to be the mode required for memory in extract operations (this isn't
753 likely to be an insertion operation; if it was, nothing bad will
754 happen, we might just fail in some cases). */
756 if (MEM_P (XEXP (x, 0))
757 && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &is_mode)
758 && CONST_INT_P (XEXP (x, 1))
759 && CONST_INT_P (XEXP (x, 2))
760 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
761 MEM_ADDR_SPACE (XEXP (x, 0)))
762 && !MEM_VOLATILE_P (XEXP (x, 0)))
764 int pos = INTVAL (XEXP (x, 2));
765 machine_mode new_mode = is_mode;
766 if (GET_CODE (x) == ZERO_EXTRACT && targetm.have_extzv ())
767 new_mode = insn_data[targetm.code_for_extzv].operand[1].mode;
768 else if (GET_CODE (x) == SIGN_EXTRACT && targetm.have_extv ())
769 new_mode = insn_data[targetm.code_for_extv].operand[1].mode;
770 scalar_int_mode wanted_mode = (new_mode == VOIDmode
771 ? word_mode
772 : as_a <scalar_int_mode> (new_mode));
774 /* If we have a narrower mode, we can do something. */
775 if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
777 int offset = pos / BITS_PER_UNIT;
778 rtx newmem;
780 /* If the bytes and bits are counted differently, we
781 must adjust the offset. */
782 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
783 offset =
784 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
785 offset);
787 gcc_assert (GET_MODE_PRECISION (wanted_mode)
788 == GET_MODE_BITSIZE (wanted_mode));
789 pos %= GET_MODE_BITSIZE (wanted_mode);
791 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
793 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
794 validate_change (object, &XEXP (x, 0), newmem, 1);
798 break;
800 default:
801 break;
805 /* Replace every occurrence of FROM in X with TO. Mark each change with
806 validate_change passing OBJECT. */
808 static void
809 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx_insn *object,
810 bool simplify)
812 int i, j;
813 const char *fmt;
814 rtx x = *loc;
815 enum rtx_code code;
816 machine_mode op0_mode = VOIDmode;
817 int prev_changes = num_changes;
819 if (!x)
820 return;
822 code = GET_CODE (x);
823 fmt = GET_RTX_FORMAT (code);
824 if (fmt[0] == 'e')
825 op0_mode = GET_MODE (XEXP (x, 0));
827 /* X matches FROM if it is the same rtx or they are both referring to the
828 same register in the same mode. Avoid calling rtx_equal_p unless the
829 operands look similar. */
831 if (x == from
832 || (REG_P (x) && REG_P (from)
833 && GET_MODE (x) == GET_MODE (from)
834 && REGNO (x) == REGNO (from))
835 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
836 && rtx_equal_p (x, from)))
838 validate_unshare_change (object, loc, to, 1);
839 return;
 842   /* Call ourselves recursively to perform the replacements.
 843      We must not replace inside an already-replaced expression; otherwise we
 844      get infinite recursion for replacements like (reg X)->(subreg (reg X)),
 845      so we must special-case shared ASM_OPERANDS.
847 if (GET_CODE (x) == PARALLEL)
849 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
851 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
852 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
854 /* Verify that operands are really shared. */
855 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
856 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
857 (x, 0, j))));
858 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
859 from, to, object, simplify);
861 else
862 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
863 simplify);
866 else
867 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
869 if (fmt[i] == 'e')
870 validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
871 else if (fmt[i] == 'E')
872 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
873 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
874 simplify);
877 /* If we didn't substitute, there is nothing more to do. */
878 if (num_changes == prev_changes)
879 return;
 881   /* ??? The regmove pass is no more, so is this aberration still necessary?  */
 882   /* Allow the substituted expression to have a different mode.  This was used
 883      by regmove to change the mode of a pseudo register.  */
884 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
885 op0_mode = GET_MODE (XEXP (x, 0));
887 /* Do changes needed to keep rtx consistent. Don't do any other
888 simplifications, as it is not our job. */
889 if (simplify)
890 simplify_while_replacing (loc, to, object, op0_mode);
893 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
894 with TO. After all changes have been made, validate by seeing
895 if INSN is still valid. */
897 bool
898 validate_replace_rtx_subexp (rtx from, rtx to, rtx_insn *insn, rtx *loc)
900 validate_replace_rtx_1 (loc, from, to, insn, true);
901 return apply_change_group ();
904 /* Try replacing every occurrence of FROM in INSN with TO. After all
905 changes have been made, validate by seeing if INSN is still valid. */
907 bool
908 validate_replace_rtx (rtx from, rtx to, rtx_insn *insn)
910 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
911 return apply_change_group ();
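/* [Editorial sketch, not part of the original file.]  A hedged example of
   the whole-insn replacement helper above: substitute a known constant for
   a register and keep the change only if INSN still matches.  The helper
   name and arguments are hypothetical.

     static bool
     substitute_known_constant (rtx_insn *insn, rtx reg, rtx cst)
     {
       // Replaces every occurrence of REG in PATTERN (INSN) with CST,
       // simplifying along the way, then re-recognizes INSN and either
       // commits or rolls back the whole change group.
       return validate_replace_rtx (reg, cst, insn);
     }
*/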
914 /* Try replacing every occurrence of FROM in WHERE with TO. Assume that WHERE
915 is a part of INSN. After all changes have been made, validate by seeing if
916 INSN is still valid.
917 validate_replace_rtx (from, to, insn) is equivalent to
918 validate_replace_rtx_part (from, to, &PATTERN (insn), insn). */
920 bool
921 validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx_insn *insn)
923 validate_replace_rtx_1 (where, from, to, insn, true);
924 return apply_change_group ();
927 /* Same as above, but do not simplify rtx afterwards. */
928 bool
929 validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
930 rtx_insn *insn)
932 validate_replace_rtx_1 (where, from, to, insn, false);
933 return apply_change_group ();
 937 /* Try replacing every occurrence of FROM in INSN with TO.  This also
 938    replaces occurrences in REG_EQUAL and REG_EQUIV notes.  */
940 void
941 validate_replace_rtx_group (rtx from, rtx to, rtx_insn *insn)
943 rtx note;
944 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
945 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
946 if (REG_NOTE_KIND (note) == REG_EQUAL
947 || REG_NOTE_KIND (note) == REG_EQUIV)
948 validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
951 /* Function called by note_uses to replace used subexpressions. */
952 struct validate_replace_src_data
954 rtx from; /* Old RTX */
955 rtx to; /* New RTX */
956 rtx_insn *insn; /* Insn in which substitution is occurring. */
959 static void
960 validate_replace_src_1 (rtx *x, void *data)
962 struct validate_replace_src_data *d
963 = (struct validate_replace_src_data *) data;
965 validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
968 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
969 SET_DESTs. */
971 void
972 validate_replace_src_group (rtx from, rtx to, rtx_insn *insn)
974 struct validate_replace_src_data d;
976 d.from = from;
977 d.to = to;
978 d.insn = insn;
979 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
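/* [Editorial sketch, not part of the original file.]  A sketch contrasting
   the source-only variant above with validate_replace_rtx_group: this one
   leaves SET_DESTs alone, so a destination register is never rewritten.
   The helper name is hypothetical.

     static bool
     forward_value_into_uses (rtx_insn *insn, rtx reg, rtx value)
     {
       // Queue replacements only in the used (source) positions of INSN.
       validate_replace_src_group (reg, value, insn);
       // Commit them if INSN is still recognizable, cancel them otherwise.
       return apply_change_group ();
     }
*/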
 982 /* Try to simplify INSN.
983 Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
984 pattern and return true if something was simplified. */
986 bool
987 validate_simplify_insn (rtx_insn *insn)
989 int i;
990 rtx pat = NULL;
991 rtx newpat = NULL;
993 pat = PATTERN (insn);
995 if (GET_CODE (pat) == SET)
997 newpat = simplify_rtx (SET_SRC (pat));
998 if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
999 validate_change (insn, &SET_SRC (pat), newpat, 1);
1000 newpat = simplify_rtx (SET_DEST (pat));
1001 if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
1002 validate_change (insn, &SET_DEST (pat), newpat, 1);
1004 else if (GET_CODE (pat) == PARALLEL)
1005 for (i = 0; i < XVECLEN (pat, 0); i++)
1007 rtx s = XVECEXP (pat, 0, i);
1009 if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
1011 newpat = simplify_rtx (SET_SRC (s));
1012 if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
1013 validate_change (insn, &SET_SRC (s), newpat, 1);
1014 newpat = simplify_rtx (SET_DEST (s));
1015 if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
1016 validate_change (insn, &SET_DEST (s), newpat, 1);
1019 return ((num_changes_pending () > 0) && (apply_change_group () > 0));
1022 /* Try to process the address of memory expression MEM. Return true on
1023 success; leave the caller to clean up on failure. */
1025 bool
1026 insn_propagation::apply_to_mem_1 (rtx mem)
1028 auto old_num_changes = num_validated_changes ();
1029 mem_depth += 1;
1030 bool res = apply_to_rvalue_1 (&XEXP (mem, 0));
1031 mem_depth -= 1;
1032 if (!res)
1033 return false;
1035 if (old_num_changes != num_validated_changes ()
1036 && should_check_mems
1037 && !check_mem (old_num_changes, mem))
1038 return false;
1040 return true;
1043 /* Try to process the rvalue expression at *LOC. Return true on success;
1044 leave the caller to clean up on failure. */
1046 bool
1047 insn_propagation::apply_to_rvalue_1 (rtx *loc)
1049 rtx x = *loc;
1050 enum rtx_code code = GET_CODE (x);
1051 machine_mode mode = GET_MODE (x);
1053 auto old_num_changes = num_validated_changes ();
1054 if (from && GET_CODE (x) == GET_CODE (from) && rtx_equal_p (x, from))
1056 /* Don't replace register asms in asm statements; we mustn't
1057 change the user's register allocation. */
1058 if (REG_P (x)
1059 && HARD_REGISTER_P (x)
1060 && register_asm_p (x)
1061 && asm_noperands (PATTERN (insn)) > 0)
1062 return false;
1064 if (should_unshare)
1065 validate_unshare_change (insn, loc, to, 1);
1066 else
1067 validate_change (insn, loc, to, 1);
1068 if (mem_depth && !REG_P (to) && !CONSTANT_P (to))
1070 /* We're substituting into an address, but TO will have the
1071 form expected outside an address. Canonicalize it if
1072 necessary. */
1073 insn_propagation subprop (insn);
1074 subprop.mem_depth += 1;
1075 if (!subprop.apply_to_rvalue (loc))
1076 gcc_unreachable ();
1077 if (should_unshare
1078 && num_validated_changes () != old_num_changes + 1)
1080 /* TO is owned by someone else, so create a copy and
1081 return TO to its original form. */
1082 rtx to = copy_rtx (*loc);
1083 cancel_changes (old_num_changes);
1084 validate_change (insn, loc, to, 1);
1087 num_replacements += 1;
1088 should_unshare = true;
1089 result_flags |= UNSIMPLIFIED;
1090 return true;
1093 /* Recursively apply the substitution and see if we can simplify
1094 the result. This specifically shouldn't use simplify_gen_* for
1095 speculative simplifications, since we want to avoid generating new
1096 expressions where possible. */
1097 auto old_result_flags = result_flags;
1098 rtx newx = NULL_RTX;
1099 bool recurse_p = false;
1100 switch (GET_RTX_CLASS (code))
1102 case RTX_UNARY:
1104 machine_mode op0_mode = GET_MODE (XEXP (x, 0));
1105 if (!apply_to_rvalue_1 (&XEXP (x, 0)))
1106 return false;
1107 if (from && old_num_changes == num_validated_changes ())
1108 return true;
1110 newx = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
1111 break;
1114 case RTX_BIN_ARITH:
1115 case RTX_COMM_ARITH:
1117 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1118 || !apply_to_rvalue_1 (&XEXP (x, 1)))
1119 return false;
1120 if (from && old_num_changes == num_validated_changes ())
1121 return true;
1123 if (GET_RTX_CLASS (code) == RTX_COMM_ARITH
1124 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
1125 newx = simplify_gen_binary (code, mode, XEXP (x, 1), XEXP (x, 0));
1126 else
1127 newx = simplify_binary_operation (code, mode,
1128 XEXP (x, 0), XEXP (x, 1));
1129 break;
1132 case RTX_COMPARE:
1133 case RTX_COMM_COMPARE:
1135 machine_mode op_mode = (GET_MODE (XEXP (x, 0)) != VOIDmode
1136 ? GET_MODE (XEXP (x, 0))
1137 : GET_MODE (XEXP (x, 1)));
1138 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1139 || !apply_to_rvalue_1 (&XEXP (x, 1)))
1140 return false;
1141 if (from && old_num_changes == num_validated_changes ())
1142 return true;
1144 newx = simplify_relational_operation (code, mode, op_mode,
1145 XEXP (x, 0), XEXP (x, 1));
1146 break;
1149 case RTX_TERNARY:
1150 case RTX_BITFIELD_OPS:
1152 machine_mode op0_mode = GET_MODE (XEXP (x, 0));
1153 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1154 || !apply_to_rvalue_1 (&XEXP (x, 1))
1155 || !apply_to_rvalue_1 (&XEXP (x, 2)))
1156 return false;
1157 if (from && old_num_changes == num_validated_changes ())
1158 return true;
1160 newx = simplify_ternary_operation (code, mode, op0_mode,
1161 XEXP (x, 0), XEXP (x, 1),
1162 XEXP (x, 2));
1163 break;
1166 case RTX_EXTRA:
1167 if (code == SUBREG)
1169 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
1170 if (!apply_to_rvalue_1 (&SUBREG_REG (x)))
1171 return false;
1172 if (from && old_num_changes == num_validated_changes ())
1173 return true;
1175 rtx inner = SUBREG_REG (x);
1176 newx = simplify_subreg (mode, inner, inner_mode, SUBREG_BYTE (x));
1177 /* Reject the same cases that simplify_gen_subreg would. */
1178 if (!newx
1179 && (GET_CODE (inner) == SUBREG
1180 || GET_CODE (inner) == CONCAT
1181 || GET_MODE (inner) == VOIDmode
1182 || !validate_subreg (mode, inner_mode,
1183 inner, SUBREG_BYTE (x))))
1185 failure_reason = "would create an invalid subreg";
1186 return false;
1188 break;
1190 else
1191 recurse_p = true;
1192 break;
1194 case RTX_OBJ:
1195 if (code == LO_SUM)
1197 if (!apply_to_rvalue_1 (&XEXP (x, 0))
1198 || !apply_to_rvalue_1 (&XEXP (x, 1)))
1199 return false;
1200 if (from && old_num_changes == num_validated_changes ())
1201 return true;
1203 /* (lo_sum (high x) y) -> y where x and y have the same base. */
1204 rtx op0 = XEXP (x, 0);
1205 rtx op1 = XEXP (x, 1);
1206 if (GET_CODE (op0) == HIGH)
1208 rtx base0, base1, offset0, offset1;
1209 split_const (XEXP (op0, 0), &base0, &offset0);
1210 split_const (op1, &base1, &offset1);
1211 if (rtx_equal_p (base0, base1))
1212 newx = op1;
1215 else if (code == REG)
1217 if (from && REG_P (from) && reg_overlap_mentioned_p (x, from))
1219 failure_reason = "inexact register overlap";
1220 return false;
1223 else if (code == MEM)
1224 return apply_to_mem_1 (x);
1225 else
1226 recurse_p = true;
1227 break;
1229 case RTX_CONST_OBJ:
1230 break;
1232 case RTX_AUTOINC:
1233 if (from && reg_overlap_mentioned_p (XEXP (x, 0), from))
1235 failure_reason = "is subject to autoinc";
1236 return false;
1238 recurse_p = true;
1239 break;
1241 case RTX_MATCH:
1242 case RTX_INSN:
1243 gcc_unreachable ();
1246 if (recurse_p)
1248 const char *fmt = GET_RTX_FORMAT (code);
1249 for (int i = 0; fmt[i]; i++)
1250 switch (fmt[i])
1252 case 'E':
1253 for (int j = 0; j < XVECLEN (x, i); j++)
1254 if (!apply_to_rvalue_1 (&XVECEXP (x, i, j)))
1255 return false;
1256 break;
1258 case 'e':
1259 if (XEXP (x, i) && !apply_to_rvalue_1 (&XEXP (x, i)))
1260 return false;
1261 break;
1264 else if (newx && !rtx_equal_p (x, newx))
1266 /* All substitutions made by OLD_NUM_CHANGES onwards have been
1267 simplified. */
1268 result_flags = ((result_flags & ~UNSIMPLIFIED)
1269 | (old_result_flags & UNSIMPLIFIED));
1271 if (should_note_simplifications)
1272 note_simplification (old_num_changes, old_result_flags, x, newx);
1274 /* There's no longer any point unsharing the substitutions made
1275 for subexpressions, since we'll just copy this one instead. */
1276 bool unshare = false;
1277 for (int i = old_num_changes; i < num_changes; ++i)
1279 unshare |= changes[i].unshare;
1280 changes[i].unshare = false;
1282 if (unshare)
1283 validate_unshare_change (insn, loc, newx, 1);
1284 else
1285 validate_change (insn, loc, newx, 1);
1288 return true;
1291 /* Try to process the lvalue expression at *LOC. Return true on success;
1292 leave the caller to clean up on failure. */
1294 bool
1295 insn_propagation::apply_to_lvalue_1 (rtx dest)
1297 rtx old_dest = dest;
1298 while (GET_CODE (dest) == SUBREG
1299 || GET_CODE (dest) == ZERO_EXTRACT
1300 || GET_CODE (dest) == STRICT_LOW_PART)
1302 if (GET_CODE (dest) == ZERO_EXTRACT
1303 && (!apply_to_rvalue_1 (&XEXP (dest, 1))
1304 || !apply_to_rvalue_1 (&XEXP (dest, 2))))
1305 return false;
1306 dest = XEXP (dest, 0);
1309 if (MEM_P (dest))
1310 return apply_to_mem_1 (dest);
1312 /* Check whether the substitution is safe in the presence of this lvalue. */
1313 if (!from
1314 || dest == old_dest
1315 || !REG_P (dest)
1316 || !reg_overlap_mentioned_p (dest, from))
1317 return true;
1319 if (SUBREG_P (old_dest)
1320 && SUBREG_REG (old_dest) == dest
1321 && !read_modify_subreg_p (old_dest))
1322 return true;
1324 failure_reason = "is part of a read-write destination";
1325 return false;
1328 /* Try to process the instruction pattern at *LOC. Return true on success;
1329 leave the caller to clean up on failure. */
1331 bool
1332 insn_propagation::apply_to_pattern_1 (rtx *loc)
1334 rtx body = *loc;
1335 switch (GET_CODE (body))
1337 case COND_EXEC:
1338 return (apply_to_rvalue_1 (&COND_EXEC_TEST (body))
1339 && apply_to_pattern_1 (&COND_EXEC_CODE (body)));
1341 case PARALLEL:
1343 int last = XVECLEN (body, 0) - 1;
1344 for (int i = 0; i < last; ++i)
1345 if (!apply_to_pattern_1 (&XVECEXP (body, 0, i)))
1346 return false;
1347 return apply_to_pattern_1 (&XVECEXP (body, 0, last));
1350 case ASM_OPERANDS:
1351 for (int i = 0, len = ASM_OPERANDS_INPUT_LENGTH (body); i < len; ++i)
1352 if (!apply_to_rvalue_1 (&ASM_OPERANDS_INPUT (body, i)))
1353 return false;
1354 return true;
1356 case CLOBBER:
1357 return apply_to_lvalue_1 (XEXP (body, 0));
1359 case SET:
1360 return (apply_to_lvalue_1 (SET_DEST (body))
1361 && apply_to_rvalue_1 (&SET_SRC (body)));
1363 default:
1364 /* All the other possibilities never store and can use a normal
1365 rtx walk. This includes:
1367 - USE
1368 - TRAP_IF
1369 - PREFETCH
1370 - UNSPEC
1371 - UNSPEC_VOLATILE. */
1372 return apply_to_rvalue_1 (loc);
1376 /* Apply this insn_propagation object's simplification or substitution
1377 to the instruction pattern at LOC. */
1379 bool
1380 insn_propagation::apply_to_pattern (rtx *loc)
1382 unsigned int num_changes = num_validated_changes ();
1383 bool res = apply_to_pattern_1 (loc);
1384 if (!res)
1385 cancel_changes (num_changes);
1386 return res;
1389 /* Apply this insn_propagation object's simplification or substitution
1390 to the rvalue expression at LOC. */
1392 bool
1393 insn_propagation::apply_to_rvalue (rtx *loc)
1395 unsigned int num_changes = num_validated_changes ();
1396 bool res = apply_to_rvalue_1 (loc);
1397 if (!res)
1398 cancel_changes (num_changes);
1399 return res;
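/* [Editorial sketch, not part of the original file.]  A hedged example of
   driving the insn_propagation machinery above in the style of its users
   (forward propagation): substitute SRC for DEST throughout USE_INSN's
   pattern and commit only if the result is recognizable.  The helper name
   is hypothetical, and the (insn, from, to) constructor is assumed to be
   the one declared alongside this class in recog.h.

     static bool
     try_propagate (rtx_insn *use_insn, rtx dest, rtx src)
     {
       insn_propagation prop (use_insn, dest, src);
       if (!prop.apply_to_pattern (&PATTERN (use_insn))
           || prop.num_replacements == 0)
         return false;
       // apply_to_pattern queued its substitutions as a change group; this
       // validates them and keeps or cancels them as a unit.
       return apply_change_group ();
     }
*/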
1402 /* Check whether INSN matches a specific alternative of an .md pattern. */
1404 bool
1405 valid_insn_p (rtx_insn *insn)
1407 recog_memoized (insn);
1408 if (INSN_CODE (insn) < 0)
1409 return false;
1410 extract_insn (insn);
1411 /* We don't know whether the insn will be in code that is optimized
1412 for size or speed, so consider all enabled alternatives. */
1413 if (!constrain_operands (1, get_enabled_alternatives (insn)))
1414 return false;
1415 return true;
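/* [Editorial sketch, not part of the original file.]  valid_insn_p above is
   the natural check after editing an insn in place outside the change-group
   machinery; a hedged example with hypothetical names (a real caller would
   also notify DF of the change).

     static bool
     set_new_pattern (rtx_insn *insn, rtx new_pat)
     {
       rtx old_pat = PATTERN (insn);
       PATTERN (insn) = new_pat;
       INSN_CODE (insn) = -1;          // force re-recognition
       if (valid_insn_p (insn))
         return true;
       PATTERN (insn) = old_pat;       // roll back by hand
       INSN_CODE (insn) = -1;
       return false;
     }
*/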
1418 /* Return true if OP is a valid general operand for machine mode MODE.
1419 This is either a register reference, a memory reference,
1420 or a constant. In the case of a memory reference, the address
1421 is checked for general validity for the target machine.
1423 Register and memory references must have mode MODE in order to be valid,
1424 but some constants have no machine mode and are valid for any mode.
1426 If MODE is VOIDmode, OP is checked for validity for whatever mode
1427 it has.
1429 The main use of this function is as a predicate in match_operand
1430 expressions in the machine description. */
1432 bool
1433 general_operand (rtx op, machine_mode mode)
1435 enum rtx_code code = GET_CODE (op);
1437 if (mode == VOIDmode)
1438 mode = GET_MODE (op);
1440 /* Don't accept CONST_INT or anything similar
1441 if the caller wants something floating. */
1442 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1443 && GET_MODE_CLASS (mode) != MODE_INT
1444 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1445 return false;
1447 if (CONST_INT_P (op)
1448 && mode != VOIDmode
1449 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1450 return false;
1452 if (CONSTANT_P (op))
1453 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1454 || mode == VOIDmode)
1455 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1456 && targetm.legitimate_constant_p (mode == VOIDmode
1457 ? GET_MODE (op)
1458 : mode, op));
1460 /* Except for certain constants with VOIDmode, already checked for,
1461 OP's mode must match MODE if MODE specifies a mode. */
1463 if (GET_MODE (op) != mode)
1464 return false;
1466 if (code == SUBREG)
1468 rtx sub = SUBREG_REG (op);
1470 #ifdef INSN_SCHEDULING
1471       /* On machines that have insn scheduling, we want all memory
1472 	  references to be explicit, so outlaw paradoxical SUBREGs.
1473 However, we must allow them after reload so that they can
1474 get cleaned up by cleanup_subreg_operands. */
1475 if (!reload_completed && MEM_P (sub)
1476 && paradoxical_subreg_p (op))
1477 return false;
1478 #endif
1479       /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
1480 	  may result in an incorrect reference.  We should simplify all valid
1481 subregs of MEM anyway. But allow this after reload because we
1482 might be called from cleanup_subreg_operands.
1484 ??? This is a kludge. */
1485 if (!reload_completed
1486 && maybe_ne (SUBREG_BYTE (op), 0)
1487 && MEM_P (sub))
1488 return false;
1490 if (REG_P (sub)
1491 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1492 && !REG_CAN_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1493 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1494 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
1495 /* LRA can generate some invalid SUBREGS just for matched
1496 operand reload presentation. LRA needs to treat them as
1497 valid. */
1498 && ! LRA_SUBREG_P (op))
1499 return false;
1501 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1502 create such rtl, and we must reject it. */
1503 if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
1504 /* LRA can use subreg to store a floating point value in an
1505 integer mode. Although the floating point and the
1506 integer modes need the same number of hard registers, the
1507 size of floating point mode can be less than the integer
1508 mode. */
1509 && ! lra_in_progress
1510 && paradoxical_subreg_p (op))
1511 return false;
1513 op = sub;
1514 code = GET_CODE (op);
1517 if (code == REG)
1518 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
1519 || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));
1521 if (code == MEM)
1523 rtx y = XEXP (op, 0);
1525 if (! volatile_ok && MEM_VOLATILE_P (op))
1526 return false;
1528       /* Use the mem's mode, since it will be reloaded thus.  LRA can
1529 	  generate move insns with invalid addresses, which are later made
1530 	  valid and efficiently computed by LRA through numerous further
1531 	  transformations.  */
1532 if (lra_in_progress
1533 || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
1534 return true;
1537 return false;
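/* [Editorial sketch, not part of the original file.]  Besides backing the
   match_operand predicate in .md files, general_operand is also called
   directly when expanding RTL; a hedged example, with a hypothetical helper
   name, of legitimizing a value first.

     static rtx
     make_general_operand (rtx x, machine_mode mode)
     {
       // If X is not already a register, memory reference or legitimate
       // constant of MODE, copy it into a fresh pseudo register.
       if (!general_operand (x, mode))
         x = force_reg (mode, x);
       return x;
     }
*/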
1540 /* Return true if OP is a valid memory address for a memory reference
1541 of mode MODE.
1543 The main use of this function is as a predicate in match_operand
1544 expressions in the machine description. */
1546 bool
1547 address_operand (rtx op, machine_mode mode)
1549 /* Wrong mode for an address expr. */
1550 if (GET_MODE (op) != VOIDmode
1551 && ! SCALAR_INT_MODE_P (GET_MODE (op)))
1552 return false;
1554 return memory_address_p (mode, op);
1557 /* Return true if OP is a register reference of mode MODE.
1558 If MODE is VOIDmode, accept a register in any mode.
1560 The main use of this function is as a predicate in match_operand
1561 expressions in the machine description. */
1563 bool
1564 register_operand (rtx op, machine_mode mode)
1566 if (GET_CODE (op) == SUBREG)
1568 rtx sub = SUBREG_REG (op);
1570 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1571 because it is guaranteed to be reloaded into one.
1572 Just make sure the MEM is valid in itself.
1573 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1574 but currently it does result from (SUBREG (REG)...) where the
1575 reg went on the stack.) */
1576 if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
1577 return false;
1579 else if (!REG_P (op))
1580 return false;
1581 return general_operand (op, mode);
1584 /* Return true for a register in Pmode; ignore the tested mode. */
1586 bool
1587 pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
1589 return register_operand (op, Pmode);
1592 /* Return true if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1593 or a hard register. */
1595 bool
1596 scratch_operand (rtx op, machine_mode mode)
1598 if (GET_MODE (op) != mode && mode != VOIDmode)
1599 return false;
1601 return (GET_CODE (op) == SCRATCH
1602 || (REG_P (op)
1603 && (lra_in_progress
1604 || (REGNO (op) < FIRST_PSEUDO_REGISTER
1605 && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
1608 /* Return true if OP is a valid immediate operand for mode MODE.
1610 The main use of this function is as a predicate in match_operand
1611 expressions in the machine description. */
1613 bool
1614 immediate_operand (rtx op, machine_mode mode)
1616 /* Don't accept CONST_INT or anything similar
1617 if the caller wants something floating. */
1618 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1619 && GET_MODE_CLASS (mode) != MODE_INT
1620 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1621 return false;
1623 if (CONST_INT_P (op)
1624 && mode != VOIDmode
1625 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1626 return false;
1628 return (CONSTANT_P (op)
1629 && (GET_MODE (op) == mode || mode == VOIDmode
1630 || GET_MODE (op) == VOIDmode)
1631 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1632 && targetm.legitimate_constant_p (mode == VOIDmode
1633 ? GET_MODE (op)
1634 : mode, op));
1637 /* Return true if OP is an operand that is a CONST_INT of mode MODE. */
1639 bool
1640 const_int_operand (rtx op, machine_mode mode)
1642 if (!CONST_INT_P (op))
1643 return false;
1645 if (mode != VOIDmode
1646 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1647 return false;
1649 return true;
1652 #if TARGET_SUPPORTS_WIDE_INT
1653 /* Return true if OP is an operand that is a CONST_INT or CONST_WIDE_INT
1654 of mode MODE. */
1655 bool
1656 const_scalar_int_operand (rtx op, machine_mode mode)
1658 if (!CONST_SCALAR_INT_P (op))
1659 return false;
1661 if (CONST_INT_P (op))
1662 return const_int_operand (op, mode);
1664 if (mode != VOIDmode)
1666 scalar_int_mode int_mode = as_a <scalar_int_mode> (mode);
1667 int prec = GET_MODE_PRECISION (int_mode);
1668 int bitsize = GET_MODE_BITSIZE (int_mode);
1670 if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
1671 return false;
1673 if (prec == bitsize)
1674 return true;
1675 else
1677 /* Multiword partial int. */
1678 HOST_WIDE_INT x
1679 = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
1680 return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
1683 return true;
1686 /* Return true if OP is an operand that is a constant integer or constant
1687 floating-point number of MODE. */
1689 bool
1690 const_double_operand (rtx op, machine_mode mode)
1692 return (GET_CODE (op) == CONST_DOUBLE)
1693 && (GET_MODE (op) == mode || mode == VOIDmode);
1695 #else
1696 /* Return true if OP is an operand that is a constant integer or constant
1697 floating-point number of MODE. */
1699 bool
1700 const_double_operand (rtx op, machine_mode mode)
1702 /* Don't accept CONST_INT or anything similar
1703 if the caller wants something floating. */
1704 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1705 && GET_MODE_CLASS (mode) != MODE_INT
1706 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1707 return false;
1709 return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
1710 && (mode == VOIDmode || GET_MODE (op) == mode
1711 || GET_MODE (op) == VOIDmode));
1713 #endif
1714 /* Return true if OP is a general operand that is not an immediate
1715 operand of mode MODE. */
1717 bool
1718 nonimmediate_operand (rtx op, machine_mode mode)
1720 return (general_operand (op, mode) && ! CONSTANT_P (op));
1723 /* Return true if OP is a register reference or
1724 immediate value of mode MODE. */
1726 bool
1727 nonmemory_operand (rtx op, machine_mode mode)
1729 if (CONSTANT_P (op))
1730 return immediate_operand (op, mode);
1731 return register_operand (op, mode);
1734 /* Return true if OP is a valid operand that stands for pushing a
1735 value of mode MODE onto the stack.
1737 The main use of this function is as a predicate in match_operand
1738 expressions in the machine description. */
1740 bool
1741 push_operand (rtx op, machine_mode mode)
1743 if (!MEM_P (op))
1744 return false;
1746 if (mode != VOIDmode && GET_MODE (op) != mode)
1747 return false;
1749 poly_int64 rounded_size = GET_MODE_SIZE (mode);
1751 #ifdef PUSH_ROUNDING
1752 rounded_size = PUSH_ROUNDING (MACRO_INT (rounded_size));
1753 #endif
1755 op = XEXP (op, 0);
1757 if (known_eq (rounded_size, GET_MODE_SIZE (mode)))
1759 if (GET_CODE (op) != STACK_PUSH_CODE)
1760 return false;
1762 else
1764 poly_int64 offset;
1765 if (GET_CODE (op) != PRE_MODIFY
1766 || GET_CODE (XEXP (op, 1)) != PLUS
1767 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1768 || !poly_int_rtx_p (XEXP (XEXP (op, 1), 1), &offset)
1769 || (STACK_GROWS_DOWNWARD
1770 ? maybe_ne (offset, -rounded_size)
1771 : maybe_ne (offset, rounded_size)))
1772 return false;
1775 return XEXP (op, 0) == stack_pointer_rtx;
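/* [Editorial sketch, not part of the original file.]  A hedged example of
   the kind of RTL that satisfies push_operand above, assuming a target
   whose STACK_PUSH_CODE is PRE_DEC and whose PUSH_ROUNDING does not pad
   MODE; the helper name is hypothetical.

     static rtx
     make_push_mem (machine_mode mode)
     {
       // (mem:MODE (pre_dec:P (reg sp))) -- pushes a MODE-sized value.
       rtx addr = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
       rtx mem = gen_rtx_MEM (mode, addr);
       gcc_checking_assert (push_operand (mem, mode));
       return mem;
     }
*/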
1778 /* Return true if OP is a valid operand that stands for popping a
1779 value of mode MODE off the stack.
1781 The main use of this function is as a predicate in match_operand
1782 expressions in the machine description. */
1784 bool
1785 pop_operand (rtx op, machine_mode mode)
1787 if (!MEM_P (op))
1788 return false;
1790 if (mode != VOIDmode && GET_MODE (op) != mode)
1791 return false;
1793 op = XEXP (op, 0);
1795 if (GET_CODE (op) != STACK_POP_CODE)
1796 return false;
1798 return XEXP (op, 0) == stack_pointer_rtx;
1801 /* Return true if ADDR is a valid memory address
1802 for mode MODE in address space AS. */
1804 bool
1805 memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED, rtx addr,
1806 addr_space_t as, code_helper ch ATTRIBUTE_UNUSED)
1808 #ifdef GO_IF_LEGITIMATE_ADDRESS
1809 gcc_assert (ADDR_SPACE_GENERIC_P (as));
1810 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1811 return false;
1813 win:
1814 return true;
1815 #else
1816 return targetm.addr_space.legitimate_address_p (mode, addr, 0, as, ch);
1817 #endif
1820 /* Return true if OP is a valid memory reference with mode MODE,
1821 including a valid address.
1823 The main use of this function is as a predicate in match_operand
1824 expressions in the machine description. */
1826 bool
1827 memory_operand (rtx op, machine_mode mode)
1829 rtx inner;
1831 if (! reload_completed)
1832 /* Note that no SUBREG is a memory operand before end of reload pass,
1833 because (SUBREG (MEM...)) forces reloading into a register. */
1834 return MEM_P (op) && general_operand (op, mode);
1836 if (mode != VOIDmode && GET_MODE (op) != mode)
1837 return false;
1839 inner = op;
1840 if (GET_CODE (inner) == SUBREG)
1841 inner = SUBREG_REG (inner);
1843 return (MEM_P (inner) && general_operand (op, mode));
1846 /* Return true if OP is a valid indirect memory reference with mode MODE;
1847 that is, a memory reference whose address is a general_operand. */
1849 bool
1850 indirect_operand (rtx op, machine_mode mode)
1852 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1853 if (! reload_completed
1854 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1856 if (mode != VOIDmode && GET_MODE (op) != mode)
1857 return false;
1859 /* The only way that we can have a general_operand as the resulting
1860 address is if OFFSET is zero and the address already is an operand
1861 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1862 operand. */
1863 poly_int64 offset;
1864 rtx addr = strip_offset (XEXP (SUBREG_REG (op), 0), &offset);
1865 return (known_eq (offset + SUBREG_BYTE (op), 0)
1866 && general_operand (addr, Pmode));
1869 return (MEM_P (op)
1870 && memory_operand (op, mode)
1871 && general_operand (XEXP (op, 0), Pmode));
1874 /* Return true if this is an ordered comparison operator (not including
1875 ORDERED and UNORDERED). */
1877 bool
1878 ordered_comparison_operator (rtx op, machine_mode mode)
1880 if (mode != VOIDmode && GET_MODE (op) != mode)
1881 return false;
1882 switch (GET_CODE (op))
1884 case EQ:
1885 case NE:
1886 case LT:
1887 case LTU:
1888 case LE:
1889 case LEU:
1890 case GT:
1891 case GTU:
1892 case GE:
1893 case GEU:
1894 return true;
1895 default:
1896 return false;
1900 /* Return true if this is a comparison operator. This allows the use of
1901 MATCH_OPERATOR to recognize all the branch insns. */
1903 bool
1904 comparison_operator (rtx op, machine_mode mode)
1906 return ((mode == VOIDmode || GET_MODE (op) == mode)
1907 && COMPARISON_P (op));
1910 /* If BODY is an insn body that uses ASM_OPERANDS, return it. */
1913 extract_asm_operands (rtx body)
1915 rtx tmp;
1916 switch (GET_CODE (body))
1918 case ASM_OPERANDS:
1919 return body;
1921 case SET:
1922 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1923 tmp = SET_SRC (body);
1924 if (GET_CODE (tmp) == ASM_OPERANDS)
1925 return tmp;
1926 break;
1928 case PARALLEL:
1929 tmp = XVECEXP (body, 0, 0);
1930 if (GET_CODE (tmp) == ASM_OPERANDS)
1931 return tmp;
1932 if (GET_CODE (tmp) == SET)
1934 tmp = SET_SRC (tmp);
1935 if (GET_CODE (tmp) == ASM_OPERANDS)
1936 return tmp;
1938 break;
1940 default:
1941 break;
1943 return NULL;
1946 /* If BODY is an insn body that uses ASM_OPERANDS,
1947 return the number of operands (both input and output) in the insn.
1948 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
1949 return 0.
1950 Otherwise return -1. */
1953 asm_noperands (const_rtx body)
1955 rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
1956 int i, n_sets = 0;
1958 if (asm_op == NULL)
1960 if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) >= 2
1961 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
1963 /* body is [(asm_input ...) (clobber (reg ...))...]. */
1964 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1965 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1966 return -1;
1967 return 0;
1969 return -1;
1972 if (GET_CODE (body) == SET)
1973 n_sets = 1;
1974 else if (GET_CODE (body) == PARALLEL)
1976 if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
1978 /* Multiple output operands, or 1 output plus some clobbers:
1979 body is
1980 [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1981 /* Count backwards through CLOBBERs to determine number of SETs. */
1982 for (i = XVECLEN (body, 0); i > 0; i--)
1984 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1985 break;
1986 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1987 return -1;
1990 /* N_SETS is now number of output operands. */
1991 n_sets = i;
1993 /* Verify that all the SETs we have
1994 came from a single original asm_operands insn
1995 (so that invalid combinations are blocked). */
1996 for (i = 0; i < n_sets; i++)
1998 rtx elt = XVECEXP (body, 0, i);
1999 if (GET_CODE (elt) != SET)
2000 return -1;
2001 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
2002 return -1;
2003 /* If these ASM_OPERANDS rtx's came from different original insns
2004 then they aren't allowed together. */
2005 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
2006 != ASM_OPERANDS_INPUT_VEC (asm_op))
2007 return -1;
2010 else
2012 /* 0 outputs, but some clobbers:
2013 body is [(asm_operands ...) (clobber (reg ...))...]. */
2014 /* Make sure all the other parallel things really are clobbers. */
2015 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
2016 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
2017 return -1;
2021 return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
2022 + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
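/* As an illustration, for a source-level statement such as

     asm ("foo %1,%0" : "=r" (x) : "r" (y) : "cc");

   the insn body is typically a PARALLEL of one SET whose source is an
   ASM_OPERANDS plus a (clobber (reg ...)) for the "cc" clobber, and
   asm_noperands returns 2: one output plus one input, with the clobber
   not counted. */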
2025 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
2026 copy its operands (both input and output) into the vector OPERANDS,
2027 the locations of the operands within the insn into the vector OPERAND_LOCS,
2028 and the constraints for the operands into CONSTRAINTS.
2029 Write the modes of the operands into MODES.
2030 Write the location info into LOC.
2031 Return the assembler-template.
2032 If BODY is an insn body that uses ASM_INPUT with CLOBBERS in PARALLEL,
2033 return the basic assembly string.
2035 If LOC, MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
2036 we don't store that info. */
2038 const char *
2039 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
2040 const char **constraints, machine_mode *modes,
2041 location_t *loc)
2043 int nbase = 0, n, i;
2044 rtx asmop;
2046 switch (GET_CODE (body))
2048 case ASM_OPERANDS:
2049 /* Zero output asm: BODY is (asm_operands ...). */
2050 asmop = body;
2051 break;
2053 case SET:
2054 /* Single output asm: BODY is (set OUTPUT (asm_operands ...)). */
2055 asmop = SET_SRC (body);
2057 /* The output is in the SET.
2058 Its constraint is in the ASM_OPERANDS itself. */
2059 if (operands)
2060 operands[0] = SET_DEST (body);
2061 if (operand_locs)
2062 operand_locs[0] = &SET_DEST (body);
2063 if (constraints)
2064 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
2065 if (modes)
2066 modes[0] = GET_MODE (SET_DEST (body));
2067 nbase = 1;
2068 break;
2070 case PARALLEL:
2072 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
2074 asmop = XVECEXP (body, 0, 0);
2075 if (GET_CODE (asmop) == SET)
2077 asmop = SET_SRC (asmop);
2079 /* At least one output, plus some CLOBBERs. The outputs are in
2080 the SETs. Their constraints are in the ASM_OPERANDS itself. */
2081 for (i = 0; i < nparallel; i++)
2083 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
2084 break; /* Past last SET */
2085 gcc_assert (GET_CODE (XVECEXP (body, 0, i)) == SET);
2086 if (operands)
2087 operands[i] = SET_DEST (XVECEXP (body, 0, i));
2088 if (operand_locs)
2089 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
2090 if (constraints)
2091 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
2092 if (modes)
2093 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
2095 nbase = i;
2097 else if (GET_CODE (asmop) == ASM_INPUT)
2099 if (loc)
2100 *loc = ASM_INPUT_SOURCE_LOCATION (asmop);
2101 return XSTR (asmop, 0);
2103 break;
2106 default:
2107 gcc_unreachable ();
2110 n = ASM_OPERANDS_INPUT_LENGTH (asmop);
2111 for (i = 0; i < n; i++)
2113 if (operand_locs)
2114 operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
2115 if (operands)
2116 operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
2117 if (constraints)
2118 constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
2119 if (modes)
2120 modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
2122 nbase += n;
2124 n = ASM_OPERANDS_LABEL_LENGTH (asmop);
2125 for (i = 0; i < n; i++)
2127 if (operand_locs)
2128 operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
2129 if (operands)
2130 operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
2131 if (constraints)
2132 constraints[nbase + i] = "";
2133 if (modes)
2134 modes[nbase + i] = Pmode;
2137 if (loc)
2138 *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);
2140 return ASM_OPERANDS_TEMPLATE (asmop);
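/* For example, for an asm with a single "=r" output X and a single "r"
   input Y, decode_asm_operands stores the rtx for X in OPERANDS[0] with
   constraint "=r" and the rtx for Y in OPERANDS[1] with constraint "r",
   and returns the asm's template string. */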
2143 /* Parse inline assembly string STRING and determine which operands are
2144 referenced by % markers. For the first NOPERANDS operands, set USED[I]
2145 to true if operand I is referenced.
2147 This is intended to distinguish barrier-like asms such as:
2149 asm ("" : "=m" (...));
2151 from real references such as:
2153 asm ("sw\t$0, %0" : "=m" (...)); */
2155 void
2156 get_referenced_operands (const char *string, bool *used,
2157 unsigned int noperands)
2159 memset (used, 0, sizeof (bool) * noperands);
2160 const char *p = string;
2161 while (*p)
2162 switch (*p)
2164 case '%':
2165 p += 1;
2166 /* A letter followed by a digit indicates an operand number. */
2167 if (ISALPHA (p[0]) && ISDIGIT (p[1]))
2168 p += 1;
2169 if (ISDIGIT (*p))
2171 char *endptr;
2172 unsigned long opnum = strtoul (p, &endptr, 10);
2173 if (endptr != p && opnum < noperands)
2174 used[opnum] = true;
2175 p = endptr;
2177 else
2178 p += 1;
2179 break;
2181 default:
2182 p++;
2183 break;
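/* For example, in the template "sw\t$0, %z1" the sequence "%z1" marks
   operand 1 as referenced (the single modifier letter is skipped), while
   "%%", which merely escapes a literal '%', marks nothing. */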
2187 /* Check if an asm_operand matches its constraints.
2188 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
2191 asm_operand_ok (rtx op, const char *constraint, const char **constraints)
2193 int result = 0;
2194 bool incdec_ok = false;
2196 /* Use constrain_operands after reload. */
2197 gcc_assert (!reload_completed);
2199 /* Empty constraint string is the same as "X,...,X", i.e. X for as
2200 many alternatives as required to match the other operands. */
2201 if (*constraint == '\0')
2202 result = 1;
2204 while (*constraint)
2206 enum constraint_num cn;
2207 char c = *constraint;
2208 int len;
2209 switch (c)
2211 case ',':
2212 constraint++;
2213 continue;
2215 case '0': case '1': case '2': case '3': case '4':
2216 case '5': case '6': case '7': case '8': case '9':
2217 /* If caller provided constraints pointer, look up
2218 the matching constraint. Otherwise, our caller should have
2219 given us the proper matching constraint, but we can't
2220 actually fail the check if they didn't. Indicate that
2221 results are inconclusive. */
2222 if (constraints)
2224 char *end;
2225 unsigned long match;
2227 match = strtoul (constraint, &end, 10);
2228 if (!result)
2229 result = asm_operand_ok (op, constraints[match], NULL);
2230 constraint = (const char *) end;
2232 else
2235 constraint++;
2236 while (ISDIGIT (*constraint));
2237 if (! result)
2238 result = -1;
2240 continue;
2242 /* The rest of the compiler assumes that reloading the address
2243 of a MEM into a register will make it fit an 'o' constraint.
2244 That is, if it sees a MEM operand for an 'o' constraint,
2245 it assumes that (mem (base-reg)) will fit.
2247 That assumption fails on targets that don't have offsettable
2248 addresses at all. We therefore need to treat 'o' asm
2249 constraints as a special case and only accept operands that
2250 are already offsettable, thus proving that at least one
2251 offsettable address exists. */
2252 case 'o': /* offsettable */
2253 if (offsettable_nonstrict_memref_p (op))
2254 result = 1;
2255 break;
2257 case 'g':
2258 if (general_operand (op, VOIDmode))
2259 result = 1;
2260 break;
2262 case '<':
2263 case '>':
2264 /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
2265 to exist, excepting those that expand_call created. Further,
2266 on some machines which do not have generalized auto inc/dec,
2267 an inc/dec is not a memory_operand.
2269 Match any memory and hope things are resolved after reload. */
2270 incdec_ok = true;
2271 /* FALLTHRU */
2272 default:
2273 cn = lookup_constraint (constraint);
2274 rtx mem = NULL;
2275 switch (get_constraint_type (cn))
2277 case CT_REGISTER:
2278 if (!result
2279 && reg_class_for_constraint (cn) != NO_REGS
2280 && GET_MODE (op) != BLKmode
2281 && register_operand (op, VOIDmode))
2282 result = 1;
2283 break;
2285 case CT_CONST_INT:
2286 if (!result
2287 && CONST_INT_P (op)
2288 && insn_const_int_ok_for_constraint (INTVAL (op), cn))
2289 result = 1;
2290 break;
2292 case CT_MEMORY:
2293 case CT_RELAXED_MEMORY:
2294 mem = op;
2295 /* Fall through. */
2296 case CT_SPECIAL_MEMORY:
2297 /* Every memory operand can be reloaded to fit. */
2298 if (!mem)
2299 mem = extract_mem_from_operand (op);
2300 result = result || memory_operand (mem, VOIDmode);
2301 break;
2303 case CT_ADDRESS:
2304 /* Every address operand can be reloaded to fit. */
2305 result = result || address_operand (op, VOIDmode);
2306 break;
2308 case CT_FIXED_FORM:
2309 result = result || constraint_satisfied_p (op, cn);
2310 break;
2312 break;
2314 len = CONSTRAINT_LEN (c, constraint);
2316 constraint++;
2317 while (--len && *constraint && *constraint != ',');
2318 if (len)
2319 return 0;
2322 /* For operands without < or > constraints reject side-effects. */
2323 if (AUTO_INC_DEC && !incdec_ok && result && MEM_P (op))
2324 switch (GET_CODE (XEXP (op, 0)))
2326 case PRE_INC:
2327 case POST_INC:
2328 case PRE_DEC:
2329 case POST_DEC:
2330 case PRE_MODIFY:
2331 case POST_MODIFY:
2332 return 0;
2333 default:
2334 break;
2337 return result;
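/* For example, a plain MEM operand checked against the constraint string
   "r,m" yields a positive result (the "m" alternative accepts any memory
   operand before reload), while a matching-digit constraint such as "0"
   checked with CONSTRAINTS == NULL yields -1, i.e. an inconclusive
   answer. */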
2340 /* Given an rtx *P, if it is a sum containing an integer constant term,
2341 return the location (type rtx *) of the pointer to that constant term.
2342 Otherwise, return a null pointer. */
2344 rtx *
2345 find_constant_term_loc (rtx *p)
2347 rtx *tem;
2348 enum rtx_code code = GET_CODE (*p);
2350 /* If *P IS such a constant term, P is its location. */
2352 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
2353 || code == CONST)
2354 return p;
2356 /* Otherwise, if not a sum, it has no constant term. */
2358 if (GET_CODE (*p) != PLUS)
2359 return 0;
2361 /* If one of the summands is constant, return its location. */
2363 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2364 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2365 return p;
2367 /* Otherwise, check each summand for containing a constant term. */
2369 if (XEXP (*p, 0) != 0)
2371 tem = find_constant_term_loc (&XEXP (*p, 0));
2372 if (tem != 0)
2373 return tem;
2376 if (XEXP (*p, 1) != 0)
2378 tem = find_constant_term_loc (&XEXP (*p, 1));
2379 if (tem != 0)
2380 return tem;
2383 return 0;
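/* For example, if *P is (plus (reg) (const_int 8)) the result points at
   the const_int, whereas for (plus (reg) (reg)) there is no constant term
   and a null pointer is returned. */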
2386 /* Return true if OP is a memory reference whose address contains
2387 no side effects and remains valid after the addition of a positive
2388 integer less than the size of the object being referenced.
2390 We assume that the original address is valid and do not check it.
2392 This uses strict_memory_address_p as a subroutine, so
2393 don't use it before reload. */
2395 bool
2396 offsettable_memref_p (rtx op)
2398 return ((MEM_P (op))
2399 && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
2400 MEM_ADDR_SPACE (op)));
2403 /* Similar, but don't require a strictly valid mem ref:
2404 consider pseudo-regs valid as index or base regs. */
2406 bool
2407 offsettable_nonstrict_memref_p (rtx op)
2409 return ((MEM_P (op))
2410 && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
2411 MEM_ADDR_SPACE (op)));
2414 /* Return true if Y is a memory address which contains no side effects
2415 and would remain valid for address space AS after the addition of
2416 a positive integer less than the size of that mode.
2418 We assume that the original address is valid and do not check it.
2419 We do check that it is valid for narrower modes.
2421 If STRICTP is nonzero, we require a strictly valid address,
2422 for the sake of use in reload.cc. */
2424 bool
2425 offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
2426 addr_space_t as)
2428 enum rtx_code ycode = GET_CODE (y);
2429 rtx z;
2430 rtx y1 = y;
2431 rtx *y2;
2432 bool (*addressp) (machine_mode, rtx, addr_space_t, code_helper) =
2433 (strictp ? strict_memory_address_addr_space_p
2434 : memory_address_addr_space_p);
2435 poly_int64 mode_sz = GET_MODE_SIZE (mode);
2437 if (CONSTANT_ADDRESS_P (y))
2438 return true;
2440 /* Adjusting an offsettable address involves changing to a narrower mode.
2441 Make sure that's OK. */
2443 if (mode_dependent_address_p (y, as))
2444 return false;
2446 machine_mode address_mode = GET_MODE (y);
2447 if (address_mode == VOIDmode)
2448 address_mode = targetm.addr_space.address_mode (as);
2449 #ifdef POINTERS_EXTEND_UNSIGNED
2450 machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2451 #endif
2453 /* ??? How much offset does an offsettable BLKmode reference need?
2454 Clearly that depends on the situation in which it's being used.
2455 However, the current situation in which we test 0xffffffff is
2456 less than ideal. Caveat user. */
2457 if (known_eq (mode_sz, 0))
2458 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2460 /* If the expression contains a constant term,
2461 see if it remains valid when max possible offset is added. */
2463 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2465 bool good;
2467 y1 = *y2;
2468 *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
2469 /* Use QImode because an odd displacement may be automatically invalid
2470 for any wider mode. But it should be valid for a single byte. */
2471 good = (*addressp) (QImode, y, as, ERROR_MARK);
2473 /* In any case, restore old contents of memory. */
2474 *y2 = y1;
2475 return good;
2478 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
2479 return false;
2481 /* The offset added here is chosen as the maximum offset that
2482 any instruction could need to add when operating on something
2483 of the specified mode. We assume that if Y and Y+c are
2484 valid addresses then so is Y+d for all 0<d<c. adjust_address will
2485 go inside a LO_SUM here, so we do so as well. */
2486 if (GET_CODE (y) == LO_SUM
2487 && mode != BLKmode
2488 && known_le (mode_sz, GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT))
2489 z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
2490 plus_constant (address_mode, XEXP (y, 1),
2491 mode_sz - 1));
2492 #ifdef POINTERS_EXTEND_UNSIGNED
2493 /* Likewise for a ZERO_EXTEND from pointer_mode. */
2494 else if (POINTERS_EXTEND_UNSIGNED > 0
2495 && GET_CODE (y) == ZERO_EXTEND
2496 && GET_MODE (XEXP (y, 0)) == pointer_mode)
2497 z = gen_rtx_ZERO_EXTEND (address_mode,
2498 plus_constant (pointer_mode, XEXP (y, 0),
2499 mode_sz - 1));
2500 #endif
2501 else
2502 z = plus_constant (address_mode, y, mode_sz - 1);
2504 /* Use QImode because an odd displacement may be automatically invalid
2505 for any wider mode. But it should be valid for a single byte. */
2506 return (*addressp) (QImode, z, as, ERROR_MARK);
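/* As a concrete example, for SImode (4 bytes) and the address
   (plus (reg) (const_int 100)), the code above temporarily rewrites the
   constant term to form (plus (reg) (const_int 103)) and asks whether that
   is still a valid QImode address; if so, the original address is
   considered offsettable. */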
2509 /* Return true if ADDR is an address-expression whose effect depends
2510 on the mode of the memory reference it is used in.
2512 ADDRSPACE is the address space associated with the address.
2514 Autoincrement addressing is a typical example of mode-dependence
2515 because the amount of the increment depends on the mode. */
2517 bool
2518 mode_dependent_address_p (rtx addr, addr_space_t addrspace)
2520 /* Auto-increment addressing with anything other than post_modify
2521 or pre_modify always introduces a mode dependency. Catch such
2522 cases now instead of deferring to the target. */
2523 if (GET_CODE (addr) == PRE_INC
2524 || GET_CODE (addr) == POST_INC
2525 || GET_CODE (addr) == PRE_DEC
2526 || GET_CODE (addr) == POST_DEC)
2527 return true;
2529 return targetm.mode_dependent_address_p (addr, addrspace);
2532 /* Return true if boolean attribute ATTR is supported. */
2534 static bool
2535 have_bool_attr (bool_attr attr)
2537 switch (attr)
2539 case BA_ENABLED:
2540 return HAVE_ATTR_enabled;
2541 case BA_PREFERRED_FOR_SIZE:
2542 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2543 case BA_PREFERRED_FOR_SPEED:
2544 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2546 gcc_unreachable ();
2549 /* Return the value of ATTR for instruction INSN. */
2551 static bool
2552 get_bool_attr (rtx_insn *insn, bool_attr attr)
2554 switch (attr)
2556 case BA_ENABLED:
2557 return get_attr_enabled (insn);
2558 case BA_PREFERRED_FOR_SIZE:
2559 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2560 case BA_PREFERRED_FOR_SPEED:
2561 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2563 gcc_unreachable ();
2566 /* Like get_bool_attr_mask, but don't use the cache. */
2568 static alternative_mask
2569 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2571 /* Temporarily install enough information for get_attr_<foo> to assume
2572 that the insn operands are already cached. As above, the attribute
2573 mustn't depend on the values of operands, so we don't provide their
2574 real values here. */
2575 rtx_insn *old_insn = recog_data.insn;
2576 int old_alternative = which_alternative;
2578 recog_data.insn = insn;
2579 alternative_mask mask = ALL_ALTERNATIVES;
2580 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2581 for (int i = 0; i < n_alternatives; i++)
2583 which_alternative = i;
2584 if (!get_bool_attr (insn, attr))
2585 mask &= ~ALTERNATIVE_BIT (i);
2588 recog_data.insn = old_insn;
2589 which_alternative = old_alternative;
2590 return mask;
2593 /* Return the mask of operand alternatives that are allowed for INSN
2594 by boolean attribute ATTR. This mask depends only on INSN and on
2595 the current target; it does not depend on things like the values of
2596 operands. */
2598 static alternative_mask
2599 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2601 /* Quick exit for asms and for targets that don't use these attributes. */
2602 int code = INSN_CODE (insn);
2603 if (code < 0 || !have_bool_attr (attr))
2604 return ALL_ALTERNATIVES;
2606 /* Calling get_attr_<foo> can be expensive, so cache the mask
2607 for speed. */
2608 if (!this_target_recog->x_bool_attr_masks[code][attr])
2609 this_target_recog->x_bool_attr_masks[code][attr]
2610 = get_bool_attr_mask_uncached (insn, attr);
2611 return this_target_recog->x_bool_attr_masks[code][attr];
2614 /* Return the set of alternatives of INSN that are allowed by the current
2615 target. */
2617 alternative_mask
2618 get_enabled_alternatives (rtx_insn *insn)
2620 return get_bool_attr_mask (insn, BA_ENABLED);
2623 /* Return the set of alternatives of INSN that are allowed by the current
2624 target and are preferred for the current size/speed optimization
2625 choice. */
2627 alternative_mask
2628 get_preferred_alternatives (rtx_insn *insn)
2630 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2631 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2632 else
2633 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2636 /* Return the set of alternatives of INSN that are allowed by the current
2637 target and are preferred for the size/speed optimization choice
2638 associated with BB. Passing a separate BB is useful if INSN has not
2639 been emitted yet or if we are considering moving it to a different
2640 block. */
2642 alternative_mask
2643 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2645 if (optimize_bb_for_speed_p (bb))
2646 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2647 else
2648 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2651 /* Assert that the cached boolean attributes for INSN are still accurate.
2652 The backend is required to define these attributes in a way that only
2653 depends on the current target (rather than operands, compiler phase,
2654 etc.). */
2656 bool
2657 check_bool_attrs (rtx_insn *insn)
2659 int code = INSN_CODE (insn);
2660 if (code >= 0)
2661 for (int i = 0; i <= BA_LAST; ++i)
2663 enum bool_attr attr = (enum bool_attr) i;
2664 if (this_target_recog->x_bool_attr_masks[code][attr])
2665 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2666 == get_bool_attr_mask_uncached (insn, attr));
2668 return true;
2671 /* Like extract_insn, but save the extracted insn and don't extract it again
2672 when called again for the same insn, expecting that recog_data still contains
2673 valid information. This is used primarily by the gen_attr infrastructure,
2674 which often extracts the same insn repeatedly. */
2675 void
2676 extract_insn_cached (rtx_insn *insn)
2678 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2679 return;
2680 extract_insn (insn);
2681 recog_data.insn = insn;
2684 /* Do uncached extract_insn, constrain_operands and complain about failures.
2685 This should be used when extracting a pre-existing constrained instruction
2686 if the caller wants to know which alternative was chosen. */
2687 void
2688 extract_constrain_insn (rtx_insn *insn)
2690 extract_insn (insn);
2691 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2692 fatal_insn_not_found (insn);
2695 /* Do cached extract_insn, constrain_operands and complain about failures.
2696 Used by insn_attrtab. */
2697 void
2698 extract_constrain_insn_cached (rtx_insn *insn)
2700 extract_insn_cached (insn);
2701 if (which_alternative == -1
2702 && !constrain_operands (reload_completed,
2703 get_enabled_alternatives (insn)))
2704 fatal_insn_not_found (insn);
2707 /* Do cached constrain_operands on INSN and complain about failures. */
2708 bool
2709 constrain_operands_cached (rtx_insn *insn, int strict)
2711 if (which_alternative == -1)
2712 return constrain_operands (strict, get_enabled_alternatives (insn));
2713 else
2714 return true;
2717 /* Analyze INSN and fill in recog_data. */
2719 void
2720 extract_insn (rtx_insn *insn)
2722 int i;
2723 int icode;
2724 int noperands;
2725 rtx body = PATTERN (insn);
2727 recog_data.n_operands = 0;
2728 recog_data.n_alternatives = 0;
2729 recog_data.n_dups = 0;
2730 recog_data.is_asm = false;
2732 switch (GET_CODE (body))
2734 case USE:
2735 case CLOBBER:
2736 case ASM_INPUT:
2737 case ADDR_VEC:
2738 case ADDR_DIFF_VEC:
2739 case VAR_LOCATION:
2740 case DEBUG_MARKER:
2741 return;
2743 case SET:
2744 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2745 goto asm_insn;
2746 else
2747 goto normal_insn;
2748 case PARALLEL:
2749 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2750 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2751 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS
2752 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_INPUT)
2753 goto asm_insn;
2754 else
2755 goto normal_insn;
2756 case ASM_OPERANDS:
2757 asm_insn:
2758 recog_data.n_operands = noperands = asm_noperands (body);
2759 if (noperands >= 0)
2761 /* This insn is an `asm' with operands. */
2763 /* expand_asm_operands makes sure there aren't too many operands. */
2764 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2766 /* Now get the operand values and constraints out of the insn. */
2767 decode_asm_operands (body, recog_data.operand,
2768 recog_data.operand_loc,
2769 recog_data.constraints,
2770 recog_data.operand_mode, NULL);
2771 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2772 if (noperands > 0)
2774 const char *p = recog_data.constraints[0];
2775 recog_data.n_alternatives = 1;
2776 while (*p)
2777 recog_data.n_alternatives += (*p++ == ',');
2779 recog_data.is_asm = true;
2780 break;
2782 fatal_insn_not_found (insn);
2784 default:
2785 normal_insn:
2786 /* Ordinary insn: recognize it, get the operands via insn_extract
2787 and get the constraints. */
2789 icode = recog_memoized (insn);
2790 if (icode < 0)
2791 fatal_insn_not_found (insn);
2793 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2794 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2795 recog_data.n_dups = insn_data[icode].n_dups;
2797 insn_extract (insn);
2799 for (i = 0; i < noperands; i++)
2801 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2802 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2803 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2804 /* A VOIDmode match_operand gets its mode from the real operand. */
2805 if (recog_data.operand_mode[i] == VOIDmode)
2806 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2809 for (i = 0; i < noperands; i++)
2810 recog_data.operand_type[i]
2811 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2812 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2813 : OP_IN);
2815 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2817 recog_data.insn = NULL;
2818 which_alternative = -1;
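/* A typical consumer of this function looks roughly like:

     extract_insn (insn);
     for (int i = 0; i < recog_data.n_operands; i++)
       ... inspect recog_data.operand[i], recog_data.constraints[i],
	   recog_data.operand_mode[i] ...  */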
2821 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS
2822 operands, N_ALTERNATIVES alternatives and constraint strings
2823 CONSTRAINTS. OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries
2824 and CONSTRAINTS has N_OPERANDS entries. OPLOC should be passed in
2825 if the insn is an asm statement and preprocessing should take the
2826 asm operands into account, e.g. to determine whether they could be
2827 addresses in constraints that require addresses; it should then
2828 point to an array of pointers to each operand. */
2830 void
2831 preprocess_constraints (int n_operands, int n_alternatives,
2832 const char **constraints,
2833 operand_alternative *op_alt_base,
2834 rtx **oploc)
2836 for (int i = 0; i < n_operands; i++)
2838 int j;
2839 struct operand_alternative *op_alt;
2840 const char *p = constraints[i];
2842 op_alt = op_alt_base;
2844 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2846 op_alt[i].cl = NO_REGS;
2847 op_alt[i].constraint = p;
2848 op_alt[i].matches = -1;
2849 op_alt[i].matched = -1;
2851 if (*p == '\0' || *p == ',')
2853 op_alt[i].anything_ok = 1;
2854 continue;
2857 for (;;)
2859 char c = *p;
2860 if (c == '#')
2862 c = *++p;
2863 while (c != ',' && c != '\0');
2864 if (c == ',' || c == '\0')
2866 p++;
2867 break;
2870 switch (c)
2872 case '?':
2873 op_alt[i].reject += 6;
2874 break;
2875 case '!':
2876 op_alt[i].reject += 600;
2877 break;
2878 case '&':
2879 op_alt[i].earlyclobber = 1;
2880 break;
2882 case '0': case '1': case '2': case '3': case '4':
2883 case '5': case '6': case '7': case '8': case '9':
2885 char *end;
2886 op_alt[i].matches = strtoul (p, &end, 10);
2887 op_alt[op_alt[i].matches].matched = i;
2888 p = end;
2890 continue;
2892 case 'X':
2893 op_alt[i].anything_ok = 1;
2894 break;
2896 case 'g':
2897 op_alt[i].cl =
2898 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2899 break;
2901 default:
2902 enum constraint_num cn = lookup_constraint (p);
2903 enum reg_class cl;
2904 switch (get_constraint_type (cn))
2906 case CT_REGISTER:
2907 cl = reg_class_for_constraint (cn);
2908 if (cl != NO_REGS)
2909 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2910 break;
2912 case CT_CONST_INT:
2913 break;
2915 case CT_MEMORY:
2916 case CT_SPECIAL_MEMORY:
2917 case CT_RELAXED_MEMORY:
2918 op_alt[i].memory_ok = 1;
2919 break;
2921 case CT_ADDRESS:
2922 if (oploc && !address_operand (*oploc[i], VOIDmode))
2923 break;
2925 op_alt[i].is_address = 1;
2926 op_alt[i].cl
2927 = (reg_class_subunion
2928 [(int) op_alt[i].cl]
2929 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2930 ADDRESS, SCRATCH)]);
2931 break;
2933 case CT_FIXED_FORM:
2934 break;
2936 break;
2938 p += CONSTRAINT_LEN (c, p);
2944 /* Return an array of operand_alternative structures for
2945 instruction ICODE. */
2947 const operand_alternative *
2948 preprocess_insn_constraints (unsigned int icode)
2950 gcc_checking_assert (IN_RANGE (icode, 0, NUM_INSN_CODES - 1));
2951 if (this_target_recog->x_op_alt[icode])
2952 return this_target_recog->x_op_alt[icode];
2954 int n_operands = insn_data[icode].n_operands;
2955 if (n_operands == 0)
2956 return 0;
2957 /* Always provide at least one alternative so that which_op_alt ()
2958 works correctly. If the instruction has 0 alternatives (i.e. all
2959 constraint strings are empty) then each operand in this alternative
2960 will have anything_ok set. */
2961 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2962 int n_entries = n_operands * n_alternatives;
2964 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2965 const char **constraints = XALLOCAVEC (const char *, n_operands);
2967 for (int i = 0; i < n_operands; ++i)
2968 constraints[i] = insn_data[icode].operand[i].constraint;
2969 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt,
2970 NULL);
2972 this_target_recog->x_op_alt[icode] = op_alt;
2973 return op_alt;
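/* For example, a pass that wants the register class of operand 1 in
   alternative 2 of instruction ICODE could do, as a sketch:

     const operand_alternative *op_alt = preprocess_insn_constraints (icode);
     int n_ops = insn_data[icode].n_operands;
     enum reg_class cl = op_alt[2 * n_ops + 1].cl;  */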
2976 /* After calling extract_insn, you can use this function to extract some
2977 information from the constraint strings into a more usable form.
2978 The collected data is stored in recog_op_alt. */
2980 void
2981 preprocess_constraints (rtx_insn *insn)
2983 int icode = INSN_CODE (insn);
2984 if (icode >= 0)
2985 recog_op_alt = preprocess_insn_constraints (icode);
2986 else
2988 int n_operands = recog_data.n_operands;
2989 int n_alternatives = recog_data.n_alternatives;
2990 int n_entries = n_operands * n_alternatives;
2991 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2992 preprocess_constraints (n_operands, n_alternatives,
2993 recog_data.constraints, asm_op_alt,
2994 NULL);
2995 recog_op_alt = asm_op_alt;
2999 /* Check the operands of an insn against the insn's operand constraints
3000 and return true if they match any of the alternatives in ALTERNATIVES.
3002 The information about the insn's operands, constraints, operand modes
3003 etc. is obtained from the global variables set up by extract_insn.
3005 WHICH_ALTERNATIVE is set to a number which indicates which
3006 alternative of constraints was matched: 0 for the first alternative,
3007 1 for the next, etc.
3009 In addition, when two operands are required to match
3010 and it happens that the output operand is (reg) while the
3011 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
3012 make the output operand look like the input.
3013 This is because the output operand is the one the template will print.
3015 This is used in final, just before printing the assembler code and by
3016 the routines that determine an insn's attribute.
3018 If STRICT is positive, it means that we have been
3019 called after reload has been completed. In that case, we must
3020 do all checks strictly. If it is zero, it means that we have been called
3021 before reload has completed. In that case, we first try to see if we can
3022 find an alternative that matches strictly. If not, we try again, this
3023 time assuming that reload will fix up the insn. This provides a "best
3024 guess" for the alternative and is used to compute attributes of insns prior
3025 to reload. A negative value of STRICT is used for this internal call. */
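/* For example, if operand 0's constraint string is "=r,m" and operand 1's
   is "r,r", alternative 0 requires operand 0 to be a register while
   alternative 1 allows it to be in memory; the first alternative enabled
   in ALTERNATIVES whose constraints are all satisfied determines
   WHICH_ALTERNATIVE. */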
3027 struct funny_match
3029 int this_op, other;
3032 bool
3033 constrain_operands (int strict, alternative_mask alternatives)
3035 const char *constraints[MAX_RECOG_OPERANDS];
3036 int matching_operands[MAX_RECOG_OPERANDS];
3037 int earlyclobber[MAX_RECOG_OPERANDS];
3038 int c;
3040 struct funny_match funny_match[MAX_RECOG_OPERANDS];
3041 int funny_match_index;
3043 which_alternative = 0;
3044 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
3045 return true;
3047 for (c = 0; c < recog_data.n_operands; c++)
3048 constraints[c] = recog_data.constraints[c];
3052 int seen_earlyclobber_at = -1;
3053 int opno;
3054 bool lose = false;
3055 funny_match_index = 0;
3057 if (!TEST_BIT (alternatives, which_alternative))
3059 int i;
3061 for (i = 0; i < recog_data.n_operands; i++)
3062 constraints[i] = skip_alternative (constraints[i]);
3064 which_alternative++;
3065 continue;
3068 for (opno = 0; opno < recog_data.n_operands; opno++)
3069 matching_operands[opno] = -1;
3071 for (opno = 0; opno < recog_data.n_operands; opno++)
3073 rtx op = recog_data.operand[opno];
3074 machine_mode mode = GET_MODE (op);
3075 const char *p = constraints[opno];
3076 int offset = 0;
3077 bool win = false;
3078 int val;
3079 int len;
3081 earlyclobber[opno] = 0;
3083 /* A unary operator may be accepted by the predicate, but it
3084 is irrelevant for matching constraints. */
3085 /* For special_memory_operand, there could be a memory operand inside,
3086 and it would cause a mismatch for constraint_satisfied_p. */
3087 if (UNARY_P (op) && op == extract_mem_from_operand (op))
3088 op = XEXP (op, 0);
3090 if (GET_CODE (op) == SUBREG)
3092 if (REG_P (SUBREG_REG (op))
3093 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
3094 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
3095 GET_MODE (SUBREG_REG (op)),
3096 SUBREG_BYTE (op),
3097 GET_MODE (op));
3098 op = SUBREG_REG (op);
3101 /* An empty constraint or empty alternative
3102 allows anything which matched the pattern. */
3103 if (*p == 0 || *p == ',')
3104 win = true;
3107 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
3109 case '\0':
3110 len = 0;
3111 break;
3112 case ',':
3113 c = '\0';
3114 break;
3116 case '#':
3117 /* Ignore rest of this alternative as far as
3118 constraint checking is concerned. */
3120 p++;
3121 while (*p && *p != ',');
3122 len = 0;
3123 break;
3125 case '&':
3126 earlyclobber[opno] = 1;
3127 if (seen_earlyclobber_at < 0)
3128 seen_earlyclobber_at = opno;
3129 break;
3131 case '0': case '1': case '2': case '3': case '4':
3132 case '5': case '6': case '7': case '8': case '9':
3134 /* This operand must be the same as a previous one.
3135 This kind of constraint is used for instructions such
3136 as add when they take only two operands.
3138 Note that the lower-numbered operand is passed first.
3140 If we are not testing strictly, assume that this
3141 constraint will be satisfied. */
3143 char *end;
3144 int match;
3146 match = strtoul (p, &end, 10);
3147 p = end;
3149 if (strict < 0)
3150 val = 1;
3151 else
3153 rtx op1 = recog_data.operand[match];
3154 rtx op2 = recog_data.operand[opno];
3156 /* A unary operator may be accepted by the predicate,
3157 but it is irrelevant for matching constraints. */
3158 if (UNARY_P (op1))
3159 op1 = XEXP (op1, 0);
3160 if (UNARY_P (op2))
3161 op2 = XEXP (op2, 0);
3163 val = operands_match_p (op1, op2);
3166 matching_operands[opno] = match;
3167 matching_operands[match] = opno;
3169 if (val != 0)
3170 win = true;
3172 /* If output is *x and input is *--x, arrange later
3173 to change the output to *--x as well, since the
3174 output op is the one that will be printed. */
3175 if (val == 2 && strict > 0)
3177 funny_match[funny_match_index].this_op = opno;
3178 funny_match[funny_match_index++].other = match;
3181 len = 0;
3182 break;
3184 case 'p':
3185 /* p is used for address_operands. When we are called by
3186 gen_reload, no one will have checked that the address is
3187 strictly valid, i.e., that all pseudos requiring hard regs
3188 have gotten them. We also want to make sure we have a
3189 valid mode. */
3190 if ((GET_MODE (op) == VOIDmode
3191 || SCALAR_INT_MODE_P (GET_MODE (op)))
3192 && (strict <= 0
3193 || (strict_memory_address_p
3194 (recog_data.operand_mode[opno], op))))
3195 win = true;
3196 break;
3198 /* No need to check general_operand again;
3199 it was done in insn-recog.cc. Well, except that reload
3200 doesn't check the validity of its replacements, but
3201 that should only matter when there's a bug. */
3202 case 'g':
3203 /* Anything goes unless it is a REG and really has a hard reg
3204 but the hard reg is not in the class GENERAL_REGS. */
3205 if (REG_P (op))
3207 if (strict < 0
3208 || GENERAL_REGS == ALL_REGS
3209 || (reload_in_progress
3210 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
3211 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
3212 win = true;
3214 else if (strict < 0 || general_operand (op, mode))
3215 win = true;
3216 break;
3218 default:
3220 enum constraint_num cn = lookup_constraint (p);
3221 enum reg_class cl = reg_class_for_constraint (cn);
3222 if (cl != NO_REGS)
3224 if (strict < 0
3225 || (strict == 0
3226 && REG_P (op)
3227 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
3228 || (strict == 0 && GET_CODE (op) == SCRATCH)
3229 || (REG_P (op)
3230 && reg_fits_class_p (op, cl, offset, mode)))
3231 win = true;
3234 else if (constraint_satisfied_p (op, cn))
3235 win = true;
3237 else if ((insn_extra_memory_constraint (cn)
3238 || insn_extra_relaxed_memory_constraint (cn))
3239 /* Every memory operand can be reloaded to fit. */
3240 && ((strict < 0 && MEM_P (op))
3241 /* Before reload, accept what reload can turn
3242 into a mem. */
3243 || (strict < 0 && CONSTANT_P (op))
3244 /* Before reload, accept a pseudo or hard register,
3245 since LRA can turn it into a mem. */
3246 || (strict < 0 && targetm.lra_p () && REG_P (op))
3247 /* During reload, accept a pseudo */
3248 || (reload_in_progress && REG_P (op)
3249 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
3250 win = true;
3251 else if (insn_extra_address_constraint (cn)
3252 /* Every address operand can be reloaded to fit. */
3253 && strict < 0)
3254 win = true;
3255 /* Cater to architectures like IA-64 that define extra memory
3256 constraints without using define_memory_constraint. */
3257 else if (reload_in_progress
3258 && REG_P (op)
3259 && REGNO (op) >= FIRST_PSEUDO_REGISTER
3260 && reg_renumber[REGNO (op)] < 0
3261 && reg_equiv_mem (REGNO (op)) != 0
3262 && constraint_satisfied_p
3263 (reg_equiv_mem (REGNO (op)), cn))
3264 win = true;
3265 break;
3268 while (p += len, c);
3270 constraints[opno] = p;
3271 /* If this operand did not win somehow,
3272 this alternative loses. */
3273 if (! win)
3274 lose = true;
3276 /* This alternative won; the operands are ok.
3277 Change whichever operands this alternative says to change. */
3278 if (! lose)
3280 int opno, eopno;
3282 /* See if any earlyclobber operand conflicts with some other
3283 operand. */
3285 if (strict > 0 && seen_earlyclobber_at >= 0)
3286 for (eopno = seen_earlyclobber_at;
3287 eopno < recog_data.n_operands;
3288 eopno++)
3289 /* Ignore earlyclobber operands now in memory,
3290 because we would often report failure when we have
3291 two memory operands, one of which was formerly a REG. */
3292 if (earlyclobber[eopno]
3293 && REG_P (recog_data.operand[eopno]))
3294 for (opno = 0; opno < recog_data.n_operands; opno++)
3295 if ((MEM_P (recog_data.operand[opno])
3296 || recog_data.operand_type[opno] != OP_OUT)
3297 && opno != eopno
3298 /* Ignore things like match_operator operands. */
3299 && *recog_data.constraints[opno] != 0
3300 && ! (matching_operands[opno] == eopno
3301 && operands_match_p (recog_data.operand[opno],
3302 recog_data.operand[eopno]))
3303 && ! safe_from_earlyclobber (recog_data.operand[opno],
3304 recog_data.operand[eopno]))
3305 lose = true;
3307 if (! lose)
3309 while (--funny_match_index >= 0)
3311 recog_data.operand[funny_match[funny_match_index].other]
3312 = recog_data.operand[funny_match[funny_match_index].this_op];
3315 /* For operands without < or > constraints reject side-effects. */
3316 if (AUTO_INC_DEC && recog_data.is_asm)
3318 for (opno = 0; opno < recog_data.n_operands; opno++)
3319 if (MEM_P (recog_data.operand[opno]))
3320 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
3322 case PRE_INC:
3323 case POST_INC:
3324 case PRE_DEC:
3325 case POST_DEC:
3326 case PRE_MODIFY:
3327 case POST_MODIFY:
3328 if (strchr (recog_data.constraints[opno], '<') == NULL
3329 && strchr (recog_data.constraints[opno], '>')
3330 == NULL)
3331 return false;
3332 break;
3333 default:
3334 break;
3338 return true;
3342 which_alternative++;
3344 while (which_alternative < recog_data.n_alternatives);
3346 which_alternative = -1;
3347 /* If we are about to reject this, but we are not to test strictly,
3348 try a very loose test. Only return failure if it fails also. */
3349 if (strict == 0)
3350 return constrain_operands (-1, alternatives);
3351 else
3352 return false;
3355 /* Return true iff OPERAND (assumed to be a REG rtx)
3356 is a hard reg in class CLASS when its regno is offset by OFFSET
3357 and changed to mode MODE.
3358 If REG occupies multiple hard regs, all of them must be in CLASS. */
3360 bool
3361 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
3362 machine_mode mode)
3364 unsigned int regno = REGNO (operand);
3366 if (cl == NO_REGS)
3367 return false;
3369 /* Regno must not be a pseudo register. Offset may be negative. */
3370 return (HARD_REGISTER_NUM_P (regno)
3371 && HARD_REGISTER_NUM_P (regno + offset)
3372 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
3373 regno + offset));
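/* For example, on a target where DImode occupies two hard registers,
   reg_fits_class_p (OPERAND, CL, 0, DImode) requires both REGNO (OPERAND)
   and REGNO (OPERAND) + 1 to be members of CL. */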
3376 /* Split a single instruction. Helper function for split_all_insns and
3377 split_all_insns_noflow. Return last insn in the sequence if successful,
3378 or NULL if unsuccessful. */
3380 static rtx_insn *
3381 split_insn (rtx_insn *insn)
3383 /* Split insns here to get max fine-grain parallelism. */
3384 rtx_insn *first = PREV_INSN (insn);
3385 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
3386 rtx insn_set, last_set, note;
3388 if (last == insn)
3389 return NULL;
3391 /* If the original instruction was a single set that was known to be
3392 equivalent to a constant, see if we can say the same about the last
3393 instruction in the split sequence. The two instructions must set
3394 the same destination. */
3395 insn_set = single_set (insn);
3396 if (insn_set)
3398 last_set = single_set (last);
3399 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
3401 note = find_reg_equal_equiv_note (insn);
3402 if (note && CONSTANT_P (XEXP (note, 0)))
3403 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
3404 else if (CONSTANT_P (SET_SRC (insn_set)))
3405 set_unique_reg_note (last, REG_EQUAL,
3406 copy_rtx (SET_SRC (insn_set)));
3410 /* try_split returns the NOTE that INSN became. */
3411 SET_INSN_DELETED (insn);
3413 /* ??? Coddle to md files that generate subregs in post-reload
3414 splitters instead of computing the proper hard register. */
3415 if (reload_completed && first != last)
3417 first = NEXT_INSN (first);
3418 for (;;)
3420 if (INSN_P (first))
3421 cleanup_subreg_operands (first);
3422 if (first == last)
3423 break;
3424 first = NEXT_INSN (first);
3428 return last;
3431 /* Split all insns in the function. */
3433 void
3434 split_all_insns (void)
3436 bool changed;
3437 bool need_cfg_cleanup = false;
3438 basic_block bb;
3440 auto_sbitmap blocks (last_basic_block_for_fn (cfun));
3441 bitmap_clear (blocks);
3442 changed = false;
3444 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3446 rtx_insn *insn, *next;
3447 bool finish = false;
3449 rtl_profile_for_bb (bb);
3450 for (insn = BB_HEAD (bb); !finish ; insn = next)
3452 /* Can't use `next_real_insn' because that might go across
3453 CODE_LABELS and short-out basic blocks. */
3454 next = NEXT_INSN (insn);
3455 finish = (insn == BB_END (bb));
3457 /* If INSN has a REG_EH_REGION note and we split INSN, the
3458 resulting split may not have/need REG_EH_REGION notes.
3460 If that happens and INSN was the last reference to the
3461 given EH region, then the EH region will become unreachable.
3462 We cannot leave the unreachable blocks in the CFG as that
3463 will trigger a checking failure.
3465 So track if INSN has a REG_EH_REGION note. If so and we
3466 split INSN, then trigger a CFG cleanup. */
3467 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3468 if (INSN_P (insn))
3470 rtx set = single_set (insn);
3472 /* Don't split no-op move insns. These should silently
3473 disappear later in final. Splitting such insns would
3474 break the code that handles LIBCALL blocks. */
3475 if (set && set_noop_p (set))
3477 /* Nops get in the way while scheduling, so delete them
3478 now if register allocation has already been done. It
3479 is too risky to try to do this before register
3480 allocation, and there are unlikely to be very many
3481 nops then anyways. */
3482 if (reload_completed)
3483 delete_insn_and_edges (insn);
3484 if (note)
3485 need_cfg_cleanup = true;
3487 else
3489 if (split_insn (insn))
3491 bitmap_set_bit (blocks, bb->index);
3492 changed = true;
3493 if (note)
3494 need_cfg_cleanup = true;
3501 default_rtl_profile ();
3502 if (changed)
3504 find_many_sub_basic_blocks (blocks);
3506 /* Splitting could drop an REG_EH_REGION if it potentially
3507 trapped in its original form, but does not in its split
3508 form. Consider a FLOAT_TRUNCATE which splits into a memory
3509 store/load pair and -fnon-call-exceptions. */
3510 if (need_cfg_cleanup)
3511 cleanup_cfg (0);
3514 checking_verify_flow_info ();
3517 /* Same as split_all_insns, but do not expect CFG to be available.
3518 Used by machine dependent reorg passes. */
3520 void
3521 split_all_insns_noflow (void)
3523 rtx_insn *next, *insn;
3525 for (insn = get_insns (); insn; insn = next)
3527 next = NEXT_INSN (insn);
3528 if (INSN_P (insn))
3530 /* Don't split no-op move insns. These should silently
3531 disappear later in final. Splitting such insns would
3532 break the code that handles LIBCALL blocks. */
3533 rtx set = single_set (insn);
3534 if (set && set_noop_p (set))
3536 /* Nops get in the way while scheduling, so delete them
3537 now if register allocation has already been done. It
3538 is too risky to try to do this before register
3539 allocation, and there are unlikely to be very many
3540 nops then anyways.
3542 ??? Should we use delete_insn when the CFG isn't valid? */
3543 if (reload_completed)
3544 delete_insn_and_edges (insn);
3546 else
3547 split_insn (insn);
3552 struct peep2_insn_data
3554 rtx_insn *insn;
3555 regset live_before;
3558 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3559 static int peep2_current;
3561 static bool peep2_do_rebuild_jump_labels;
3562 static bool peep2_do_cleanup_cfg;
3564 /* The number of instructions available to match a peep2. */
3565 int peep2_current_count;
3567 /* A marker indicating the last insn of the block. The live_before regset
3568 for this element is correct, indicating DF_LIVE_OUT for the block. */
3569 #define PEEP2_EOB invalid_insn_rtx
3571 /* Wrap N to fit into the peep2_insn_data buffer. */
3573 static int
3574 peep2_buf_position (int n)
3576 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3577 n -= MAX_INSNS_PER_PEEP2 + 1;
3578 return n;
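/* For example, taking MAX_INSNS_PER_PEEP2 to be 5 purely for illustration,
   the buffer has 6 slots and peep2_buf_position (7) yields 1. */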
3581 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3582 does not exist. Used by the recognizer to find the next insn to match
3583 in a multi-insn pattern. */
3585 rtx_insn *
3586 peep2_next_insn (int n)
3588 gcc_assert (n <= peep2_current_count);
3590 n = peep2_buf_position (peep2_current + n);
3592 return peep2_insn_data[n].insn;
3595 /* Return true if REGNO is dead before the Nth non-note insn
3596 after `current'. */
3598 bool
3599 peep2_regno_dead_p (int ofs, int regno)
3601 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3603 ofs = peep2_buf_position (peep2_current + ofs);
3605 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3607 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3610 /* Similarly for a REG. */
3612 bool
3613 peep2_reg_dead_p (int ofs, rtx reg)
3615 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3617 ofs = peep2_buf_position (peep2_current + ofs);
3619 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3621 unsigned int end_regno = END_REGNO (reg);
3622 for (unsigned int regno = REGNO (reg); regno < end_regno; ++regno)
3623 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno))
3624 return false;
3625 return true;
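/* These two predicates are what a target's define_peephole2 condition
   typically uses; e.g. a condition might test peep2_reg_dead_p (1,
   operands[3]) to require that the register operand is dead before the
   second matched insn. */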
3628 /* Regno offset to be used in the register search. */
3629 static int search_ofs;
3631 /* Try to find a hard register of mode MODE, matching the register class in
3632 CLASS_STR, which is available at the beginning of the FROM'th non-note insn
3633 after `current' and remains available up to the TO'th such insn (FROM and
3634 TO are offsets into the peephole buffer, in the same sense as for
3635 peep2_next_insn).
3636 Registers that already have bits set in REG_SET will not be considered.
3638 If an appropriate register is available, it will be returned and the
3639 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3640 returned. */
3643 peep2_find_free_register (int from, int to, const char *class_str,
3644 machine_mode mode, HARD_REG_SET *reg_set)
3646 enum reg_class cl;
3647 HARD_REG_SET live;
3648 df_ref def;
3649 int i;
3651 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3652 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3654 from = peep2_buf_position (peep2_current + from);
3655 to = peep2_buf_position (peep2_current + to);
3657 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3658 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3660 while (from != to)
3662 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3664 /* Don't use registers set or clobbered by the insn. */
3665 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3666 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3668 from = peep2_buf_position (from + 1);
3671 cl = reg_class_for_constraint (lookup_constraint (class_str));
3673 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3675 int raw_regno, regno, j;
3676 bool success;
3678 /* Distribute the free registers as much as possible. */
3679 raw_regno = search_ofs + i;
3680 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3681 raw_regno -= FIRST_PSEUDO_REGISTER;
3682 #ifdef REG_ALLOC_ORDER
3683 regno = reg_alloc_order[raw_regno];
3684 #else
3685 regno = raw_regno;
3686 #endif
3688 /* Can it support the mode we need? */
3689 if (!targetm.hard_regno_mode_ok (regno, mode))
3690 continue;
3692 success = true;
3693 for (j = 0; success && j < hard_regno_nregs (regno, mode); j++)
3695 /* Don't allocate fixed registers. */
3696 if (fixed_regs[regno + j])
3698 success = false;
3699 break;
3701 /* Don't allocate global registers. */
3702 if (global_regs[regno + j])
3704 success = false;
3705 break;
3707 /* Make sure the register is of the right class. */
3708 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3710 success = false;
3711 break;
3713 /* And that we don't create an extra save/restore. */
3714 if (! crtl->abi->clobbers_full_reg_p (regno + j)
3715 && ! df_regs_ever_live_p (regno + j))
3717 success = false;
3718 break;
3721 if (! targetm.hard_regno_scratch_ok (regno + j))
3723 success = false;
3724 break;
3727 /* And we don't clobber traceback for noreturn functions. */
3728 if ((regno + j == FRAME_POINTER_REGNUM
3729 || regno + j == HARD_FRAME_POINTER_REGNUM)
3730 && (! reload_completed || frame_pointer_needed))
3732 success = false;
3733 break;
3736 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3737 || TEST_HARD_REG_BIT (live, regno + j))
3739 success = false;
3740 break;
3744 if (success)
3746 add_to_hard_reg_set (reg_set, mode, regno);
3748 /* Start the next search with the next register. */
3749 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3750 raw_regno = 0;
3751 search_ofs = raw_regno;
3753 return gen_rtx_REG (mode, regno);
3757 search_ofs = 0;
3758 return NULL_RTX;
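/* As a sketch of a typical use, a two-insn peephole that needs a scratch
   register free across both insns might call

     peep2_find_free_register (0, 1, "r", SImode, &used_regs)

   where USED_REGS is a HARD_REG_SET of registers that have already been
   taken (USED_REGS and the constant SImode are illustrative only). */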
3761 /* Forget all currently tracked instructions, only remember current
3762 LIVE regset. */
3764 static void
3765 peep2_reinit_state (regset live)
3767 int i;
3769 /* Indicate that all slots except the last hold invalid data. */
3770 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3771 peep2_insn_data[i].insn = NULL;
3772 peep2_current_count = 0;
3774 /* Indicate that the last slot contains live_after data. */
3775 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3776 peep2_current = MAX_INSNS_PER_PEEP2;
3778 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3781 /* Copy the frame-related info of insn OLD_INSN to the single
3782 insn NEW_INSN that was obtained by splitting OLD_INSN. */
3784 void
3785 copy_frame_info_to_split_insn (rtx_insn *old_insn, rtx_insn *new_insn)
3787 bool any_note = false;
3788 rtx note;
3790 if (!RTX_FRAME_RELATED_P (old_insn))
3791 return;
3793 RTX_FRAME_RELATED_P (new_insn) = 1;
3795 /* Allow the backend to fill in a note during the split. */
3796 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3797 switch (REG_NOTE_KIND (note))
3799 case REG_FRAME_RELATED_EXPR:
3800 case REG_CFA_DEF_CFA:
3801 case REG_CFA_ADJUST_CFA:
3802 case REG_CFA_OFFSET:
3803 case REG_CFA_REGISTER:
3804 case REG_CFA_EXPRESSION:
3805 case REG_CFA_RESTORE:
3806 case REG_CFA_SET_VDRAP:
3807 any_note = true;
3808 break;
3809 default:
3810 break;
3813 /* If the backend didn't supply a note, copy one over. */
3814 if (!any_note)
3815 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3816 switch (REG_NOTE_KIND (note))
3818 case REG_FRAME_RELATED_EXPR:
3819 case REG_CFA_DEF_CFA:
3820 case REG_CFA_ADJUST_CFA:
3821 case REG_CFA_OFFSET:
3822 case REG_CFA_REGISTER:
3823 case REG_CFA_EXPRESSION:
3824 case REG_CFA_RESTORE:
3825 case REG_CFA_SET_VDRAP:
3826 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3827 any_note = true;
3828 break;
3829 default:
3830 break;
3833 /* If there still isn't a note, make sure the unwind info sees the
3834 same expression as before the split. */
3835 if (!any_note)
3837 rtx old_set, new_set;
3839 /* The old insn had better have been simple, or annotated. */
3840 old_set = single_set (old_insn);
3841 gcc_assert (old_set != NULL);
3843 new_set = single_set (new_insn);
3844 if (!new_set || !rtx_equal_p (new_set, old_set))
3845 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3848 /* Copy prologue/epilogue status. This is required in order to keep
3849 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3850 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3853 /* While scanning basic block BB, we found a match of length MATCH_LEN + 1,
3854 starting at INSN. Perform the replacement, removing the old insns and
3855 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3856 if the replacement is rejected. */
3858 static rtx_insn *
3859 peep2_attempt (basic_block bb, rtx_insn *insn, int match_len, rtx_insn *attempt)
3861 int i;
3862 rtx_insn *last, *before_try, *x;
3863 rtx eh_note, as_note;
3864 rtx_insn *old_insn;
3865 rtx_insn *new_insn;
3866 bool was_call = false;
3868 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3869 match more than one insn, or to be split into more than one insn. */
3870 old_insn = peep2_insn_data[peep2_current].insn;
3871 if (RTX_FRAME_RELATED_P (old_insn))
3873 if (match_len != 0)
3874 return NULL;
3876 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3877 may be in the stream for the purpose of register allocation. */
3878 if (active_insn_p (attempt))
3879 new_insn = attempt;
3880 else
3881 new_insn = next_active_insn (attempt);
3882 if (next_active_insn (new_insn))
3883 return NULL;
3885 /* We have a 1-1 replacement. Copy over any frame-related info. */
3886 copy_frame_info_to_split_insn (old_insn, new_insn);
3889 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3890 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3891 cfg-related call notes. */
3892 for (i = 0; i <= match_len; ++i)
3894 int j;
3895 rtx note;
3897 j = peep2_buf_position (peep2_current + i);
3898 old_insn = peep2_insn_data[j].insn;
3899 if (!CALL_P (old_insn))
3900 continue;
3901 was_call = true;
3903 new_insn = attempt;
3904 while (new_insn != NULL_RTX)
3906 if (CALL_P (new_insn))
3907 break;
3908 new_insn = NEXT_INSN (new_insn);
3911 gcc_assert (new_insn != NULL_RTX);
3913 CALL_INSN_FUNCTION_USAGE (new_insn)
3914 = CALL_INSN_FUNCTION_USAGE (old_insn);
3915 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3917 for (note = REG_NOTES (old_insn);
3918 note;
3919 note = XEXP (note, 1))
3920 switch (REG_NOTE_KIND (note))
3922 case REG_NORETURN:
3923 case REG_SETJMP:
3924 case REG_TM:
3925 case REG_CALL_NOCF_CHECK:
3926 add_reg_note (new_insn, REG_NOTE_KIND (note),
3927 XEXP (note, 0));
3928 break;
3929 default:
3930 /* Discard all other reg notes. */
3931 break;
3934 /* Croak if there is another call in the sequence. */
3935 while (++i <= match_len)
3937 j = peep2_buf_position (peep2_current + i);
3938 old_insn = peep2_insn_data[j].insn;
3939 gcc_assert (!CALL_P (old_insn));
3941 break;
3944 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3945 move those notes over to the new sequence. */
3946 as_note = NULL;
3947 for (i = match_len; i >= 0; --i)
3949 int j = peep2_buf_position (peep2_current + i);
3950 old_insn = peep2_insn_data[j].insn;
3952 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3953 if (as_note)
3954 break;
3957 i = peep2_buf_position (peep2_current + match_len);
3958 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3960 /* Replace the old sequence with the new. */
3961 rtx_insn *peepinsn = peep2_insn_data[i].insn;
3962 last = emit_insn_after_setloc (attempt,
3963 peep2_insn_data[i].insn,
3964 INSN_LOCATION (peepinsn));
3965 if (JUMP_P (peepinsn) && JUMP_P (last))
3966 CROSSING_JUMP_P (last) = CROSSING_JUMP_P (peepinsn);
3967 before_try = PREV_INSN (insn);
3968 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3970 /* Re-insert the EH_REGION notes. */
3971 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3973 edge eh_edge;
3974 edge_iterator ei;
3976 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3977 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3978 break;
3980 if (eh_note)
3981 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3983 if (eh_edge)
3984 for (x = last; x != before_try; x = PREV_INSN (x))
3985 if (x != BB_END (bb)
3986 && (can_throw_internal (x)
3987 || can_nonlocal_goto (x)))
3989 edge nfte, nehe;
3990 int flags;
3992 nfte = split_block (bb, x);
3993 flags = (eh_edge->flags
3994 & (EDGE_EH | EDGE_ABNORMAL));
3995 if (CALL_P (x))
3996 flags |= EDGE_ABNORMAL_CALL;
3997 nehe = make_edge (nfte->src, eh_edge->dest,
3998 flags);
4000 nehe->probability = eh_edge->probability;
4001 nfte->probability = nehe->probability.invert ();
4003 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
4004 bb = nfte->src;
4005 eh_edge = nehe;
4008 /* The replacement may have turned a possibly trapping insn into a
4009 non-trapping one. Zap dummy outgoing edges. */
4010 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
4013 /* Re-insert the ARGS_SIZE notes. */
4014 if (as_note)
4015 fixup_args_size_notes (before_try, last, get_args_size (as_note));
4017 /* Scan the new insns for embedded side effects and add appropriate
4018 REG_INC notes. */
4019 if (AUTO_INC_DEC)
4020 for (x = last; x != before_try; x = PREV_INSN (x))
4021 if (NONDEBUG_INSN_P (x))
4022 add_auto_inc_notes (x, PATTERN (x));
4024 /* If we generated a jump instruction, it won't have
4025 JUMP_LABEL set. Recompute after we're done. */
4026 for (x = last; x != before_try; x = PREV_INSN (x))
4027 if (JUMP_P (x))
4029 peep2_do_rebuild_jump_labels = true;
4030 break;
4033 return last;
4036 /* After performing a replacement in basic block BB, fix up the life
4037 information in our buffer. LAST is the last of the insns that we
4038 emitted as a replacement. PREV is the insn before the start of
4039 the replacement. MATCH_LEN + 1 is the number of instructions that were
4040 matched, and which now need to be replaced in the buffer. */
4042 static void
4043 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
4044 rtx_insn *prev)
4046 int i = peep2_buf_position (peep2_current + match_len + 1);
4047 rtx_insn *x;
4048 regset_head live;
4050 INIT_REG_SET (&live);
4051 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
4053 gcc_assert (peep2_current_count >= match_len + 1);
4054 peep2_current_count -= match_len + 1;
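/* Walk backwards from LAST to PREV, rescanning each insn for df and
   refilling the ring buffer with as many of the new insns as fit,
   recomputing the live-before sets as we go.  */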
4056 x = last;
4059 if (INSN_P (x))
4061 df_insn_rescan (x);
4062 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
4064 peep2_current_count++;
4065 if (--i < 0)
4066 i = MAX_INSNS_PER_PEEP2;
4067 peep2_insn_data[i].insn = x;
4068 df_simulate_one_insn_backwards (bb, x, &live);
4069 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
4072 x = PREV_INSN (x);
4074 while (x != prev);
4075 CLEAR_REG_SET (&live);
4077 peep2_current = i;
4080 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
4081 Return true if we added it, false otherwise. The caller will try to match
4082 peepholes against the buffer if we return false; otherwise it will try to
4083 add more instructions to the buffer. */
4085 static bool
4086 peep2_fill_buffer (basic_block bb, rtx_insn *insn, regset live)
4088 int pos;
4090 /* Once we have filled the maximum number of insns the buffer can hold,
4091 allow the caller to match the insns against peepholes. We wait until
4092 the buffer is full in case the target has similar peepholes of different
4093 length; we always want to match the longest if possible. */
4094 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
4095 return false;
4097 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
4098 any other pattern, lest it change the semantics of the frame info. */
4099 if (RTX_FRAME_RELATED_P (insn))
4101 /* Let the buffer drain first. */
4102 if (peep2_current_count > 0)
4103 return false;
4104 /* Now the insn will be the only thing in the buffer. */
4107 pos = peep2_buf_position (peep2_current + peep2_current_count);
4108 peep2_insn_data[pos].insn = insn;
4109 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4110 peep2_current_count++;
4112 df_simulate_one_insn_forwards (bb, insn, live);
4113 return true;
4116 /* Perform the peephole2 optimization pass. */
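/* The patterns this pass matches come from define_peephole2 constructs in
   the target's machine description; peephole2_insns, generated from them,
   drives the matching loop below.  A minimal sketch of such a pattern
   (illustrative only, not taken from any real back end):

     (define_peephole2
       [(set (match_operand:SI 0 "register_operand")
             (match_operand:SI 1 "register_operand"))
        (set (match_dup 0)
             (plus:SI (match_dup 0) (const_int 1)))]
       ""
       [(set (match_dup 0)
             (plus:SI (match_dup 1) (const_int 1)))])  */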
4118 static void
4119 peephole2_optimize (void)
4121 rtx_insn *insn;
4122 bitmap live;
4123 int i;
4124 basic_block bb;
4126 peep2_do_cleanup_cfg = false;
4127 peep2_do_rebuild_jump_labels = false;
4129 df_set_flags (DF_LR_RUN_DCE);
4130 df_note_add_problem ();
4131 df_analyze ();
4133 /* Initialize the regsets we're going to use. */
4134 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4135 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
4136 search_ofs = 0;
4137 live = BITMAP_ALLOC (&reg_obstack);
4139 FOR_EACH_BB_REVERSE_FN (bb, cfun)
4141 bool past_end = false;
4142 int pos;
4144 rtl_profile_for_bb (bb);
4146 /* Start up propagation. */
4147 bitmap_copy (live, DF_LR_IN (bb));
4148 df_simulate_initialize_forwards (bb, live);
4149 peep2_reinit_state (live);
4151 insn = BB_HEAD (bb);
4152 for (;;)
4154 rtx_insn *attempt, *head;
4155 int match_len;
4157 if (!past_end && !NONDEBUG_INSN_P (insn))
4159 next_insn:
4160 insn = NEXT_INSN (insn);
4161 if (insn == NEXT_INSN (BB_END (bb)))
4162 past_end = true;
4163 continue;
4165 if (!past_end && peep2_fill_buffer (bb, insn, live))
4166 goto next_insn;
4168 /* If we did not fill an empty buffer, it signals the end of the
4169 block. */
4170 if (peep2_current_count == 0)
4171 break;
4173 /* The buffer filled to the current maximum, so try to match. */
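/* Terminate the live portion of the buffer with the end-of-block
   sentinel before handing it to the matcher.  */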
4175 pos = peep2_buf_position (peep2_current + peep2_current_count);
4176 peep2_insn_data[pos].insn = PEEP2_EOB;
4177 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
4179 /* Match the peephole. */
4180 head = peep2_insn_data[peep2_current].insn;
4181 attempt = peephole2_insns (PATTERN (head), head, &match_len);
4182 if (attempt != NULL)
4184 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
4185 if (last)
4187 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
4188 continue;
4192 /* No match: advance the buffer by one insn. */
4193 peep2_current = peep2_buf_position (peep2_current + 1);
4194 peep2_current_count--;
4198 default_rtl_profile ();
4199 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
4200 BITMAP_FREE (peep2_insn_data[i].live_before);
4201 BITMAP_FREE (live);
4202 if (peep2_do_rebuild_jump_labels)
4203 rebuild_jump_labels (get_insns ());
4204 if (peep2_do_cleanup_cfg)
4205 cleanup_cfg (CLEANUP_CFG_CHANGED);
4208 /* Common predicates for use with define_bypass. */
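/* They are typically named as the optional guard function of a
   define_bypass in a target's pipeline description, e.g. (the insn
   reservation names here are purely illustrative):

     (define_bypass 1 "my_alu_insn" "my_store_insn" "store_data_bypass_p")  */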
4210 /* Helper function for store_data_bypass_p, handle just a single SET
4211 IN_SET. */
4213 static bool
4214 store_data_bypass_p_1 (rtx_insn *out_insn, rtx in_set)
4216 if (!MEM_P (SET_DEST (in_set)))
4217 return false;
4219 rtx out_set = single_set (out_insn);
4220 if (out_set)
4221 return !reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set));
4223 rtx out_pat = PATTERN (out_insn);
4224 if (GET_CODE (out_pat) != PARALLEL)
4225 return false;
4227 for (int i = 0; i < XVECLEN (out_pat, 0); i++)
4229 rtx out_exp = XVECEXP (out_pat, 0, i);
4231 if (GET_CODE (out_exp) == CLOBBER || GET_CODE (out_exp) == USE)
4232 continue;
4234 gcc_assert (GET_CODE (out_exp) == SET);
4236 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
4237 return false;
4240 return true;
4243 /* True if the dependency between OUT_INSN and IN_INSN is on the store
4244 data not the address operand(s) of the store. IN_INSN and OUT_INSN
4245 must be either a single_set or a PARALLEL with SETs inside. */
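/* For example (illustrative RTL only): with OUT_INSN
     (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102)))
   and IN_INSN
     (set (mem:SI (reg:SI 103)) (reg:SI 100))
   the dependency is on the stored data, so this returns true; if IN_INSN
   were instead
     (set (mem:SI (reg:SI 100)) (reg:SI 104))
   the dependency would be on the address and this returns false.  */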
4247 bool
4248 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4250 rtx in_set = single_set (in_insn);
4251 if (in_set)
4252 return store_data_bypass_p_1 (out_insn, in_set);
4254 rtx in_pat = PATTERN (in_insn);
4255 if (GET_CODE (in_pat) != PARALLEL)
4256 return false;
4258 for (int i = 0; i < XVECLEN (in_pat, 0); i++)
4260 rtx in_exp = XVECEXP (in_pat, 0, i);
4262 if (GET_CODE (in_exp) == CLOBBER || GET_CODE (in_exp) == USE)
4263 continue;
4265 gcc_assert (GET_CODE (in_exp) == SET);
4267 if (!store_data_bypass_p_1 (out_insn, in_exp))
4268 return false;
4271 return true;
4274 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
4275 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
4276 or multiple set; IN_INSN should be single_set for truth, but for convenience
4277 of insn categorization may be any JUMP or CALL insn. */
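/* For example (illustrative RTL only): with OUT_INSN setting (reg:SI 100)
   and IN_INSN
     (set (pc) (if_then_else (ne (reg:SI 100) (const_int 0))
                             (label_ref 23) (pc)))
   register 100 is used only in the condition, not in the THEN or ELSE
   arms, so this returns true.  */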
4279 bool
4280 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
4282 rtx out_set, in_set;
4284 in_set = single_set (in_insn);
4285 if (! in_set)
4287 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
4288 return false;
4291 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
4292 return false;
4293 in_set = SET_SRC (in_set);
4295 out_set = single_set (out_insn);
4296 if (out_set)
4298 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
4299 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
4300 return false;
4302 else
4304 rtx out_pat;
4305 int i;
4307 out_pat = PATTERN (out_insn);
4308 gcc_assert (GET_CODE (out_pat) == PARALLEL);
4310 for (i = 0; i < XVECLEN (out_pat, 0); i++)
4312 rtx exp = XVECEXP (out_pat, 0, i);
4314 if (GET_CODE (exp) == CLOBBER)
4315 continue;
4317 gcc_assert (GET_CODE (exp) == SET);
4319 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
4320 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
4321 return false;
4325 return true;
4328 static unsigned int
4329 rest_of_handle_peephole2 (void)
4331 if (HAVE_peephole2)
4332 peephole2_optimize ();
4334 return 0;
4337 namespace {
4339 const pass_data pass_data_peephole2 =
4341 RTL_PASS, /* type */
4342 "peephole2", /* name */
4343 OPTGROUP_NONE, /* optinfo_flags */
4344 TV_PEEPHOLE2, /* tv_id */
4345 0, /* properties_required */
4346 0, /* properties_provided */
4347 0, /* properties_destroyed */
4348 0, /* todo_flags_start */
4349 TODO_df_finish, /* todo_flags_finish */
4352 class pass_peephole2 : public rtl_opt_pass
4354 public:
4355 pass_peephole2 (gcc::context *ctxt)
4356 : rtl_opt_pass (pass_data_peephole2, ctxt)
4359 /* opt_pass methods: */
4360 /* The epiphany backend creates a second instance of this pass, so we need
4361 a clone method. */
4362 opt_pass * clone () final override { return new pass_peephole2 (m_ctxt); }
4363 bool gate (function *) final override
4365 return (optimize > 0 && flag_peephole2);
4367 unsigned int execute (function *) final override
4369 return rest_of_handle_peephole2 ();
4372 }; // class pass_peephole2
4374 } // anon namespace
4376 rtl_opt_pass *
4377 make_pass_peephole2 (gcc::context *ctxt)
4379 return new pass_peephole2 (ctxt);
4382 namespace {
4384 const pass_data pass_data_split_all_insns =
4386 RTL_PASS, /* type */
4387 "split1", /* name */
4388 OPTGROUP_NONE, /* optinfo_flags */
4389 TV_NONE, /* tv_id */
4390 0, /* properties_required */
4391 PROP_rtl_split_insns, /* properties_provided */
4392 0, /* properties_destroyed */
4393 0, /* todo_flags_start */
4394 0, /* todo_flags_finish */
4397 class pass_split_all_insns : public rtl_opt_pass
4399 public:
4400 pass_split_all_insns (gcc::context *ctxt)
4401 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
4404 /* opt_pass methods: */
4405 /* The epiphany backend creates a second instance of this pass, so
4406 we need a clone method. */
4407 opt_pass * clone () final override
4409 return new pass_split_all_insns (m_ctxt);
4411 unsigned int execute (function *) final override
4413 split_all_insns ();
4414 return 0;
4417 }; // class pass_split_all_insns
4419 } // anon namespace
4421 rtl_opt_pass *
4422 make_pass_split_all_insns (gcc::context *ctxt)
4424 return new pass_split_all_insns (ctxt);
4427 namespace {
4429 const pass_data pass_data_split_after_reload =
4431 RTL_PASS, /* type */
4432 "split2", /* name */
4433 OPTGROUP_NONE, /* optinfo_flags */
4434 TV_NONE, /* tv_id */
4435 0, /* properties_required */
4436 0, /* properties_provided */
4437 0, /* properties_destroyed */
4438 0, /* todo_flags_start */
4439 0, /* todo_flags_finish */
4442 class pass_split_after_reload : public rtl_opt_pass
4444 public:
4445 pass_split_after_reload (gcc::context *ctxt)
4446 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
4449 /* opt_pass methods: */
4450 bool gate (function *) final override
4452 /* If optimizing, then go ahead and split insns now. */
4453 return optimize > 0;
4456 unsigned int execute (function *) final override
4458 split_all_insns ();
4459 return 0;
4462 }; // class pass_split_after_reload
4464 } // anon namespace
4466 rtl_opt_pass *
4467 make_pass_split_after_reload (gcc::context *ctxt)
4469 return new pass_split_after_reload (ctxt);
4472 static bool
4473 enable_split_before_sched2 (void)
4475 #ifdef INSN_SCHEDULING
4476 return optimize > 0 && flag_schedule_insns_after_reload;
4477 #else
4478 return false;
4479 #endif
4482 namespace {
4484 const pass_data pass_data_split_before_sched2 =
4486 RTL_PASS, /* type */
4487 "split3", /* name */
4488 OPTGROUP_NONE, /* optinfo_flags */
4489 TV_NONE, /* tv_id */
4490 0, /* properties_required */
4491 0, /* properties_provided */
4492 0, /* properties_destroyed */
4493 0, /* todo_flags_start */
4494 0, /* todo_flags_finish */
4497 class pass_split_before_sched2 : public rtl_opt_pass
4499 public:
4500 pass_split_before_sched2 (gcc::context *ctxt)
4501 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4504 /* opt_pass methods: */
4505 bool gate (function *) final override
4507 return enable_split_before_sched2 ();
4510 unsigned int execute (function *) final override
4512 split_all_insns ();
4513 return 0;
4516 }; // class pass_split_before_sched2
4518 } // anon namespace
4520 rtl_opt_pass *
4521 make_pass_split_before_sched2 (gcc::context *ctxt)
4523 return new pass_split_before_sched2 (ctxt);
4526 namespace {
4528 const pass_data pass_data_split_before_regstack =
4530 RTL_PASS, /* type */
4531 "split4", /* name */
4532 OPTGROUP_NONE, /* optinfo_flags */
4533 TV_NONE, /* tv_id */
4534 0, /* properties_required */
4535 0, /* properties_provided */
4536 0, /* properties_destroyed */
4537 0, /* todo_flags_start */
4538 0, /* todo_flags_finish */
4541 class pass_split_before_regstack : public rtl_opt_pass
4543 public:
4544 pass_split_before_regstack (gcc::context *ctxt)
4545 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4548 /* opt_pass methods: */
4549 bool gate (function *) final override;
4550 unsigned int execute (function *) final override
4552 split_all_insns ();
4553 return 0;
4556 }; // class pass_split_before_regstack
4558 bool
4559 pass_split_before_regstack::gate (function *)
4561 #if HAVE_ATTR_length && defined (STACK_REGS)
4562 /* If flow2 creates new instructions which need splitting, and
4563 scheduling after reload is not done, they might not be split
4564 until final, which does not allow splitting when HAVE_ATTR_length
4565 is defined. Selective scheduling can result in further
4566 instructions that need splitting. */
4567 #ifdef INSN_SCHEDULING
4568 return !enable_split_before_sched2 () || flag_selective_scheduling2;
4569 #else
4570 return !enable_split_before_sched2 ();
4571 #endif
4572 #else
4573 return false;
4574 #endif
4577 } // anon namespace
4579 rtl_opt_pass *
4580 make_pass_split_before_regstack (gcc::context *ctxt)
4582 return new pass_split_before_regstack (ctxt);
4585 namespace {
4587 const pass_data pass_data_split_for_shorten_branches =
4589 RTL_PASS, /* type */
4590 "split5", /* name */
4591 OPTGROUP_NONE, /* optinfo_flags */
4592 TV_NONE, /* tv_id */
4593 0, /* properties_required */
4594 0, /* properties_provided */
4595 0, /* properties_destroyed */
4596 0, /* todo_flags_start */
4597 0, /* todo_flags_finish */
4600 class pass_split_for_shorten_branches : public rtl_opt_pass
4602 public:
4603 pass_split_for_shorten_branches (gcc::context *ctxt)
4604 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4607 /* opt_pass methods: */
4608 bool gate (function *) final override
4610 /* The placement of the splitting that we do for shorten_branches
4611 depends on whether regstack is used by the target or not. */
4612 #if HAVE_ATTR_length && !defined (STACK_REGS)
4613 return true;
4614 #else
4615 return false;
4616 #endif
4619 unsigned int execute (function *) final override
4621 split_all_insns_noflow ();
4622 return 0;
4625 }; // class pass_split_for_shorten_branches
4627 } // anon namespace
4629 rtl_opt_pass *
4630 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4632 return new pass_split_for_shorten_branches (ctxt);
4635 /* (Re)initialize the target information after a change in target. */
4637 void
4638 recog_init ()
4640 /* The information is zero-initialized, so we don't need to do anything
4641 first time round. */
4642 if (!this_target_recog->x_initialized)
4644 this_target_recog->x_initialized = true;
4645 return;
4647 memset (this_target_recog->x_bool_attr_masks, 0,
4648 sizeof (this_target_recog->x_bool_attr_masks));
4649 for (unsigned int i = 0; i < NUM_INSN_CODES; ++i)
4650 if (this_target_recog->x_op_alt[i])
4652 free (this_target_recog->x_op_alt[i]);
4653 this_target_recog->x_op_alt[i] = 0;