/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "input.h"
#include "function.h"
#include "flags.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "reload.h"
#include "target.h"
#include "tree-pass.h"
#include "df.h"
#include "insn-codes.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx_insn *);

struct target_recog default_target_recog;
#if SWITCHABLE_TARGET
struct target_recog *this_target_recog = &default_target_recog;
#endif
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;
struct recog_data_d recog_data;

/* Contains a vector of operand_alternative structures, such that
   operand OP of alternative A is at index A * n_operands + OP.
   Set up by preprocess_constraints.  */
const operand_alternative *recog_op_alt;

/* Used to provide recog_op_alt for asms.  */
static operand_alternative asm_op_alt[MAX_RECOG_OPERANDS
				      * MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Return true if labels in asm operands BODY are LABEL_REFs.  */

static bool
asm_labels_ok (rtx body)
{
  rtx asmop;
  int i;

  asmop = extract_asm_operands (body);
  if (asmop == NULL_RTX)
    return true;

  for (i = 0; i < ASM_OPERANDS_LABEL_LENGTH (asmop); i++)
    if (GET_CODE (ASM_OPERANDS_LABEL (asmop, i)) != LABEL_REF)
      return false;

  return true;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  if (!asm_labels_ok (x))
    return 0;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      rtx_insn *insn = make_insn_raw (x);
      extract_insn (insn);
      constrain_operands (1, get_enabled_alternatives (insn));
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (! asm_operand_ok (operands[i], c, constraints))
	return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 without the UNSHARE argument, with
   UNSHARE defaulting to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument, with
   UNSHARE defaulting to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
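/* Usage sketch (illustrative; INSN, NEW_SRC and NEW_DEST are placeholder
   names): a pass that wants to rewrite both halves of a SET atomically
   queues the changes as a group and validates them together:

     rtx set = single_set (insn);
     validate_change (insn, &SET_SRC (set), new_src, 1);
     validate_change (insn, &SET_DEST (set), new_dest, 1);
     if (!apply_change_group ())
       ;  -- both changes have been rolled back; INSN is untouched --

   Passing IN_GROUP == 0 instead applies and validates each change on
   its own.  */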
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
	 Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_unshare_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_unshare_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.

   If IN_GROUP is true, any clobbers that have to be added in order to
   match the instruction will be added to the current change group.
   Otherwise the changes will take effect immediately.  */

int
insn_invalid_p (rtx_insn *insn, bool in_group)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed
		      && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      if (in_group)
	validate_change (insn, &PATTERN (insn), newpat, 1);
      else
	PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1, get_preferred_alternatives (insn)))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */

int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (MEM_P (object))
	{
	  if (! memory_address_addr_space_p (GET_MODE (object),
					     XEXP (object, 0),
					     MEM_ADDR_SPACE (object)))
	    break;
	}
      else if (/* changes[i].old might be zero, e.g. when putting a
		  REG_FRAME_RELATED_EXPR into a previously empty list.  */
	       changes[i].old
	       && REG_P (changes[i].old)
	       && asm_noperands (PATTERN (object)) > 0
	       && REG_EXPR (changes[i].old) != NULL_TREE
	       && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
	       && DECL_REGISTER (REG_EXPR (changes[i].old)))
	{
	  /* Don't allow changes of hard register operands to inline
	     assemblies if they have been defined as register asm ("x").  */
	  break;
	}
      else if (DEBUG_INSN_P (object))
	continue;
      else if (insn_invalid_p (as_a <rtx_insn *> (object), true))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
		   || GET_CODE (pat) == VAR_LOCATION)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
	*changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
	 are made.  */
      if (object)
	{
	  if (object != last_object && last_object && INSN_P (last_object))
	    df_insn_rescan (as_a <rtx_insn *> (last_object));
	  last_object = object;
	}
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (as_a <rtx_insn *> (last_object));
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
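/* Usage sketch (illustrative): callers that probe several candidate
   forms record a watermark with num_validated_changes and retract back
   to it on failure, e.g.:

     int start = num_validated_changes ();
     validate_change (insn, loc, candidate, 1);
     if (verify_changes (start))
       confirm_change_group ();
     else
       cancel_changes (start);

   apply_change_group is simply this protocol with a watermark of 0.  */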
/* Reduce conditional compilation elsewhere.  */
#ifndef HAVE_extv
#define HAVE_extv	0
#define CODE_FOR_extv	CODE_FOR_nothing
#endif
#ifndef HAVE_extzv
#define HAVE_extzv	0
#define CODE_FOR_extzv	CODE_FOR_nothing
#endif
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
			  machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx = NULL_RTX;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
			       gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
					       : swap_condition (code),
					       GET_MODE (x), XEXP (x, 1),
					       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  /* Canonicalize arithmetics with all constant operands.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONSTANT_P (XEXP (x, 0)))
	new_rtx = simplify_unary_operation (code, GET_MODE (x), XEXP (x, 0),
					    op0_mode);
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_binary_operation (code, GET_MODE (x), XEXP (x, 0),
					     XEXP (x, 1));
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONSTANT_P (XEXP (x, 0)) && CONSTANT_P (XEXP (x, 1)))
	new_rtx = simplify_relational_operation (code, GET_MODE (x), op0_mode,
						 XEXP (x, 0), XEXP (x, 1));
      break;
    default:
      break;
    }

  if (new_rtx)
    {
      validate_change (object, loc, new_rtx, 1);
      return;
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_SCALAR_INT_P (XEXP (x, 1)))
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
					op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new_rtx)
	    new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new_rtx, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
				 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
	validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (x, 2))
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0),
					MEM_ADDR_SPACE (XEXP (x, 0)))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  machine_mode wanted_mode = VOIDmode;
	  machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT && HAVE_extzv)
	    {
	      wanted_mode = insn_data[CODE_FOR_extzv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT && HAVE_extv)
	    {
	      wanted_mode = insn_data[CODE_FOR_extv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      gcc_assert (GET_MODE_PRECISION (wanted_mode)
			  == GET_MODE_BITSIZE (wanted_mode));
	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
			bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	{
	  if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
	      && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
	    {
	      /* Verify that operands are really shared.  */
	      gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
			  == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
							      (x, 0, j))));
	      validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
				      from, to, object, simplify);
	    }
	  else
	    validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
				    simplify);
	}
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
	else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
				    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* ??? The regmove is no more, so is this aberration still necessary?  */
  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
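/* Usage sketch (illustrative; INSN, OLD_REG and NEW_REG are placeholder
   names): propagating a register copy is a single call:

     if (validate_replace_rtx (old_reg, new_reg, insn))
       ;  -- INSN now uses NEW_REG and still matches its pattern --

   On failure every occurrence has already been restored, so the caller
   needs no undo path.  */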
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}
/* Same as above, but do not simplify rtx afterwards.  */

int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
				      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  This also
   will replace in REG_EQUAL and REG_EQUIV notes.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  rtx note;
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_EQUAL
	|| REG_NOTE_KIND (note) == REG_EQUIV)
      validate_replace_rtx_1 (&XEXP (note, 0), from, to, insn, true);
}
/* Function called by note_uses to replace used subexpressions.  */

struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
	validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
	validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx s = XVECEXP (pat, 0, i);

	if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
	  {
	    newpat = simplify_rtx (SET_SRC (s));
	    if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
	      validate_change (insn, &SET_SRC (s), newpat, 1);
	    newpat = simplify_rtx (SET_DEST (s));
	    if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
	      validate_change (insn, &SET_DEST (s), newpat, 1);
	  }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
	  && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
general_operand (rtx op, machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	    && targetm.legitimate_constant_p (mode == VOIDmode
					      ? GET_MODE (op)
					      : mode, op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.
	 However, we must allow them after reload so that they can
	 get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && MEM_P (sub))
	return 0;

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT
	  /* LRA can generate some invalid SUBREGS just for matched
	     operand reload presentation.  LRA needs to treat them as
	     valid.  */
	  && ! LRA_SUBREG_P (op))
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
	  /* LRA can use subreg to store a floating point value in an
	     integer mode.  Although the floating point and the
	     integer modes need the same number of hard registers, the
	     size of floating point mode can be less than the integer
	     mode.  */
	  && ! lra_in_progress
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || in_hard_reg_set_p (operand_reg_set, GET_MODE (op), REGNO (op)));

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      /* Use the mem's mode, since it will be reloaded thus.  LRA can
	 generate move insn with invalid addresses which is made valid
	 and efficiently calculated by LRA through further numerous
	 transformations.  */
      if (lra_in_progress
	  || memory_address_addr_space_p (GET_MODE (op), y, MEM_ADDR_SPACE (op)))
	return 1;
    }

  return 0;
}
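/* For example (illustrative): with MODE == SImode, a pseudo (reg:SI 100),
   a MEM such as (mem:SI (reg:SI 101)) with a valid address, and
   (const_int 1) would all normally satisfy general_operand, subject to
   the target's legitimate_constant_p hook; (const_int 1) fails for a
   floating-point MODE because a VOIDmode constant is rejected when the
   caller wants something floating.  */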
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
register_operand (rtx op, machine_mode mode)
{
  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (!REG_P (sub) && (reload_completed || !MEM_P (sub)))
	return 0;
    }
  else if (!REG_P (op))
    return 0;
  return general_operand (op, mode);
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (REG_P (op)
	      && (lra_in_progress
		  || (REGNO (op) < FIRST_PSEUDO_REGISTER
		      && REGNO_REG_CLASS (REGNO (op)) != NO_REGS))));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
	  && targetm.legitimate_constant_p (mode == VOIDmode
					    ? GET_MODE (op)
					    : mode, op));
}
/* Returns 1 if OP is an operand that is a CONST_INT of mode MODE.  */

int
const_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns 1 if OP is an operand that is a CONST_INT or CONST_WIDE_INT
   of mode MODE.  */

int
const_scalar_int_operand (rtx op, machine_mode mode)
{
  if (!CONST_SCALAR_INT_P (op))
    return 0;

  if (CONST_INT_P (op))
    return const_int_operand (op, mode);

  if (mode != VOIDmode)
    {
      int prec = GET_MODE_PRECISION (mode);
      int bitsize = GET_MODE_BITSIZE (mode);

      if (CONST_WIDE_INT_NUNITS (op) * HOST_BITS_PER_WIDE_INT > bitsize)
	return 0;

      if (prec == bitsize)
	return 1;
      else
	{
	  /* Multiword partial int.  */
	  HOST_WIDE_INT x
	    = CONST_WIDE_INT_ELT (op, CONST_WIDE_INT_NUNITS (op) - 1);
	  return (sext_hwi (x, prec & (HOST_BITS_PER_WIDE_INT - 1)) == x);
	}
    }
  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  return (GET_CODE (op) == CONST_DOUBLE)
	  && (GET_MODE (op) == mode || mode == VOIDmode);
}
#else
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number of MODE.  */

int
const_double_operand (rtx op, machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((CONST_DOUBLE_P (op) || CONST_INT_P (op))
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
#endif
/* Return 1 if OP is a general operand that is not an immediate
   operand of mode MODE.  */

int
nonimmediate_operand (rtx op, machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, machine_mode mode)
{
  if (CONSTANT_P (op))
    return immediate_operand (op, mode);
  return register_operand (op, mode);
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
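/* For example (illustrative): on a STACK_GROWS_DOWNWARD target with no
   PUSH_ROUNDING, a SImode push operand has the shape

     (mem:SI (pre_dec (reg sp)))

   while a push whose rounded size (say 8) differs from GET_MODE_SIZE
   must have the shape

     (mem:SI (pre_modify (reg sp) (plus (reg sp) (const_int -8))))  */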
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address
   for mode MODE in address space AS.  */

int
memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
			     rtx addr, addr_space_t as)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  gcc_assert (ADDR_SPACE_GENERIC_P (as));
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.addr_space.legitimate_address_p (mode, addr, 0, as);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS, return it.  */

rtx
extract_asm_operands (rtx body)
{
  rtx tmp;
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      return body;

    case SET:
      /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
      tmp = SET_SRC (body);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      break;

    case PARALLEL:
      tmp = XVECEXP (body, 0, 0);
      if (GET_CODE (tmp) == ASM_OPERANDS)
	return tmp;
      if (GET_CODE (tmp) == SET)
	{
	  tmp = SET_SRC (tmp);
	  if (GET_CODE (tmp) == ASM_OPERANDS)
	    return tmp;
	}
      break;

    default:
      break;
    }
  return NULL;
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  rtx asm_op = extract_asm_operands (CONST_CAST_RTX (body));
  int n_sets = 0;

  if (asm_op == NULL)
    return -1;

  if (GET_CODE (body) == SET)
    n_sets = 1;
  else if (GET_CODE (body) == PARALLEL)
    {
      int i;
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is
	     [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (asm_op))
		return -1;
	    }
	}
      else
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;
	}
    }

  return (ASM_OPERANDS_INPUT_LENGTH (asm_op)
	  + ASM_OPERANDS_LABEL_LENGTH (asm_op) + n_sets);
}
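/* For example (illustrative): for

     asm ("foo %0, %1" : "=r" (x) : "r" (y) : "cc");

   the body is [(set (reg x) (asm_operands ...)) (clobber (reg cc))],
   so asm_noperands returns 2: one SET output plus one input, with the
   trailing CLOBBER not counted.  */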
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
		     const char **constraints, machine_mode *modes,
		     location_t *loc)
{
  int nbase = 0, n, i;
  rtx asmop;

  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* Zero output asm: BODY is (asm_operands ...).  */
      asmop = body;
      break;

    case SET:
      /* Single output asm: BODY is (set OUTPUT (asm_operands ...)).  */
      asmop = SET_SRC (body);

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      nbase = 1;
      break;

    case PARALLEL:
      {
	int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */

	asmop = XVECEXP (body, 0, 0);
	if (GET_CODE (asmop) == SET)
	  {
	    asmop = SET_SRC (asmop);

	    /* At least one output, plus some CLOBBERs.  The outputs are in
	       the SETs.  Their constraints are in the ASM_OPERANDS itself.  */
	    for (i = 0; i < nparallel; i++)
	      {
		if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
		  break;		/* Past last SET */
		if (operands)
		  operands[i] = SET_DEST (XVECEXP (body, 0, i));
		if (operand_locs)
		  operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
		if (constraints)
		  constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
		if (modes)
		  modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	      }
	    nbase = i;
	  }
	break;
      }

    default:
      gcc_unreachable ();
    }

  n = ASM_OPERANDS_INPUT_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_INPUT (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_INPUT (asmop, i);
      if (constraints)
	constraints[nbase + i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
      if (modes)
	modes[nbase + i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
    }
  nbase += n;

  n = ASM_OPERANDS_LABEL_LENGTH (asmop);
  for (i = 0; i < n; i++)
    {
      if (operand_locs)
	operand_locs[nbase + i] = &ASM_OPERANDS_LABEL (asmop, i);
      if (operands)
	operands[nbase + i] = ASM_OPERANDS_LABEL (asmop, i);
      if (constraints)
	constraints[nbase + i] = "";
      if (modes)
	modes[nbase + i] = Pmode;
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
/* Parse inline assembly string STRING and determine which operands are
   referenced by % markers.  For the first NOPERANDS operands, set USED[I]
   to true if operand I is referenced.

   This is intended to distinguish barrier-like asms such as:

      asm ("" : "=m" (...));

   from real references such as:

      asm ("sw\t$0, %0" : "=m" (...));  */

void
get_referenced_operands (const char *string, bool *used,
			 unsigned int noperands)
{
  memset (used, 0, sizeof (bool) * noperands);
  const char *p = string;
  while (*p)
    switch (*p)
      {
      case '%':
	p += 1;
	/* A letter followed by a digit indicates an operand number.  */
	if (ISALPHA (p[0]) && ISDIGIT (p[1]))
	  p += 1;
	if (ISDIGIT (*p))
	  {
	    char *endptr;
	    unsigned long opnum = strtoul (p, &endptr, 10);
	    if (endptr != p && opnum < noperands)
	      used[opnum] = true;
	    p = endptr;
	  }
	else
	  p += 1;
	break;

      default:
	p++;
	break;
      }
}
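/* Usage sketch (illustrative): given the template returned by
   decode_asm_operands, a caller can test whether operand 0 is actually
   mentioned:

     bool used[MAX_RECOG_OPERANDS];
     get_referenced_operands (ASM_OPERANDS_TEMPLATE (asmop), used,
			      noperands);
     if (!used[0])
       ;  -- operand 0 never appears; the asm may be barrier-like --  */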
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;
#ifdef AUTO_INC_DEC
  bool incdec_ok = false;
#endif

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  /* Empty constraint string is the same as "X,...,X", i.e. X for as
     many alternatives as required to match the other operands.  */
  if (*constraint == '\0')
    result = 1;

  while (*constraint)
    {
      enum constraint_num cn;
      char c = *constraint;
      int len;
      switch (c)
	{
	case ',':
	  constraint++;
	  continue;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* If caller provided constraints pointer, look up
	     the matching constraint.  Otherwise, our caller should have
	     given us the proper matching constraint, but we can't
	     actually fail the check if they didn't.  Indicate that
	     results are inconclusive.  */
	  if (constraints)
	    {
	      char *end;
	      unsigned long match;

	      match = strtoul (constraint, &end, 10);
	      if (!result)
		result = asm_operand_ok (op, constraints[match], NULL);
	      constraint = (const char *) end;
	    }
	  else
	    {
	      do
		constraint++;
	      while (ISDIGIT (*constraint));
	      if (! result)
		result = -1;
	    }
	  continue;

	  /* The rest of the compiler assumes that reloading the address
	     of a MEM into a register will make it fit an 'o' constraint.
	     That is, if it sees a MEM operand for an 'o' constraint,
	     it assumes that (mem (base-reg)) will fit.

	     That assumption fails on targets that don't have offsettable
	     addresses at all.  We therefore need to treat 'o' asm
	     constraints as a special case and only accept operands that
	     are already offsettable, thus proving that at least one
	     offsettable address exists.  */
	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    result = 1;
	  break;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    result = 1;
	  break;

#ifdef AUTO_INC_DEC
	case '<':
	case '>':
	  /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
	     to exist, excepting those that expand_call created.  Further,
	     on some machines which do not have generalized auto inc/dec,
	     an inc/dec is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */
	  incdec_ok = true;
#endif
	default:
	  cn = lookup_constraint (constraint);
	  switch (get_constraint_type (cn))
	    {
	    case CT_REGISTER:
	      if (!result
		  && reg_class_for_constraint (cn) != NO_REGS
		  && GET_MODE (op) != BLKmode
		  && register_operand (op, VOIDmode))
		result = 1;
	      break;

	    case CT_CONST_INT:
	      if (!result
		  && CONST_INT_P (op)
		  && insn_const_int_ok_for_constraint (INTVAL (op), cn))
		result = 1;
	      break;

	    case CT_MEMORY:
	      /* Every memory operand can be reloaded to fit.  */
	      result = result || memory_operand (op, VOIDmode);
	      break;

	    case CT_ADDRESS:
	      /* Every address operand can be reloaded to fit.  */
	      result = result || address_operand (op, VOIDmode);
	      break;

	    case CT_FIXED_FORM:
	      result = result || constraint_satisfied_p (op, cn);
	      break;
	    }
	  break;
	}
      len = CONSTRAINT_LEN (c, constraint);
      do
	constraint++;
      while (--len && *constraint);
      if (len)
	return 0;
    }

#ifdef AUTO_INC_DEC
  /* For operands without < or > constraints reject side-effects.  */
  if (!incdec_ok && result && MEM_P (op))
    switch (GET_CODE (XEXP (op, 0)))
      {
      case PRE_INC:
      case POST_INC:
      case PRE_DEC:
      case POST_DEC:
      case PRE_MODIFY:
      case POST_MODIFY:
	return 0;
      default:
	break;
      }
#endif

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
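/* For example (illustrative): if *P is (plus:SI (reg:SI 100)
   (const_int 4)), find_constant_term_loc returns &XEXP (*p, 1);
   for a bare (reg:SI 100) it returns a null pointer.  */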
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (1, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
	  && offsettable_address_addr_space_p (0, GET_MODE (op), XEXP (op, 0),
					       MEM_ADDR_SPACE (op)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid for address space AS after the addition of
   a positive integer less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_addr_space_p (int strictp, machine_mode mode, rtx y,
				  addr_space_t as)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (machine_mode, rtx, addr_space_t) =
    (strictp ? strict_memory_address_addr_space_p
     : memory_address_addr_space_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y, as))
    return 0;

  machine_mode address_mode = GET_MODE (y);
  if (address_mode == VOIDmode)
    address_mode = targetm.addr_space.address_mode (as);
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
#endif

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (address_mode, *y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y, as);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (address_mode, XEXP (y, 0),
			plus_constant (address_mode, XEXP (y, 1),
				       mode_sz - 1));
#ifdef POINTERS_EXTEND_UNSIGNED
  /* Likewise for a ZERO_EXTEND from pointer_mode.  */
  else if (POINTERS_EXTEND_UNSIGNED > 0
	   && GET_CODE (y) == ZERO_EXTEND
	   && GET_MODE (XEXP (y, 0)) == pointer_mode)
    z = gen_rtx_ZERO_EXTEND (address_mode,
			     plus_constant (pointer_mode, XEXP (y, 0),
					    mode_sz - 1));
#endif
  else
    z = plus_constant (address_mode, y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z, as);
}
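/* For example (illustrative): to decide whether (plus (reg) (const_int 4))
   can address an 8-byte value, the code above temporarily rewrites the
   constant term to 4 + 8 - 1 = 11 and asks whether the result is still a
   valid QImode address; if so, every intermediate offset is assumed to
   be valid as well.  */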
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   ADDRSPACE is the address space associated with the address.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

bool
mode_dependent_address_p (rtx addr, addr_space_t addrspace)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return true;

  return targetm.mode_dependent_address_p (addr, addrspace);
}
2075 /* Return true if boolean attribute ATTR is supported. */
2077 static bool
2078 have_bool_attr (bool_attr attr)
2080 switch (attr)
2082 case BA_ENABLED:
2083 return HAVE_ATTR_enabled;
2084 case BA_PREFERRED_FOR_SIZE:
2085 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_size;
2086 case BA_PREFERRED_FOR_SPEED:
2087 return HAVE_ATTR_enabled || HAVE_ATTR_preferred_for_speed;
2089 gcc_unreachable ();
2092 /* Return the value of ATTR for instruction INSN. */
2094 static bool
2095 get_bool_attr (rtx_insn *insn, bool_attr attr)
2097 switch (attr)
2099 case BA_ENABLED:
2100 return get_attr_enabled (insn);
2101 case BA_PREFERRED_FOR_SIZE:
2102 return get_attr_enabled (insn) && get_attr_preferred_for_size (insn);
2103 case BA_PREFERRED_FOR_SPEED:
2104 return get_attr_enabled (insn) && get_attr_preferred_for_speed (insn);
2106 gcc_unreachable ();
2109 /* Like get_bool_attr_mask, but don't use the cache. */
2111 static alternative_mask
2112 get_bool_attr_mask_uncached (rtx_insn *insn, bool_attr attr)
2114 /* Temporarily install enough information for get_attr_<foo> to assume
2115 that the insn operands are already cached. As above, the attribute
2116 mustn't depend on the values of operands, so we don't provide their
2117 real values here. */
2118 rtx old_insn = recog_data.insn;
2119 int old_alternative = which_alternative;
2121 recog_data.insn = insn;
2122 alternative_mask mask = ALL_ALTERNATIVES;
2123 int n_alternatives = insn_data[INSN_CODE (insn)].n_alternatives;
2124 for (int i = 0; i < n_alternatives; i++)
2126 which_alternative = i;
2127 if (!get_bool_attr (insn, attr))
2128 mask &= ~ALTERNATIVE_BIT (i);
2131 recog_data.insn = old_insn;
2132 which_alternative = old_alternative;
2133 return mask;
2136 /* Return the mask of operand alternatives that are allowed for INSN
2137 by boolean attribute ATTR. This mask depends only on INSN and on
2138 the current target; it does not depend on things like the values of
2139 operands. */
2141 static alternative_mask
2142 get_bool_attr_mask (rtx_insn *insn, bool_attr attr)
2144 /* Quick exit for asms and for targets that don't use these attributes. */
2145 int code = INSN_CODE (insn);
2146 if (code < 0 || !have_bool_attr (attr))
2147 return ALL_ALTERNATIVES;
2149 /* Calling get_attr_<foo> can be expensive, so cache the mask
2150 for speed. */
2151 if (!this_target_recog->x_bool_attr_masks[code][attr])
2152 this_target_recog->x_bool_attr_masks[code][attr]
2153 = get_bool_attr_mask_uncached (insn, attr);
2154 return this_target_recog->x_bool_attr_masks[code][attr];
2157 /* Return the set of alternatives of INSN that are allowed by the current
2158 target. */
2160 alternative_mask
2161 get_enabled_alternatives (rtx_insn *insn)
2163 return get_bool_attr_mask (insn, BA_ENABLED);
2166 /* Return the set of alternatives of INSN that are allowed by the current
2167 target and are preferred for the current size/speed optimization
2168 choice. */
2170 alternative_mask
2171 get_preferred_alternatives (rtx_insn *insn)
2173 if (optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn)))
2174 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2175 else
2176 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
2179 /* Return the set of alternatives of INSN that are allowed by the current
2180 target and are preferred for the size/speed optimization choice
2181 associated with BB. Passing a separate BB is useful if INSN has not
2182 been emitted yet or if we are considering moving it to a different
2183 block. */
2185 alternative_mask
2186 get_preferred_alternatives (rtx_insn *insn, basic_block bb)
2188 if (optimize_bb_for_speed_p (bb))
2189 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SPEED);
2190 else
2191 return get_bool_attr_mask (insn, BA_PREFERRED_FOR_SIZE);
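/* Editorial sketch, not part of recog.c: a pass that wants to consider
   only the usable alternatives of a recognized insn INSN might combine
   these masks with recog_data, e.g.:

     extract_insn (insn);
     alternative_mask mask = get_preferred_alternatives (insn);
     for (int alt = 0; alt < recog_data.n_alternatives; alt++)
       if (TEST_BIT (mask, alt))
         ...consider alternative ALT...

   The loop body is hypothetical; TEST_BIT is the same macro used by
   constrain_operands below.  */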
2194 /* Assert that the cached boolean attributes for INSN are still accurate.
2195 The backend is required to define these attributes in a way that only
2196 depends on the current target (rather than operands, compiler phase,
2197 etc.). */
2199 bool
2200 check_bool_attrs (rtx_insn *insn)
2202 int code = INSN_CODE (insn);
2203 if (code >= 0)
2204 for (int i = 0; i <= BA_LAST; ++i)
2206 enum bool_attr attr = (enum bool_attr) i;
2207 if (this_target_recog->x_bool_attr_masks[code][attr])
2208 gcc_assert (this_target_recog->x_bool_attr_masks[code][attr]
2209 == get_bool_attr_mask_uncached (insn, attr));
2211 return true;
2214 /* Like extract_insn, but save the extracted insn and don't extract it again
2215 when called again for the same insn, expecting that recog_data still
2216 contains valid information. This is used primarily by the gen_attr
2217 infrastructure, which often extracts the same insn again and again. */
2218 void
2219 extract_insn_cached (rtx_insn *insn)
2221 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2222 return;
2223 extract_insn (insn);
2224 recog_data.insn = insn;
2227 /* Do uncached extract_insn, constrain_operands and complain about failures.
2228 This should be used when extracting a pre-existing constrained instruction
2229 if the caller wants to know which alternative was chosen. */
2230 void
2231 extract_constrain_insn (rtx_insn *insn)
2233 extract_insn (insn);
2234 if (!constrain_operands (reload_completed, get_enabled_alternatives (insn)))
2235 fatal_insn_not_found (insn);
2238 /* Do cached extract_insn, constrain_operands and complain about failures.
2239 Used by insn_attrtab. */
2240 void
2241 extract_constrain_insn_cached (rtx_insn *insn)
2243 extract_insn_cached (insn);
2244 if (which_alternative == -1
2245 && !constrain_operands (reload_completed,
2246 get_enabled_alternatives (insn)))
2247 fatal_insn_not_found (insn);
2250 /* Do cached constrain_operands on INSN and complain about failures. */
2251 int
2252 constrain_operands_cached (rtx_insn *insn, int strict)
2254 if (which_alternative == -1)
2255 return constrain_operands (strict, get_enabled_alternatives (insn));
2256 else
2257 return 1;
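/* Editorial sketch, not part of recog.c: backend output functions
   commonly use the cached variants to learn which alternative was
   chosen, along the lines of (the template strings are hypothetical):

     extract_constrain_insn_cached (insn);
     switch (which_alternative)
       {
       case 0: return "first-form";
       case 1: return "second-form";
       default: gcc_unreachable ();
       }
*/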
2260 /* Analyze INSN and fill in recog_data. */
2262 void
2263 extract_insn (rtx_insn *insn)
2265 int i;
2266 int icode;
2267 int noperands;
2268 rtx body = PATTERN (insn);
2270 recog_data.n_operands = 0;
2271 recog_data.n_alternatives = 0;
2272 recog_data.n_dups = 0;
2273 recog_data.is_asm = false;
2275 switch (GET_CODE (body))
2277 case USE:
2278 case CLOBBER:
2279 case ASM_INPUT:
2280 case ADDR_VEC:
2281 case ADDR_DIFF_VEC:
2282 case VAR_LOCATION:
2283 return;
2285 case SET:
2286 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2287 goto asm_insn;
2288 else
2289 goto normal_insn;
2290 case PARALLEL:
2291 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2292 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2293 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2294 goto asm_insn;
2295 else
2296 goto normal_insn;
2297 case ASM_OPERANDS:
2298 asm_insn:
2299 recog_data.n_operands = noperands = asm_noperands (body);
2300 if (noperands >= 0)
2302 /* This insn is an `asm' with operands. */
2304 /* expand_asm_operands makes sure there aren't too many operands. */
2305 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2307 /* Now get the operand values and constraints out of the insn. */
2308 decode_asm_operands (body, recog_data.operand,
2309 recog_data.operand_loc,
2310 recog_data.constraints,
2311 recog_data.operand_mode, NULL);
2312 memset (recog_data.is_operator, 0, sizeof recog_data.is_operator);
2313 if (noperands > 0)
2315 const char *p = recog_data.constraints[0];
2316 recog_data.n_alternatives = 1;
2317 while (*p)
2318 recog_data.n_alternatives += (*p++ == ',');
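/* For example, the asm constraint string "=r,m" contains one comma and
   therefore describes two alternatives.  */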
2320 recog_data.is_asm = true;
2321 break;
2323 fatal_insn_not_found (insn);
2325 default:
2326 normal_insn:
2327 /* Ordinary insn: recognize it, get the operands via insn_extract
2328 and get the constraints. */
2330 icode = recog_memoized (insn);
2331 if (icode < 0)
2332 fatal_insn_not_found (insn);
2334 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2335 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2336 recog_data.n_dups = insn_data[icode].n_dups;
2338 insn_extract (insn);
2340 for (i = 0; i < noperands; i++)
2342 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2343 recog_data.is_operator[i] = insn_data[icode].operand[i].is_operator;
2344 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2345 /* A VOIDmode match_operand gets its mode from the real operand. */
2346 if (recog_data.operand_mode[i] == VOIDmode)
2347 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2350 for (i = 0; i < noperands; i++)
2351 recog_data.operand_type[i]
2352 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2353 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2354 : OP_IN);
2356 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2358 recog_data.insn = NULL;
2359 which_alternative = -1;
2362 /* Fill in OP_ALT_BASE for an instruction that has N_OPERANDS operands,
2363 N_ALTERNATIVES alternatives and constraint strings CONSTRAINTS.
2364 OP_ALT_BASE has N_ALTERNATIVES * N_OPERANDS entries and CONSTRAINTS
2365 has N_OPERANDS entries. */
2367 void
2368 preprocess_constraints (int n_operands, int n_alternatives,
2369 const char **constraints,
2370 operand_alternative *op_alt_base)
2372 for (int i = 0; i < n_operands; i++)
2374 int j;
2375 struct operand_alternative *op_alt;
2376 const char *p = constraints[i];
2378 op_alt = op_alt_base;
2380 for (j = 0; j < n_alternatives; j++, op_alt += n_operands)
2382 op_alt[i].cl = NO_REGS;
2383 op_alt[i].constraint = p;
2384 op_alt[i].matches = -1;
2385 op_alt[i].matched = -1;
2387 if (*p == '\0' || *p == ',')
2389 op_alt[i].anything_ok = 1;
2390 continue;
2393 for (;;)
2395 char c = *p;
2396 if (c == '#')
2398 c = *++p;
2399 while (c != ',' && c != '\0');
2400 if (c == ',' || c == '\0')
2402 p++;
2403 break;
2406 switch (c)
2408 case '?':
2409 op_alt[i].reject += 6;
2410 break;
2411 case '!':
2412 op_alt[i].reject += 600;
2413 break;
2414 case '&':
2415 op_alt[i].earlyclobber = 1;
2416 break;
2418 case '0': case '1': case '2': case '3': case '4':
2419 case '5': case '6': case '7': case '8': case '9':
2421 char *end;
2422 op_alt[i].matches = strtoul (p, &end, 10);
2423 op_alt[op_alt[i].matches].matched = i;
2424 p = end;
2426 continue;
2428 case 'X':
2429 op_alt[i].anything_ok = 1;
2430 break;
2432 case 'g':
2433 op_alt[i].cl =
2434 reg_class_subunion[(int) op_alt[i].cl][(int) GENERAL_REGS];
2435 break;
2437 default:
2438 enum constraint_num cn = lookup_constraint (p);
2439 enum reg_class cl;
2440 switch (get_constraint_type (cn))
2442 case CT_REGISTER:
2443 cl = reg_class_for_constraint (cn);
2444 if (cl != NO_REGS)
2445 op_alt[i].cl = reg_class_subunion[op_alt[i].cl][cl];
2446 break;
2448 case CT_CONST_INT:
2449 break;
2451 case CT_MEMORY:
2452 op_alt[i].memory_ok = 1;
2453 break;
2455 case CT_ADDRESS:
2456 op_alt[i].is_address = 1;
2457 op_alt[i].cl
2458 = (reg_class_subunion
2459 [(int) op_alt[i].cl]
2460 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
2461 ADDRESS, SCRATCH)]);
2462 break;
2464 case CT_FIXED_FORM:
2465 break;
2467 break;
2469 p += CONSTRAINT_LEN (c, p);
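/* Editorial example, not part of recog.c: for a two-operand insn whose
   operand 1 carries the constraint "0", the digit case above records
   op_alt[1].matches == 0 and op_alt[0].matched == 1, i.e. in that
   alternative operand 1 must match operand 0.  */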
2475 /* Return an array of operand_alternative structures for
2476 instruction ICODE. */
2478 const operand_alternative *
2479 preprocess_insn_constraints (int icode)
2481 gcc_checking_assert (IN_RANGE (icode, 0, LAST_INSN_CODE));
2482 if (this_target_recog->x_op_alt[icode])
2483 return this_target_recog->x_op_alt[icode];
2485 int n_operands = insn_data[icode].n_operands;
2486 if (n_operands == 0)
2487 return 0;
2488 /* Always provide at least one alternative so that which_op_alt ()
2489 works correctly. If the instruction has 0 alternatives (i.e. all
2490 constraint strings are empty) then each operand in this alternative
2491 will have anything_ok set. */
2492 int n_alternatives = MAX (insn_data[icode].n_alternatives, 1);
2493 int n_entries = n_operands * n_alternatives;
2495 operand_alternative *op_alt = XCNEWVEC (operand_alternative, n_entries);
2496 const char **constraints = XALLOCAVEC (const char *, n_operands);
2498 for (int i = 0; i < n_operands; ++i)
2499 constraints[i] = insn_data[icode].operand[i].constraint;
2500 preprocess_constraints (n_operands, n_alternatives, constraints, op_alt);
2502 this_target_recog->x_op_alt[icode] = op_alt;
2503 return op_alt;
2506 /* After calling extract_insn, you can use this function to extract some
2507 information from the constraint strings into a more usable form.
2508 The collected data is stored in recog_op_alt. */
2510 void
2511 preprocess_constraints (rtx insn)
2513 int icode = INSN_CODE (insn);
2514 if (icode >= 0)
2515 recog_op_alt = preprocess_insn_constraints (icode);
2516 else
2518 int n_operands = recog_data.n_operands;
2519 int n_alternatives = recog_data.n_alternatives;
2520 int n_entries = n_operands * n_alternatives;
2521 memset (asm_op_alt, 0, n_entries * sizeof (operand_alternative));
2522 preprocess_constraints (n_operands, n_alternatives,
2523 recog_data.constraints, asm_op_alt);
2524 recog_op_alt = asm_op_alt;
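/* Editorial sketch, not part of recog.c: a typical caller pairs this
   with extract_insn and constrain_operands before indexing the result,
   e.g.:

     extract_insn (insn);
     preprocess_constraints (insn);
     if (constrain_operands_cached (insn, reload_completed))
       {
         const operand_alternative *op_alt = which_op_alt ();
         ...inspect op_alt[opno].cl, op_alt[opno].matches, etc...
       }

   which_op_alt (from recog.h) returns the recog_op_alt entries of the
   alternative selected in which_alternative.  */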
2528 /* Check the operands of an insn against the insn's operand constraints
2529 and return 1 if they match any of the alternatives in ALTERNATIVES.
2531 The information about the insn's operands, constraints, operand modes
2532 etc. is obtained from the global variables set up by extract_insn.
2534 WHICH_ALTERNATIVE is set to a number which indicates which
2535 alternative of constraints was matched: 0 for the first alternative,
2536 1 for the next, etc.
2538 In addition, when two operands are required to match
2539 and it happens that the output operand is (reg) while the
2540 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2541 make the output operand look like the input.
2542 This is because the output operand is the one the template will print.
2544 This is used in final, just before printing the assembler code and by
2545 the routines that determine an insn's attribute.
2547 If STRICT is positive, it means that we have been
2548 called after reload has been completed. In that case, we must
2549 do all checks strictly. If it is zero, it means that we have been called
2550 before reload has completed. In that case, we first try to see if we can
2551 find an alternative that matches strictly. If not, we try again, this
2552 time assuming that reload will fix up the insn. This provides a "best
2553 guess" for the alternative and is used to compute attributes of insns prior
2554 to reload. A negative value of STRICT is used for this internal call. */
2556 struct funny_match
2558 int this_op, other;
2561 int
2562 constrain_operands (int strict, alternative_mask alternatives)
2564 const char *constraints[MAX_RECOG_OPERANDS];
2565 int matching_operands[MAX_RECOG_OPERANDS];
2566 int earlyclobber[MAX_RECOG_OPERANDS];
2567 int c;
2569 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2570 int funny_match_index;
2572 which_alternative = 0;
2573 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2574 return 1;
2576 for (c = 0; c < recog_data.n_operands; c++)
2578 constraints[c] = recog_data.constraints[c];
2579 matching_operands[c] = -1;
2584 int seen_earlyclobber_at = -1;
2585 int opno;
2586 int lose = 0;
2587 funny_match_index = 0;
2589 if (!TEST_BIT (alternatives, which_alternative))
2591 int i;
2593 for (i = 0; i < recog_data.n_operands; i++)
2594 constraints[i] = skip_alternative (constraints[i]);
2596 which_alternative++;
2597 continue;
2600 for (opno = 0; opno < recog_data.n_operands; opno++)
2602 rtx op = recog_data.operand[opno];
2603 machine_mode mode = GET_MODE (op);
2604 const char *p = constraints[opno];
2605 int offset = 0;
2606 int win = 0;
2607 int val;
2608 int len;
2610 earlyclobber[opno] = 0;
2612 /* A unary operator may be accepted by the predicate, but it
2613 is irrelevant for matching constraints. */
2614 if (UNARY_P (op))
2615 op = XEXP (op, 0);
2617 if (GET_CODE (op) == SUBREG)
2619 if (REG_P (SUBREG_REG (op))
2620 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2621 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2622 GET_MODE (SUBREG_REG (op)),
2623 SUBREG_BYTE (op),
2624 GET_MODE (op));
2625 op = SUBREG_REG (op);
2628 /* An empty constraint or empty alternative
2629 allows anything which matched the pattern. */
2630 if (*p == 0 || *p == ',')
2631 win = 1;
2634 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2636 case '\0':
2637 len = 0;
2638 break;
2639 case ',':
2640 c = '\0';
2641 break;
2643 case '#':
2644 /* Ignore rest of this alternative as far as
2645 constraint checking is concerned. */
2647 p++;
2648 while (*p && *p != ',');
2649 len = 0;
2650 break;
2652 case '&':
2653 earlyclobber[opno] = 1;
2654 if (seen_earlyclobber_at < 0)
2655 seen_earlyclobber_at = opno;
2656 break;
2658 case '0': case '1': case '2': case '3': case '4':
2659 case '5': case '6': case '7': case '8': case '9':
2661 /* This operand must be the same as a previous one.
2662 This kind of constraint is used for instructions such
2663 as add when they take only two operands.
2665 Note that the lower-numbered operand is passed first.
2667 If we are not testing strictly, assume that this
2668 constraint will be satisfied. */
2670 char *end;
2671 int match;
2673 match = strtoul (p, &end, 10);
2674 p = end;
2676 if (strict < 0)
2677 val = 1;
2678 else
2680 rtx op1 = recog_data.operand[match];
2681 rtx op2 = recog_data.operand[opno];
2683 /* A unary operator may be accepted by the predicate,
2684 but it is irrelevant for matching constraints. */
2685 if (UNARY_P (op1))
2686 op1 = XEXP (op1, 0);
2687 if (UNARY_P (op2))
2688 op2 = XEXP (op2, 0);
2690 val = operands_match_p (op1, op2);
2693 matching_operands[opno] = match;
2694 matching_operands[match] = opno;
2696 if (val != 0)
2697 win = 1;
2699 /* If output is *x and input is *--x, arrange later
2700 to change the output to *--x as well, since the
2701 output op is the one that will be printed. */
2702 if (val == 2 && strict > 0)
2704 funny_match[funny_match_index].this_op = opno;
2705 funny_match[funny_match_index++].other = match;
2708 len = 0;
2709 break;
2711 case 'p':
2712 /* p is used for address_operands. When we are called by
2713 gen_reload, no one will have checked that the address is
2714 strictly valid, i.e., that all pseudos requiring hard regs
2715 have gotten them. */
2716 if (strict <= 0
2717 || (strict_memory_address_p (recog_data.operand_mode[opno],
2718 op)))
2719 win = 1;
2720 break;
2722 /* No need to check general_operand again;
2723 it was done in insn-recog.c. Well, except that reload
2724 doesn't check the validity of its replacements, but
2725 that should only matter when there's a bug. */
2726 case 'g':
2727 /* Anything goes unless it is a REG and really has a hard reg
2728 but the hard reg is not in the class GENERAL_REGS. */
2729 if (REG_P (op))
2731 if (strict < 0
2732 || GENERAL_REGS == ALL_REGS
2733 || (reload_in_progress
2734 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2735 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2736 win = 1;
2738 else if (strict < 0 || general_operand (op, mode))
2739 win = 1;
2740 break;
2742 default:
2744 enum constraint_num cn = lookup_constraint (p);
2745 enum reg_class cl = reg_class_for_constraint (cn);
2746 if (cl != NO_REGS)
2748 if (strict < 0
2749 || (strict == 0
2750 && REG_P (op)
2751 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2752 || (strict == 0 && GET_CODE (op) == SCRATCH)
2753 || (REG_P (op)
2754 && reg_fits_class_p (op, cl, offset, mode)))
2755 win = 1;
2758 else if (constraint_satisfied_p (op, cn))
2759 win = 1;
2761 else if (insn_extra_memory_constraint (cn)
2762 /* Every memory operand can be reloaded to fit. */
2763 && ((strict < 0 && MEM_P (op))
2764 /* Before reload, accept what reload can turn
2765 into mem. */
2766 || (strict < 0 && CONSTANT_P (op))
2767 /* During reload, accept a pseudo */
2768 || (reload_in_progress && REG_P (op)
2769 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2770 win = 1;
2771 else if (insn_extra_address_constraint (cn)
2772 /* Every address operand can be reloaded to fit. */
2773 && strict < 0)
2774 win = 1;
2775 /* Cater to architectures like IA-64 that define extra memory
2776 constraints without using define_memory_constraint. */
2777 else if (reload_in_progress
2778 && REG_P (op)
2779 && REGNO (op) >= FIRST_PSEUDO_REGISTER
2780 && reg_renumber[REGNO (op)] < 0
2781 && reg_equiv_mem (REGNO (op)) != 0
2782 && constraint_satisfied_p
2783 (reg_equiv_mem (REGNO (op)), cn))
2784 win = 1;
2785 break;
2788 while (p += len, c);
2790 constraints[opno] = p;
2791 /* If this operand did not win somehow,
2792 this alternative loses. */
2793 if (! win)
2794 lose = 1;
2796 /* This alternative won; the operands are ok.
2797 Change whichever operands this alternative says to change. */
2798 if (! lose)
2800 int opno, eopno;
2802 /* See if any earlyclobber operand conflicts with some other
2803 operand. */
2805 if (strict > 0 && seen_earlyclobber_at >= 0)
2806 for (eopno = seen_earlyclobber_at;
2807 eopno < recog_data.n_operands;
2808 eopno++)
2809 /* Ignore earlyclobber operands now in memory,
2810 because we would often report failure when we have
2811 two memory operands, one of which was formerly a REG. */
2812 if (earlyclobber[eopno]
2813 && REG_P (recog_data.operand[eopno]))
2814 for (opno = 0; opno < recog_data.n_operands; opno++)
2815 if ((MEM_P (recog_data.operand[opno])
2816 || recog_data.operand_type[opno] != OP_OUT)
2817 && opno != eopno
2818 /* Ignore things like match_operator operands. */
2819 && *recog_data.constraints[opno] != 0
2820 && ! (matching_operands[opno] == eopno
2821 && operands_match_p (recog_data.operand[opno],
2822 recog_data.operand[eopno]))
2823 && ! safe_from_earlyclobber (recog_data.operand[opno],
2824 recog_data.operand[eopno]))
2825 lose = 1;
2827 if (! lose)
2829 while (--funny_match_index >= 0)
2831 recog_data.operand[funny_match[funny_match_index].other]
2832 = recog_data.operand[funny_match[funny_match_index].this_op];
2835 #ifdef AUTO_INC_DEC
2836 /* For operands without < or > constraints reject side-effects. */
2837 if (recog_data.is_asm)
2839 for (opno = 0; opno < recog_data.n_operands; opno++)
2840 if (MEM_P (recog_data.operand[opno]))
2841 switch (GET_CODE (XEXP (recog_data.operand[opno], 0)))
2843 case PRE_INC:
2844 case POST_INC:
2845 case PRE_DEC:
2846 case POST_DEC:
2847 case PRE_MODIFY:
2848 case POST_MODIFY:
2849 if (strchr (recog_data.constraints[opno], '<') == NULL
2850 && strchr (recog_data.constraints[opno], '>')
2851 == NULL)
2852 return 0;
2853 break;
2854 default:
2855 break;
2858 #endif
2859 return 1;
2863 which_alternative++;
2865 while (which_alternative < recog_data.n_alternatives);
2867 which_alternative = -1;
2868 /* If we are about to reject this, but we are not to test strictly,
2869 try a very loose test. Only return failure if it fails also. */
2870 if (strict == 0)
2871 return constrain_operands (-1, alternatives);
2872 else
2873 return 0;
2876 /* Return true iff OPERAND (assumed to be a REG rtx)
2877 is a hard reg in class CLASS when its regno is offset by OFFSET
2878 and changed to mode MODE.
2879 If REG occupies multiple hard regs, all of them must be in CLASS. */
2881 bool
2882 reg_fits_class_p (const_rtx operand, reg_class_t cl, int offset,
2883 machine_mode mode)
2885 unsigned int regno = REGNO (operand);
2887 if (cl == NO_REGS)
2888 return false;
2890 /* Regno must not be a pseudo register. Offset may be negative. */
2891 return (HARD_REGISTER_NUM_P (regno)
2892 && HARD_REGISTER_NUM_P (regno + offset)
2893 && in_hard_reg_set_p (reg_class_contents[(int) cl], mode,
2894 regno + offset));
2897 /* Split a single instruction. Helper function for split_all_insns and
2898 split_all_insns_noflow. Return the last insn in the sequence if
2899 successful, or NULL if unsuccessful. */
2901 static rtx
2902 split_insn (rtx_insn *insn)
2904 /* Split insns here to get max fine-grain parallelism. */
2905 rtx_insn *first = PREV_INSN (insn);
2906 rtx_insn *last = try_split (PATTERN (insn), insn, 1);
2907 rtx insn_set, last_set, note;
2909 if (last == insn)
2910 return NULL_RTX;
2912 /* If the original instruction was a single set that was known to be
2913 equivalent to a constant, see if we can say the same about the last
2914 instruction in the split sequence. The two instructions must set
2915 the same destination. */
2916 insn_set = single_set (insn);
2917 if (insn_set)
2919 last_set = single_set (last);
2920 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2922 note = find_reg_equal_equiv_note (insn);
2923 if (note && CONSTANT_P (XEXP (note, 0)))
2924 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2925 else if (CONSTANT_P (SET_SRC (insn_set)))
2926 set_unique_reg_note (last, REG_EQUAL,
2927 copy_rtx (SET_SRC (insn_set)));
2931 /* try_split returns the NOTE that INSN became. */
2932 SET_INSN_DELETED (insn);
2934 /* ??? Coddle to md files that generate subregs in post-reload
2935 splitters instead of computing the proper hard register. */
2936 if (reload_completed && first != last)
2938 first = NEXT_INSN (first);
2939 for (;;)
2941 if (INSN_P (first))
2942 cleanup_subreg_operands (first);
2943 if (first == last)
2944 break;
2945 first = NEXT_INSN (first);
2949 return last;
2952 /* Split all insns in the function. */
2954 void
2955 split_all_insns (void)
2957 sbitmap blocks;
2958 bool changed;
2959 basic_block bb;
2961 blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
2962 bitmap_clear (blocks);
2963 changed = false;
2965 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2967 rtx_insn *insn, *next;
2968 bool finish = false;
2970 rtl_profile_for_bb (bb);
2971 for (insn = BB_HEAD (bb); !finish ; insn = next)
2973 /* Can't use `next_real_insn' because that might go across
2974 CODE_LABELS and short-out basic blocks. */
2975 next = NEXT_INSN (insn);
2976 finish = (insn == BB_END (bb));
2977 if (INSN_P (insn))
2979 rtx set = single_set (insn);
2981 /* Don't split no-op move insns. These should silently
2982 disappear later in final. Splitting such insns would
2983 break the code that handles LIBCALL blocks. */
2984 if (set && set_noop_p (set))
2986 /* Nops get in the way while scheduling, so delete them
2987 now if register allocation has already been done. It
2988 is too risky to try to do this before register
2989 allocation, and there are unlikely to be very many
2990 nops then anyways. */
2991 if (reload_completed)
2992 delete_insn_and_edges (insn);
2994 else
2996 if (split_insn (insn))
2998 bitmap_set_bit (blocks, bb->index);
2999 changed = true;
3006 default_rtl_profile ();
3007 if (changed)
3008 find_many_sub_basic_blocks (blocks);
3010 #ifdef ENABLE_CHECKING
3011 verify_flow_info ();
3012 #endif
3014 sbitmap_free (blocks);
3017 /* Same as split_all_insns, but do not expect CFG to be available.
3018 Used by machine dependent reorg passes. */
3020 unsigned int
3021 split_all_insns_noflow (void)
3023 rtx_insn *next, *insn;
3025 for (insn = get_insns (); insn; insn = next)
3027 next = NEXT_INSN (insn);
3028 if (INSN_P (insn))
3030 /* Don't split no-op move insns. These should silently
3031 disappear later in final. Splitting such insns would
3032 break the code that handles LIBCALL blocks. */
3033 rtx set = single_set (insn);
3034 if (set && set_noop_p (set))
3036 /* Nops get in the way while scheduling, so delete them
3037 now if register allocation has already been done. It
3038 is too risky to try to do this before register
3039 allocation, and there are unlikely to be very many
3040 nops then anyways.
3042 ??? Should we use delete_insn when the CFG isn't valid? */
3043 if (reload_completed)
3044 delete_insn_and_edges (insn);
3046 else
3047 split_insn (insn);
3050 return 0;
3053 #ifdef HAVE_peephole2
3054 struct peep2_insn_data
3056 rtx insn;
3057 regset live_before;
3060 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
3061 static int peep2_current;
3063 static bool peep2_do_rebuild_jump_labels;
3064 static bool peep2_do_cleanup_cfg;
3066 /* The number of instructions available to match a peep2. */
3067 int peep2_current_count;
3069 /* A non-insn marker indicating the last insn of the block.
3070 The live_before regset for this element is correct, indicating
3071 DF_LIVE_OUT for the block. */
3072 #define PEEP2_EOB pc_rtx
3074 /* Wrap N to fit into the peep2_insn_data buffer. */
3076 static int
3077 peep2_buf_position (int n)
3079 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3080 n -= MAX_INSNS_PER_PEEP2 + 1;
3081 return n;
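/* Editorial example, not part of recog.c: the buffer has
   MAX_INSNS_PER_PEEP2 + 1 slots, so assuming MAX_INSNS_PER_PEEP2 is 5
   (its value in recog.h at the time of writing), peep2_buf_position (7)
   yields 1.  A single subtraction suffices instead of a general modulo
   because callers never pass an index more than one buffer length out
   of range.  */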
3084 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
3085 does not exist. Used by the recognizer to find the next insn to match
3086 in a multi-insn pattern. */
3088 rtx
3089 peep2_next_insn (int n)
3091 gcc_assert (n <= peep2_current_count);
3093 n = peep2_buf_position (peep2_current + n);
3095 return peep2_insn_data[n].insn;
3098 /* Return true if REGNO is dead before the Nth non-note insn
3099 after `current'. */
3101 int
3102 peep2_regno_dead_p (int ofs, int regno)
3104 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3106 ofs = peep2_buf_position (peep2_current + ofs);
3108 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3110 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3113 /* Similarly for a REG. */
3115 int
3116 peep2_reg_dead_p (int ofs, rtx reg)
3118 int regno, n;
3120 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
3122 ofs = peep2_buf_position (peep2_current + ofs);
3124 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
3126 regno = REGNO (reg);
3127 n = hard_regno_nregs[regno][GET_MODE (reg)];
3128 while (--n >= 0)
3129 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3130 return 0;
3131 return 1;
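/* Editorial note, not part of recog.c: these predicates are typically
   called from the condition of a define_peephole2 in a machine
   description, e.g. a condition such as

     peep2_reg_dead_p (2, operands[0])

   (the offset 2 and operands[0] are hypothetical) to require that a
   register is dead at a given point in the matched sequence.  */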
3134 /* Regno offset to be used in the register search. */
3135 static int search_ofs;
3137 /* Try to find a hard register of mode MODE, matching the register class in
3138 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3139 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3140 in which case the only condition is that the register must be available
3141 before CURRENT_INSN.
3142 Registers that already have bits set in REG_SET will not be considered.
3144 If an appropriate register is available, it will be returned and the
3145 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3146 returned. */
3148 rtx
3149 peep2_find_free_register (int from, int to, const char *class_str,
3150 machine_mode mode, HARD_REG_SET *reg_set)
3152 enum reg_class cl;
3153 HARD_REG_SET live;
3154 df_ref def;
3155 int i;
3157 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
3158 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
3160 from = peep2_buf_position (peep2_current + from);
3161 to = peep2_buf_position (peep2_current + to);
3163 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3164 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3166 while (from != to)
3168 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3170 /* Don't use registers set or clobbered by the insn. */
3171 FOR_EACH_INSN_DEF (def, peep2_insn_data[from].insn)
3172 SET_HARD_REG_BIT (live, DF_REF_REGNO (def));
3174 from = peep2_buf_position (from + 1);
3177 cl = reg_class_for_constraint (lookup_constraint (class_str));
3179 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3181 int raw_regno, regno, success, j;
3183 /* Distribute the free registers as much as possible. */
3184 raw_regno = search_ofs + i;
3185 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3186 raw_regno -= FIRST_PSEUDO_REGISTER;
3187 #ifdef REG_ALLOC_ORDER
3188 regno = reg_alloc_order[raw_regno];
3189 #else
3190 regno = raw_regno;
3191 #endif
3193 /* Can it support the mode we need? */
3194 if (! HARD_REGNO_MODE_OK (regno, mode))
3195 continue;
3197 success = 1;
3198 for (j = 0; success && j < hard_regno_nregs[regno][mode]; j++)
3200 /* Don't allocate fixed registers. */
3201 if (fixed_regs[regno + j])
3203 success = 0;
3204 break;
3206 /* Don't allocate global registers. */
3207 if (global_regs[regno + j])
3209 success = 0;
3210 break;
3212 /* Make sure the register is of the right class. */
3213 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno + j))
3215 success = 0;
3216 break;
3218 /* And that we don't create an extra save/restore. */
3219 if (! call_used_regs[regno + j] && ! df_regs_ever_live_p (regno + j))
3221 success = 0;
3222 break;
3225 if (! targetm.hard_regno_scratch_ok (regno + j))
3227 success = 0;
3228 break;
3231 /* And we don't clobber traceback for noreturn functions. */
3232 if ((regno + j == FRAME_POINTER_REGNUM
3233 || regno + j == HARD_FRAME_POINTER_REGNUM)
3234 && (! reload_completed || frame_pointer_needed))
3236 success = 0;
3237 break;
3240 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3241 || TEST_HARD_REG_BIT (live, regno + j))
3243 success = 0;
3244 break;
3248 if (success)
3250 add_to_hard_reg_set (reg_set, mode, regno);
3252 /* Start the next search with the next register. */
3253 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3254 raw_regno = 0;
3255 search_ofs = raw_regno;
3257 return gen_rtx_REG (mode, regno);
3261 search_ofs = 0;
3262 return NULL_RTX;
3265 /* Forget all currently tracked instructions, only remember current
3266 LIVE regset. */
3268 static void
3269 peep2_reinit_state (regset live)
3271 int i;
3273 /* Indicate that all slots except the last hold invalid data. */
3274 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3275 peep2_insn_data[i].insn = NULL_RTX;
3276 peep2_current_count = 0;
3278 /* Indicate that the last slot contains live_after data. */
3279 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3280 peep2_current = MAX_INSNS_PER_PEEP2;
3282 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3285 /* While scanning basic block BB, we found a match of length MATCH_LEN,
3286 starting at INSN. Perform the replacement, removing the old insns and
3287 replacing them with ATTEMPT. Returns the last insn emitted, or NULL
3288 if the replacement is rejected. */
3290 static rtx_insn *
3291 peep2_attempt (basic_block bb, rtx uncast_insn, int match_len, rtx_insn *attempt)
3293 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);
3294 int i;
3295 rtx_insn *last, *before_try, *x;
3296 rtx eh_note, as_note;
3297 rtx_insn *old_insn;
3298 rtx_insn *new_insn;
3299 bool was_call = false;
3301 /* If we are splitting an RTX_FRAME_RELATED_P insn, do not allow it to
3302 match more than one insn, or to be split into more than one insn. */
3303 old_insn = as_a <rtx_insn *> (peep2_insn_data[peep2_current].insn);
3304 if (RTX_FRAME_RELATED_P (old_insn))
3306 bool any_note = false;
3307 rtx note;
3309 if (match_len != 0)
3310 return NULL;
3312 /* Look for one "active" insn. I.e. ignore any "clobber" insns that
3313 may be in the stream for the purpose of register allocation. */
3314 if (active_insn_p (attempt))
3315 new_insn = attempt;
3316 else
3317 new_insn = next_active_insn (attempt);
3318 if (next_active_insn (new_insn))
3319 return NULL;
3321 /* We have a 1-1 replacement. Copy over any frame-related info. */
3322 RTX_FRAME_RELATED_P (new_insn) = 1;
3324 /* Allow the backend to fill in a note during the split. */
3325 for (note = REG_NOTES (new_insn); note ; note = XEXP (note, 1))
3326 switch (REG_NOTE_KIND (note))
3328 case REG_FRAME_RELATED_EXPR:
3329 case REG_CFA_DEF_CFA:
3330 case REG_CFA_ADJUST_CFA:
3331 case REG_CFA_OFFSET:
3332 case REG_CFA_REGISTER:
3333 case REG_CFA_EXPRESSION:
3334 case REG_CFA_RESTORE:
3335 case REG_CFA_SET_VDRAP:
3336 any_note = true;
3337 break;
3338 default:
3339 break;
3342 /* If the backend didn't supply a note, copy one over. */
3343 if (!any_note)
3344 for (note = REG_NOTES (old_insn); note ; note = XEXP (note, 1))
3345 switch (REG_NOTE_KIND (note))
3347 case REG_FRAME_RELATED_EXPR:
3348 case REG_CFA_DEF_CFA:
3349 case REG_CFA_ADJUST_CFA:
3350 case REG_CFA_OFFSET:
3351 case REG_CFA_REGISTER:
3352 case REG_CFA_EXPRESSION:
3353 case REG_CFA_RESTORE:
3354 case REG_CFA_SET_VDRAP:
3355 add_reg_note (new_insn, REG_NOTE_KIND (note), XEXP (note, 0));
3356 any_note = true;
3357 break;
3358 default:
3359 break;
3362 /* If there still isn't a note, make sure the unwind info sees the
3363 same expression as before the split. */
3364 if (!any_note)
3366 rtx old_set, new_set;
3368 /* The old insn had better have been simple, or annotated. */
3369 old_set = single_set (old_insn);
3370 gcc_assert (old_set != NULL);
3372 new_set = single_set (new_insn);
3373 if (!new_set || !rtx_equal_p (new_set, old_set))
3374 add_reg_note (new_insn, REG_FRAME_RELATED_EXPR, old_set);
3377 /* Copy prologue/epilogue status. This is required in order to keep
3378 proper placement of EPILOGUE_BEG and the DW_CFA_remember_state. */
3379 maybe_copy_prologue_epilogue_insn (old_insn, new_insn);
3382 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3383 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3384 cfg-related call notes. */
3385 for (i = 0; i <= match_len; ++i)
3387 int j;
3388 rtx note;
3390 j = peep2_buf_position (peep2_current + i);
3391 old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3392 if (!CALL_P (old_insn))
3393 continue;
3394 was_call = true;
3396 new_insn = attempt;
3397 while (new_insn != NULL_RTX)
3399 if (CALL_P (new_insn))
3400 break;
3401 new_insn = NEXT_INSN (new_insn);
3404 gcc_assert (new_insn != NULL_RTX);
3406 CALL_INSN_FUNCTION_USAGE (new_insn)
3407 = CALL_INSN_FUNCTION_USAGE (old_insn);
3408 SIBLING_CALL_P (new_insn) = SIBLING_CALL_P (old_insn);
3410 for (note = REG_NOTES (old_insn);
3411 note;
3412 note = XEXP (note, 1))
3413 switch (REG_NOTE_KIND (note))
3415 case REG_NORETURN:
3416 case REG_SETJMP:
3417 case REG_TM:
3418 add_reg_note (new_insn, REG_NOTE_KIND (note),
3419 XEXP (note, 0));
3420 break;
3421 default:
3422 /* Discard all other reg notes. */
3423 break;
3426 /* Croak if there is another call in the sequence. */
3427 while (++i <= match_len)
3429 j = peep2_buf_position (peep2_current + i);
3430 old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3431 gcc_assert (!CALL_P (old_insn));
3433 break;
3436 /* If we matched any instruction that had a REG_ARGS_SIZE, then
3437 move those notes over to the new sequence. */
3438 as_note = NULL;
3439 for (i = match_len; i >= 0; --i)
3441 int j = peep2_buf_position (peep2_current + i);
3442 old_insn = as_a <rtx_insn *> (peep2_insn_data[j].insn);
3444 as_note = find_reg_note (old_insn, REG_ARGS_SIZE, NULL);
3445 if (as_note)
3446 break;
3449 i = peep2_buf_position (peep2_current + match_len);
3450 eh_note = find_reg_note (peep2_insn_data[i].insn, REG_EH_REGION, NULL_RTX);
3452 /* Replace the old sequence with the new. */
3453 rtx_insn *peepinsn = as_a <rtx_insn *> (peep2_insn_data[i].insn);
3454 last = emit_insn_after_setloc (attempt,
3455 peep2_insn_data[i].insn,
3456 INSN_LOCATION (peepinsn));
3457 before_try = PREV_INSN (insn);
3458 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3460 /* Re-insert the EH_REGION notes. */
3461 if (eh_note || (was_call && nonlocal_goto_handler_labels))
3463 edge eh_edge;
3464 edge_iterator ei;
3466 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3467 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3468 break;
3470 if (eh_note)
3471 copy_reg_eh_region_note_backward (eh_note, last, before_try);
3473 if (eh_edge)
3474 for (x = last; x != before_try; x = PREV_INSN (x))
3475 if (x != BB_END (bb)
3476 && (can_throw_internal (x)
3477 || can_nonlocal_goto (x)))
3479 edge nfte, nehe;
3480 int flags;
3482 nfte = split_block (bb, x);
3483 flags = (eh_edge->flags
3484 & (EDGE_EH | EDGE_ABNORMAL));
3485 if (CALL_P (x))
3486 flags |= EDGE_ABNORMAL_CALL;
3487 nehe = make_edge (nfte->src, eh_edge->dest,
3488 flags);
3490 nehe->probability = eh_edge->probability;
3491 nfte->probability
3492 = REG_BR_PROB_BASE - nehe->probability;
3494 peep2_do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3495 bb = nfte->src;
3496 eh_edge = nehe;
3499 /* A possibly trapping insn may have been turned into a
3500 non-trapping one; zap any now-dummy outgoing edges. */
3501 peep2_do_cleanup_cfg |= purge_dead_edges (bb);
3504 /* Re-insert the ARGS_SIZE notes. */
3505 if (as_note)
3506 fixup_args_size_notes (before_try, last, INTVAL (XEXP (as_note, 0)));
3508 /* If we generated a jump instruction, it won't have
3509 JUMP_LABEL set. Recompute after we're done. */
3510 for (x = last; x != before_try; x = PREV_INSN (x))
3511 if (JUMP_P (x))
3513 peep2_do_rebuild_jump_labels = true;
3514 break;
3517 return last;
3520 /* After performing a replacement in basic block BB, fix up the life
3521 information in our buffer. LAST is the last of the insns that we
3522 emitted as a replacement. PREV is the insn before the start of
3523 the replacement. MATCH_LEN is the number of instructions that were
3524 matched, and which now need to be replaced in the buffer. */
3526 static void
3527 peep2_update_life (basic_block bb, int match_len, rtx_insn *last,
3528 rtx_insn *prev)
3530 int i = peep2_buf_position (peep2_current + match_len + 1);
3531 rtx_insn *x;
3532 regset_head live;
3534 INIT_REG_SET (&live);
3535 COPY_REG_SET (&live, peep2_insn_data[i].live_before);
3537 gcc_assert (peep2_current_count >= match_len + 1);
3538 peep2_current_count -= match_len + 1;
3540 x = last;
3543 if (INSN_P (x))
3545 df_insn_rescan (x);
3546 if (peep2_current_count < MAX_INSNS_PER_PEEP2)
3548 peep2_current_count++;
3549 if (--i < 0)
3550 i = MAX_INSNS_PER_PEEP2;
3551 peep2_insn_data[i].insn = x;
3552 df_simulate_one_insn_backwards (bb, x, &live);
3553 COPY_REG_SET (peep2_insn_data[i].live_before, &live);
3556 x = PREV_INSN (x);
3558 while (x != prev);
3559 CLEAR_REG_SET (&live);
3561 peep2_current = i;
3564 /* Add INSN, which is in BB, at the end of the peep2 insn buffer if possible.
3565 Return true if we added it, false otherwise. The caller will try to match
3566 peepholes against the buffer if we return false; otherwise it will try to
3567 add more instructions to the buffer. */
3569 static bool
3570 peep2_fill_buffer (basic_block bb, rtx insn, regset live)
3572 int pos;
3574 /* Once we have filled the maximum number of insns the buffer can hold,
3575 allow the caller to match the insns against peepholes. We wait until
3576 the buffer is full in case the target has similar peepholes of different
3577 length; we always want to match the longest if possible. */
3578 if (peep2_current_count == MAX_INSNS_PER_PEEP2)
3579 return false;
3581 /* If an insn has RTX_FRAME_RELATED_P set, do not allow it to be matched with
3582 any other pattern, lest it change the semantics of the frame info. */
3583 if (RTX_FRAME_RELATED_P (insn))
3585 /* Let the buffer drain first. */
3586 if (peep2_current_count > 0)
3587 return false;
3588 /* Now the insn will be the only thing in the buffer. */
3591 pos = peep2_buf_position (peep2_current + peep2_current_count);
3592 peep2_insn_data[pos].insn = insn;
3593 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3594 peep2_current_count++;
3596 df_simulate_one_insn_forwards (bb, as_a <rtx_insn *> (insn), live);
3597 return true;
3600 /* Perform the peephole2 optimization pass. */
3602 static void
3603 peephole2_optimize (void)
3605 rtx_insn *insn;
3606 bitmap live;
3607 int i;
3608 basic_block bb;
3610 peep2_do_cleanup_cfg = false;
3611 peep2_do_rebuild_jump_labels = false;
3613 df_set_flags (DF_LR_RUN_DCE);
3614 df_note_add_problem ();
3615 df_analyze ();
3617 /* Initialize the regsets we're going to use. */
3618 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3619 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3620 search_ofs = 0;
3621 live = BITMAP_ALLOC (&reg_obstack);
3623 FOR_EACH_BB_REVERSE_FN (bb, cfun)
3625 bool past_end = false;
3626 int pos;
3628 rtl_profile_for_bb (bb);
3630 /* Start up propagation. */
3631 bitmap_copy (live, DF_LR_IN (bb));
3632 df_simulate_initialize_forwards (bb, live);
3633 peep2_reinit_state (live);
3635 insn = BB_HEAD (bb);
3636 for (;;)
3638 rtx_insn *attempt;
3639 rtx head;
3640 int match_len;
3642 if (!past_end && !NONDEBUG_INSN_P (insn))
3644 next_insn:
3645 insn = NEXT_INSN (insn);
3646 if (insn == NEXT_INSN (BB_END (bb)))
3647 past_end = true;
3648 continue;
3650 if (!past_end && peep2_fill_buffer (bb, insn, live))
3651 goto next_insn;
3653 /* If we did not fill an empty buffer, it signals the end of the
3654 block. */
3655 if (peep2_current_count == 0)
3656 break;
3658 /* The buffer filled to the current maximum, so try to match. */
3660 pos = peep2_buf_position (peep2_current + peep2_current_count);
3661 peep2_insn_data[pos].insn = PEEP2_EOB;
3662 COPY_REG_SET (peep2_insn_data[pos].live_before, live);
3664 /* Match the peephole. */
3665 head = peep2_insn_data[peep2_current].insn;
3666 attempt = safe_as_a <rtx_insn *> (
3667 peephole2_insns (PATTERN (head), head, &match_len));
3668 if (attempt != NULL)
3670 rtx_insn *last = peep2_attempt (bb, head, match_len, attempt);
3671 if (last)
3673 peep2_update_life (bb, match_len, last, PREV_INSN (attempt));
3674 continue;
3678 /* No match: advance the buffer by one insn. */
3679 peep2_current = peep2_buf_position (peep2_current + 1);
3680 peep2_current_count--;
3684 default_rtl_profile ();
3685 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3686 BITMAP_FREE (peep2_insn_data[i].live_before);
3687 BITMAP_FREE (live);
3688 if (peep2_do_rebuild_jump_labels)
3689 rebuild_jump_labels (get_insns ());
3690 if (peep2_do_cleanup_cfg)
3691 cleanup_cfg (CLEANUP_CFG_CHANGED);
3693 #endif /* HAVE_peephole2 */
3695 /* Common predicates for use with define_bypass. */
3697 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3698 data not the address operand(s) of the store. IN_INSN and OUT_INSN
3699 must be either a single_set or a PARALLEL with SETs inside. */
3701 int
3702 store_data_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3704 rtx out_set, in_set;
3705 rtx out_pat, in_pat;
3706 rtx out_exp, in_exp;
3707 int i, j;
3709 in_set = single_set (in_insn);
3710 if (in_set)
3712 if (!MEM_P (SET_DEST (in_set)))
3713 return false;
3715 out_set = single_set (out_insn);
3716 if (out_set)
3718 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3719 return false;
3721 else
3723 out_pat = PATTERN (out_insn);
3725 if (GET_CODE (out_pat) != PARALLEL)
3726 return false;
3728 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3730 out_exp = XVECEXP (out_pat, 0, i);
3732 if (GET_CODE (out_exp) == CLOBBER)
3733 continue;
3735 gcc_assert (GET_CODE (out_exp) == SET);
3737 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3738 return false;
3742 else
3744 in_pat = PATTERN (in_insn);
3745 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3747 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3749 in_exp = XVECEXP (in_pat, 0, i);
3751 if (GET_CODE (in_exp) == CLOBBER)
3752 continue;
3754 gcc_assert (GET_CODE (in_exp) == SET);
3756 if (!MEM_P (SET_DEST (in_exp)))
3757 return false;
3759 out_set = single_set (out_insn);
3760 if (out_set)
3762 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3763 return false;
3765 else
3767 out_pat = PATTERN (out_insn);
3768 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3770 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3772 out_exp = XVECEXP (out_pat, 0, j);
3774 if (GET_CODE (out_exp) == CLOBBER)
3775 continue;
3777 gcc_assert (GET_CODE (out_exp) == SET);
3779 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3780 return false;
3786 return true;
3789 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3790 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3791 set or a multiple set; IN_INSN should be a single_set for the result to be
3792 meaningful, but for convenience of insn categorization it may be any JUMP or CALL insn. */
3794 int
3795 if_test_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
3797 rtx out_set, in_set;
3799 in_set = single_set (in_insn);
3800 if (! in_set)
3802 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3803 return false;
3806 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3807 return false;
3808 in_set = SET_SRC (in_set);
3810 out_set = single_set (out_insn);
3811 if (out_set)
3813 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3814 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3815 return false;
3817 else
3819 rtx out_pat;
3820 int i;
3822 out_pat = PATTERN (out_insn);
3823 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3825 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3827 rtx exp = XVECEXP (out_pat, 0, i);
3829 if (GET_CODE (exp) == CLOBBER)
3830 continue;
3832 gcc_assert (GET_CODE (exp) == SET);
3834 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3835 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3836 return false;
3840 return true;
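/* Editorial sketch, not part of recog.c: machine descriptions reference
   these predicates by name as the guard of a define_bypass, e.g.

     (define_bypass 1 "producer_reservation" "store_reservation"
                    "store_data_bypass_p")

   where the latency 1 and the insn reservation names are hypothetical;
   the shortened latency applies only when the guard returns nonzero for
   the producer/consumer pair.  */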
3843 static unsigned int
3844 rest_of_handle_peephole2 (void)
3846 #ifdef HAVE_peephole2
3847 peephole2_optimize ();
3848 #endif
3849 return 0;
3852 namespace {
3854 const pass_data pass_data_peephole2 =
3856 RTL_PASS, /* type */
3857 "peephole2", /* name */
3858 OPTGROUP_NONE, /* optinfo_flags */
3859 TV_PEEPHOLE2, /* tv_id */
3860 0, /* properties_required */
3861 0, /* properties_provided */
3862 0, /* properties_destroyed */
3863 0, /* todo_flags_start */
3864 TODO_df_finish, /* todo_flags_finish */
3867 class pass_peephole2 : public rtl_opt_pass
3869 public:
3870 pass_peephole2 (gcc::context *ctxt)
3871 : rtl_opt_pass (pass_data_peephole2, ctxt)
3874 /* opt_pass methods: */
3875 /* The epiphany backend creates a second instance of this pass, so we need
3876 a clone method. */
3877 opt_pass * clone () { return new pass_peephole2 (m_ctxt); }
3878 virtual bool gate (function *) { return (optimize > 0 && flag_peephole2); }
3879 virtual unsigned int execute (function *)
3881 return rest_of_handle_peephole2 ();
3884 }; // class pass_peephole2
3886 } // anon namespace
3888 rtl_opt_pass *
3889 make_pass_peephole2 (gcc::context *ctxt)
3891 return new pass_peephole2 (ctxt);
3894 namespace {
3896 const pass_data pass_data_split_all_insns =
3898 RTL_PASS, /* type */
3899 "split1", /* name */
3900 OPTGROUP_NONE, /* optinfo_flags */
3901 TV_NONE, /* tv_id */
3902 0, /* properties_required */
3903 0, /* properties_provided */
3904 0, /* properties_destroyed */
3905 0, /* todo_flags_start */
3906 0, /* todo_flags_finish */
3909 class pass_split_all_insns : public rtl_opt_pass
3911 public:
3912 pass_split_all_insns (gcc::context *ctxt)
3913 : rtl_opt_pass (pass_data_split_all_insns, ctxt)
3916 /* opt_pass methods: */
3917 /* The epiphany backend creates a second instance of this pass, so
3918 we need a clone method. */
3919 opt_pass * clone () { return new pass_split_all_insns (m_ctxt); }
3920 virtual unsigned int execute (function *)
3922 split_all_insns ();
3923 return 0;
3926 }; // class pass_split_all_insns
3928 } // anon namespace
3930 rtl_opt_pass *
3931 make_pass_split_all_insns (gcc::context *ctxt)
3933 return new pass_split_all_insns (ctxt);
3936 static unsigned int
3937 rest_of_handle_split_after_reload (void)
3939 /* If optimizing, then go ahead and split insns now. */
3940 #ifndef STACK_REGS
3941 if (optimize > 0)
3942 #endif
3943 split_all_insns ();
3944 return 0;
3947 namespace {
3949 const pass_data pass_data_split_after_reload =
3951 RTL_PASS, /* type */
3952 "split2", /* name */
3953 OPTGROUP_NONE, /* optinfo_flags */
3954 TV_NONE, /* tv_id */
3955 0, /* properties_required */
3956 0, /* properties_provided */
3957 0, /* properties_destroyed */
3958 0, /* todo_flags_start */
3959 0, /* todo_flags_finish */
3962 class pass_split_after_reload : public rtl_opt_pass
3964 public:
3965 pass_split_after_reload (gcc::context *ctxt)
3966 : rtl_opt_pass (pass_data_split_after_reload, ctxt)
3969 /* opt_pass methods: */
3970 virtual unsigned int execute (function *)
3972 return rest_of_handle_split_after_reload ();
3975 }; // class pass_split_after_reload
3977 } // anon namespace
3979 rtl_opt_pass *
3980 make_pass_split_after_reload (gcc::context *ctxt)
3982 return new pass_split_after_reload (ctxt);
3985 namespace {
3987 const pass_data pass_data_split_before_regstack =
3989 RTL_PASS, /* type */
3990 "split3", /* name */
3991 OPTGROUP_NONE, /* optinfo_flags */
3992 TV_NONE, /* tv_id */
3993 0, /* properties_required */
3994 0, /* properties_provided */
3995 0, /* properties_destroyed */
3996 0, /* todo_flags_start */
3997 0, /* todo_flags_finish */
4000 class pass_split_before_regstack : public rtl_opt_pass
4002 public:
4003 pass_split_before_regstack (gcc::context *ctxt)
4004 : rtl_opt_pass (pass_data_split_before_regstack, ctxt)
4007 /* opt_pass methods: */
4008 virtual bool gate (function *);
4009 virtual unsigned int execute (function *)
4011 split_all_insns ();
4012 return 0;
4015 }; // class pass_split_before_regstack
4017 bool
4018 pass_split_before_regstack::gate (function *)
4020 #if HAVE_ATTR_length && defined (STACK_REGS)
4021 /* If flow2 creates new instructions which need splitting,
4022 and scheduling after reload is not done, they might not be
4023 split until final, which does not allow splitting
4024 when HAVE_ATTR_length is defined. */
4025 # ifdef INSN_SCHEDULING
4026 return (optimize && !flag_schedule_insns_after_reload);
4027 # else
4028 return (optimize);
4029 # endif
4030 #else
4031 return 0;
4032 #endif
4035 } // anon namespace
4037 rtl_opt_pass *
4038 make_pass_split_before_regstack (gcc::context *ctxt)
4040 return new pass_split_before_regstack (ctxt);
4043 static unsigned int
4044 rest_of_handle_split_before_sched2 (void)
4046 #ifdef INSN_SCHEDULING
4047 split_all_insns ();
4048 #endif
4049 return 0;
4052 namespace {
4054 const pass_data pass_data_split_before_sched2 =
4056 RTL_PASS, /* type */
4057 "split4", /* name */
4058 OPTGROUP_NONE, /* optinfo_flags */
4059 TV_NONE, /* tv_id */
4060 0, /* properties_required */
4061 0, /* properties_provided */
4062 0, /* properties_destroyed */
4063 0, /* todo_flags_start */
4064 0, /* todo_flags_finish */
4067 class pass_split_before_sched2 : public rtl_opt_pass
4069 public:
4070 pass_split_before_sched2 (gcc::context *ctxt)
4071 : rtl_opt_pass (pass_data_split_before_sched2, ctxt)
4074 /* opt_pass methods: */
4075 virtual bool gate (function *)
4077 #ifdef INSN_SCHEDULING
4078 return optimize > 0 && flag_schedule_insns_after_reload;
4079 #else
4080 return false;
4081 #endif
4084 virtual unsigned int execute (function *)
4086 return rest_of_handle_split_before_sched2 ();
4089 }; // class pass_split_before_sched2
4091 } // anon namespace
4093 rtl_opt_pass *
4094 make_pass_split_before_sched2 (gcc::context *ctxt)
4096 return new pass_split_before_sched2 (ctxt);
4099 namespace {
4101 const pass_data pass_data_split_for_shorten_branches =
4103 RTL_PASS, /* type */
4104 "split5", /* name */
4105 OPTGROUP_NONE, /* optinfo_flags */
4106 TV_NONE, /* tv_id */
4107 0, /* properties_required */
4108 0, /* properties_provided */
4109 0, /* properties_destroyed */
4110 0, /* todo_flags_start */
4111 0, /* todo_flags_finish */
4114 class pass_split_for_shorten_branches : public rtl_opt_pass
4116 public:
4117 pass_split_for_shorten_branches (gcc::context *ctxt)
4118 : rtl_opt_pass (pass_data_split_for_shorten_branches, ctxt)
4121 /* opt_pass methods: */
4122 virtual bool gate (function *)
4124 /* The placement of the splitting that we do for shorten_branches
4125 depends on whether regstack is used by the target or not. */
4126 #if HAVE_ATTR_length && !defined (STACK_REGS)
4127 return true;
4128 #else
4129 return false;
4130 #endif
4133 virtual unsigned int execute (function *)
4135 return split_all_insns_noflow ();
4138 }; // class pass_split_for_shorten_branches
4140 } // anon namespace
4142 rtl_opt_pass *
4143 make_pass_split_for_shorten_branches (gcc::context *ctxt)
4145 return new pass_split_for_shorten_branches (ctxt);
4148 /* (Re)initialize the target information after a change in target. */
4150 void
4151 recog_init ()
4153 /* The information is zero-initialized, so we don't need to do anything
4154 first time round. */
4155 if (!this_target_recog->x_initialized)
4157 this_target_recog->x_initialized = true;
4158 return;
4160 memset (this_target_recog->x_bool_attr_masks, 0,
4161 sizeof (this_target_recog->x_bool_attr_masks));
4162 for (int i = 0; i < LAST_INSN_CODE; ++i)
4163 if (this_target_recog->x_op_alt[i])
4165 free (this_target_recog->x_op_alt[i]);
4166 this_target_recog->x_op_alt[i] = 0;