gcc/recog.c
/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
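
/* Illustrative note (not part of the original file): on a target where
   the stack grows downward, e.g. x86, a push of a word-sized value is
   the RTL

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI r))

   so STACK_PUSH_CODE is PRE_DEC and the matching pop uses POST_INC.
   On an upward-growing stack the two pairs are swapped.  */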
#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument, defaulting
   UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
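
/* Illustrative usage sketch (not part of the original file): callers
   queue several dependent changes with IN_GROUP = 1 and then commit or
   roll back the whole group atomically:

       validate_change (insn, &XEXP (x, 0), a, 1);
       validate_change (insn, &XEXP (x, 1), b, 1);
       if (!apply_change_group ())
         ...

   If re-recognition fails, every queued change is undone and INSN is
   left untouched.  INSN, X, A and B here are hypothetical.  */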
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return the number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

int
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to the same
         instruction are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
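
/* Illustrative sketch (not part of the original file):
   num_validated_changes, verify_changes and cancel_changes support
   partial rollback within a group.  A pass can checkpoint, queue a
   speculative change, and retract only the tail of the group if it
   does not verify:

       int checkpoint = num_validated_changes ();
       validate_change (insn, loc, new_rtx, 1);
       if (!verify_changes (checkpoint))
         cancel_changes (checkpoint);

   Earlier changes in the group are left queued.  INSN, LOC and
   NEW_RTX here are hypothetical.  */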
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow the substituted expression to have a different mode.  This is used
     by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
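
/* Illustrative usage sketch (not part of the original file): replacing
   one register with another throughout an insn and keeping the change
   only if the result is still recognizable:

       if (validate_replace_rtx (old_reg, new_reg, insn))
         ...

   Nothing needs to be undone on failure; apply_change_group has
   already cancelled the substitutions.  OLD_REG, NEW_REG and INSN are
   hypothetical.  */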
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
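
/* Illustrative note (not part of the original file): predicates such as
   general_operand are referenced by name from machine descriptions.  A
   typical match_operand in a hypothetical .md pattern looks like

       (define_insn "..."
         [(set (match_operand:SI 0 "general_operand" "=g")
               (match_operand:SI 1 "general_operand" "g"))]
         ...)

   and the genrecog-generated matcher then calls
   general_operand (op, SImode) when testing operands 0 and 1.  */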
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
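
/* Illustrative example (not part of the original file): on a
   hypothetical downward-growing target whose PUSH_ROUNDING rounds a
   2-byte HImode push up to 4 bytes, the push must take the PRE_MODIFY
   form checked above,

       (mem:HI (pre_modify (reg sp) (plus (reg sp) (const_int -4))))

   whereas an exactly-sized push is simply (mem:SI (pre_dec (reg sp))).  */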
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
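
/* Illustrative note (not part of the original file):
   GO_IF_LEGITIMATE_ADDRESS is a target macro that jumps to the given
   label when the address is valid, so the fall-through path above is
   the "invalid" case.  A trivial hypothetical definition accepting
   only plain register addresses would be

       #define GO_IF_LEGITIMATE_ADDRESS(MODE, X, LABEL) \
         do { if (REG_P (X)) goto LABEL; } while (0)
*/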
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before the end of the reload
       pass, because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is
             [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
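
/* Illustrative note (not part of the original file): for the GNU asm

       asm ("foo %1,%0" : "=r" (out) : "r" (in) : "cc");

   the insn body is roughly

       (parallel [(set (reg OUT) (asm_operands "foo %1,%0" "=r" 0 ...))
                  (clobber (reg:CC flags))])

   and asm_noperands returns 2 (one output plus one input); the
   clobbers are not counted.  Register names here are hypothetical.  */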
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;		/* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If the caller provided a constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
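
/* Illustrative note (not part of the original file): for
   asm ("add %0,%1" : "=r" (x) : "0" (x)), operand 1 carries the
   matching constraint "0".  When the CONSTRAINTS array is supplied,
   asm_operand_ok recurses with the constraint string of operand 0
   ("=r"); when it is not, the digit case above is treated as
   inconclusive and -1 is returned.  */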
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when the max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
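
/* Illustrative example (not part of the original file): for an SImode
   reference to (plus (reg R) (const_int 4)), mode_sz is 4, so the code
   above tests whether (plus (reg R) (const_int 7)) is still a valid
   QImode address, i.e. whether every byte of the 4-byte access
   [R+4, R+7] can be reached by offsetting.  R is hypothetical.  */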
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Like extract_insn, but save the extracted insn and don't extract it again
   when called again for the same insn, expecting that recog_data still
   contains valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
2006 /* Analyze INSN and fill in recog_data. */
2008 void
2009 extract_insn (rtx insn)
2011 int i;
2012 int icode;
2013 int noperands;
2014 rtx body = PATTERN (insn);
2016 recog_data.n_operands = 0;
2017 recog_data.n_alternatives = 0;
2018 recog_data.n_dups = 0;
2020 switch (GET_CODE (body))
2022 case USE:
2023 case CLOBBER:
2024 case ASM_INPUT:
2025 case ADDR_VEC:
2026 case ADDR_DIFF_VEC:
2027 return;
2029 case SET:
2030 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2031 goto asm_insn;
2032 else
2033 goto normal_insn;
2034 case PARALLEL:
2035 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2036 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2037 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2038 goto asm_insn;
2039 else
2040 goto normal_insn;
2041 case ASM_OPERANDS:
2042 asm_insn:
2043 recog_data.n_operands = noperands = asm_noperands (body);
2044 if (noperands >= 0)
2046 /* This insn is an `asm' with operands. */
2048 /* expand_asm_operands makes sure there aren't too many operands. */
2049 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2051 /* Now get the operand values and constraints out of the insn. */
2052 decode_asm_operands (body, recog_data.operand,
2053 recog_data.operand_loc,
2054 recog_data.constraints,
2055 recog_data.operand_mode, NULL);
2056 if (noperands > 0)
2058 const char *p = recog_data.constraints[0];
2059 recog_data.n_alternatives = 1;
2060 while (*p)
2061 recog_data.n_alternatives += (*p++ == ',');
2063 break;
2065 fatal_insn_not_found (insn);
2067 default:
2068 normal_insn:
2069 /* Ordinary insn: recognize it, get the operands via insn_extract
2070 and get the constraints. */
2072 icode = recog_memoized (insn);
2073 if (icode < 0)
2074 fatal_insn_not_found (insn);
2076 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2077 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2078 recog_data.n_dups = insn_data[icode].n_dups;
2080 insn_extract (insn);
2082 for (i = 0; i < noperands; i++)
2084 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2085 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2086 /* A VOIDmode match_operand gets its mode from the real operand. */
2087 if (recog_data.operand_mode[i] == VOIDmode)
2088 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2091 for (i = 0; i < noperands; i++)
2092 recog_data.operand_type[i]
2093 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2094 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2095 : OP_IN);
2097 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2099 if (INSN_CODE (insn) < 0)
2100 for (i = 0; i < recog_data.n_alternatives; i++)
2101 recog_data.alternative_enabled_p[i] = true;
2102 else
2104 recog_data.insn = insn;
2105 for (i = 0; i < recog_data.n_alternatives; i++)
2107 which_alternative = i;
2108 recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
2112 recog_data.insn = NULL;
2113 which_alternative = -1;
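/* A minimal sketch of consuming the recog_data fields that extract_insn
   fills in; the debug helper below is hypothetical.  */
#if 0
static void
debug_recog_operands (rtx insn)
{
  int i;
  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    fprintf (stderr, "operand %d: %s, mode %s, constraint \"%s\"\n",
	     i,
	     recog_data.operand_type[i] == OP_OUT ? "out"
	     : recog_data.operand_type[i] == OP_INOUT ? "inout" : "in",
	     GET_MODE_NAME (recog_data.operand_mode[i]),
	     recog_data.constraints[i]);
}
#endif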
2116 /* After calling extract_insn, you can use this function to extract some
2117 information from the constraint strings into a more usable form.
2118 The collected data is stored in recog_op_alt. */
2119 void
2120 preprocess_constraints (void)
2122 int i;
2124 for (i = 0; i < recog_data.n_operands; i++)
2125 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2126 * sizeof (struct operand_alternative)));
2128 for (i = 0; i < recog_data.n_operands; i++)
2130 int j;
2131 struct operand_alternative *op_alt;
2132 const char *p = recog_data.constraints[i];
2134 op_alt = recog_op_alt[i];
2136 for (j = 0; j < recog_data.n_alternatives; j++)
2138 op_alt[j].cl = NO_REGS;
2139 op_alt[j].constraint = p;
2140 op_alt[j].matches = -1;
2141 op_alt[j].matched = -1;
2143 if (!recog_data.alternative_enabled_p[j])
2145 p = skip_alternative (p);
2146 continue;
2149 if (*p == '\0' || *p == ',')
2151 op_alt[j].anything_ok = 1;
2152 continue;
2155 for (;;)
2157 char c = *p;
2158 if (c == '#')
2160 c = *++p;
2161 while (c != ',' && c != '\0');
2162 if (c == ',' || c == '\0')
2164 p++;
2165 break;
2168 switch (c)
2170 case '=': case '+': case '*': case '%':
2171 case 'E': case 'F': case 'G': case 'H':
2172 case 's': case 'i': case 'n':
2173 case 'I': case 'J': case 'K': case 'L':
2174 case 'M': case 'N': case 'O': case 'P':
2175 /* These don't say anything we care about. */
2176 break;
2178 case '?':
2179 op_alt[j].reject += 6;
2180 break;
2181 case '!':
2182 op_alt[j].reject += 600;
2183 break;
2184 case '&':
2185 op_alt[j].earlyclobber = 1;
2186 break;
2188 case '0': case '1': case '2': case '3': case '4':
2189 case '5': case '6': case '7': case '8': case '9':
2191 char *end;
2192 op_alt[j].matches = strtoul (p, &end, 10);
2193 recog_op_alt[op_alt[j].matches][j].matched = i;
2194 p = end;
2196 continue;
2198 case TARGET_MEM_CONSTRAINT:
2199 op_alt[j].memory_ok = 1;
2200 break;
2201 case '<':
2202 op_alt[j].decmem_ok = 1;
2203 break;
2204 case '>':
2205 op_alt[j].incmem_ok = 1;
2206 break;
2207 case 'V':
2208 op_alt[j].nonoffmem_ok = 1;
2209 break;
2210 case 'o':
2211 op_alt[j].offmem_ok = 1;
2212 break;
2213 case 'X':
2214 op_alt[j].anything_ok = 1;
2215 break;
2217 case 'p':
2218 op_alt[j].is_address = 1;
2219 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2220 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2221 break;
2223 case 'g':
2224 case 'r':
2225 op_alt[j].cl =
2226 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2227 break;
2229 default:
2230 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2232 op_alt[j].memory_ok = 1;
2233 break;
2235 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2237 op_alt[j].is_address = 1;
2238 op_alt[j].cl
2239 = (reg_class_subunion
2240 [(int) op_alt[j].cl]
2241 [(int) base_reg_class (VOIDmode, ADDRESS,
2242 SCRATCH)]);
2243 break;
2246 op_alt[j].cl
2247 = (reg_class_subunion
2248 [(int) op_alt[j].cl]
2249 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2250 break;
2252 p += CONSTRAINT_LEN (c, p);
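/* A minimal sketch of reading the recog_op_alt table built above: after
   extract_insn + preprocess_constraints, each [operand][alternative]
   cell summarizes one parsed alternative.  Hypothetical helper.  */
#if 0
static bool
operand_may_be_reg_p (int opno, int alt)
{
  const struct operand_alternative *oa = &recog_op_alt[opno][alt];
  /* A register class, an "anything goes" constraint, or a match with
     an earlier operand can each allow a REG here.  */
  return oa->cl != NO_REGS || oa->anything_ok || oa->matches >= 0;
}
#endif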
2258 /* Check the operands of an insn against the insn's operand constraints
2259 and return 1 if they are valid.
2260 The information about the insn's operands, constraints, operand modes
2261 etc. is obtained from the global variables set up by extract_insn.
2263 WHICH_ALTERNATIVE is set to a number which indicates which
2264 alternative of constraints was matched: 0 for the first alternative,
2265 1 for the next, etc.
2267 In addition, when two operands are required to match
2268 and it happens that the output operand is (reg) while the
2269 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2270 make the output operand look like the input.
2271 This is because the output operand is the one the template will print.
2273 This is used in final, just before printing the assembler code and by
2274 the routines that determine an insn's attribute.
2276 If STRICT is a positive value, it means that we have been
2277 called after reload has been completed. In that case, we must
2278 do all checks strictly. If it is zero, it means that we have been called
2279 before reload has completed. In that case, we first try to see if we can
2280 find an alternative that matches strictly. If not, we try again, this
2281 time assuming that reload will fix up the insn. This provides a "best
2282 guess" for the alternative and is used to compute attributes of insns prior
2283 to reload. A negative value of STRICT is used for this internal call. */
2285 struct funny_match
2287 int this_op, other;
2290 int
2291 constrain_operands (int strict)
2293 const char *constraints[MAX_RECOG_OPERANDS];
2294 int matching_operands[MAX_RECOG_OPERANDS];
2295 int earlyclobber[MAX_RECOG_OPERANDS];
2296 int c;
2298 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2299 int funny_match_index;
2301 which_alternative = 0;
2302 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2303 return 1;
2305 for (c = 0; c < recog_data.n_operands; c++)
2307 constraints[c] = recog_data.constraints[c];
2308 matching_operands[c] = -1;
2313 int seen_earlyclobber_at = -1;
2314 int opno;
2315 int lose = 0;
2316 funny_match_index = 0;
2318 if (!recog_data.alternative_enabled_p[which_alternative])
2320 int i;
2322 for (i = 0; i < recog_data.n_operands; i++)
2323 constraints[i] = skip_alternative (constraints[i]);
2325 which_alternative++;
2326 continue;
2329 for (opno = 0; opno < recog_data.n_operands; opno++)
2331 rtx op = recog_data.operand[opno];
2332 enum machine_mode mode = GET_MODE (op);
2333 const char *p = constraints[opno];
2334 int offset = 0;
2335 int win = 0;
2336 int val;
2337 int len;
2339 earlyclobber[opno] = 0;
2341 /* A unary operator may be accepted by the predicate, but it
2342 is irrelevant for matching constraints. */
2343 if (UNARY_P (op))
2344 op = XEXP (op, 0);
2346 if (GET_CODE (op) == SUBREG)
2348 if (REG_P (SUBREG_REG (op))
2349 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2350 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2351 GET_MODE (SUBREG_REG (op)),
2352 SUBREG_BYTE (op),
2353 GET_MODE (op));
2354 op = SUBREG_REG (op);
2357 /* An empty constraint or empty alternative
2358 allows anything which matched the pattern. */
2359 if (*p == 0 || *p == ',')
2360 win = 1;
2363 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2365 case '\0':
2366 len = 0;
2367 break;
2368 case ',':
2369 c = '\0';
2370 break;
2372 case '?': case '!': case '*': case '%':
2373 case '=': case '+':
2374 break;
2376 case '#':
2377 /* Ignore rest of this alternative as far as
2378 constraint checking is concerned. */
2380 p++;
2381 while (*p && *p != ',');
2382 len = 0;
2383 break;
2385 case '&':
2386 earlyclobber[opno] = 1;
2387 if (seen_earlyclobber_at < 0)
2388 seen_earlyclobber_at = opno;
2389 break;
2391 case '0': case '1': case '2': case '3': case '4':
2392 case '5': case '6': case '7': case '8': case '9':
2394 /* This operand must be the same as a previous one.
2395 This kind of constraint is used for instructions such
2396 as add when they take only two operands.
2398 Note that the lower-numbered operand is passed first.
2400 If we are not testing strictly, assume that this
2401 constraint will be satisfied. */
2403 char *end;
2404 int match;
2406 match = strtoul (p, &end, 10);
2407 p = end;
2409 if (strict < 0)
2410 val = 1;
2411 else
2413 rtx op1 = recog_data.operand[match];
2414 rtx op2 = recog_data.operand[opno];
2416 /* A unary operator may be accepted by the predicate,
2417 but it is irrelevant for matching constraints. */
2418 if (UNARY_P (op1))
2419 op1 = XEXP (op1, 0);
2420 if (UNARY_P (op2))
2421 op2 = XEXP (op2, 0);
2423 val = operands_match_p (op1, op2);
2426 matching_operands[opno] = match;
2427 matching_operands[match] = opno;
2429 if (val != 0)
2430 win = 1;
2432 /* If output is *x and input is *--x, arrange later
2433 to change the output to *--x as well, since the
2434 output op is the one that will be printed. */
2435 if (val == 2 && strict > 0)
2437 funny_match[funny_match_index].this_op = opno;
2438 funny_match[funny_match_index++].other = match;
2441 len = 0;
2442 break;
2444 case 'p':
2445 /* p is used for address_operands. When we are called by
2446 gen_reload, no one will have checked that the address is
2447 strictly valid, i.e., that all pseudos requiring hard regs
2448 have gotten them. */
2449 if (strict <= 0
2450 || (strict_memory_address_p (recog_data.operand_mode[opno],
2451 op)))
2452 win = 1;
2453 break;
2455 /* No need to check general_operand again;
2456 it was done in insn-recog.c. Well, except that reload
2457 doesn't check the validity of its replacements, but
2458 that should only matter when there's a bug. */
2459 case 'g':
2460 /* Anything goes unless it is a REG and really has a hard reg
2461 but the hard reg is not in the class GENERAL_REGS. */
2462 if (REG_P (op))
2464 if (strict < 0
2465 || GENERAL_REGS == ALL_REGS
2466 || (reload_in_progress
2467 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2468 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2469 win = 1;
2471 else if (strict < 0 || general_operand (op, mode))
2472 win = 1;
2473 break;
2475 case 'X':
2476 /* This is used for a MATCH_SCRATCH in the cases when
2477 we don't actually need anything. So anything goes
2478 any time. */
2479 win = 1;
2480 break;
2482 case TARGET_MEM_CONSTRAINT:
2483 /* Memory operands must be valid, to the extent
2484 required by STRICT. */
2485 if (MEM_P (op))
2487 if (strict > 0
2488 && !strict_memory_address_p (GET_MODE (op),
2489 XEXP (op, 0)))
2490 break;
2491 if (strict == 0
2492 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2493 break;
2494 win = 1;
2496 /* Before reload, accept what reload can turn into mem. */
2497 else if (strict < 0 && CONSTANT_P (op))
2498 win = 1;
2499 /* During reload, accept a pseudo. */
2500 else if (reload_in_progress && REG_P (op)
2501 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2502 win = 1;
2503 break;
2505 case '<':
2506 if (MEM_P (op)
2507 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2508 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2509 win = 1;
2510 break;
2512 case '>':
2513 if (MEM_P (op)
2514 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2515 || GET_CODE (XEXP (op, 0)) == POST_INC))
2516 win = 1;
2517 break;
2519 case 'E':
2520 case 'F':
2521 if (GET_CODE (op) == CONST_DOUBLE
2522 || (GET_CODE (op) == CONST_VECTOR
2523 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2524 win = 1;
2525 break;
2527 case 'G':
2528 case 'H':
2529 if (GET_CODE (op) == CONST_DOUBLE
2530 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2531 win = 1;
2532 break;
2534 case 's':
2535 if (GET_CODE (op) == CONST_INT
2536 || (GET_CODE (op) == CONST_DOUBLE
2537 && GET_MODE (op) == VOIDmode))
2538 break;
2539 case 'i':
2540 if (CONSTANT_P (op))
2541 win = 1;
2542 break;
2544 case 'n':
2545 if (GET_CODE (op) == CONST_INT
2546 || (GET_CODE (op) == CONST_DOUBLE
2547 && GET_MODE (op) == VOIDmode))
2548 win = 1;
2549 break;
2551 case 'I':
2552 case 'J':
2553 case 'K':
2554 case 'L':
2555 case 'M':
2556 case 'N':
2557 case 'O':
2558 case 'P':
2559 if (GET_CODE (op) == CONST_INT
2560 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2561 win = 1;
2562 break;
2564 case 'V':
2565 if (MEM_P (op)
2566 && ((strict > 0 && ! offsettable_memref_p (op))
2567 || (strict < 0
2568 && !(CONSTANT_P (op) || MEM_P (op)))
2569 || (reload_in_progress
2570 && !(REG_P (op)
2571 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2572 win = 1;
2573 break;
2575 case 'o':
2576 if ((strict > 0 && offsettable_memref_p (op))
2577 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2578 /* Before reload, accept what reload can handle. */
2579 || (strict < 0
2580 && (CONSTANT_P (op) || MEM_P (op)))
2581 /* During reload, accept a pseudo. */
2582 || (reload_in_progress && REG_P (op)
2583 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2584 win = 1;
2585 break;
2587 default:
2589 enum reg_class cl;
2591 cl = (c == 'r'
2592 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2593 if (cl != NO_REGS)
2595 if (strict < 0
2596 || (strict == 0
2597 && REG_P (op)
2598 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2599 || (strict == 0 && GET_CODE (op) == SCRATCH)
2600 || (REG_P (op)
2601 && reg_fits_class_p (op, cl, offset, mode)))
2602 win = 1;
2604 #ifdef EXTRA_CONSTRAINT_STR
2605 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2606 win = 1;
2608 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2609 /* Every memory operand can be reloaded to fit. */
2610 && ((strict < 0 && MEM_P (op))
2611 /* Before reload, accept what reload can turn
2612 into mem. */
2613 || (strict < 0 && CONSTANT_P (op))
2614 /* During reload, accept a pseudo. */
2615 || (reload_in_progress && REG_P (op)
2616 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2617 win = 1;
2618 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2619 /* Every address operand can be reloaded to fit. */
2620 && strict < 0)
2621 win = 1;
2622 #endif
2623 break;
2626 while (p += len, c);
2628 constraints[opno] = p;
2629 /* If this operand did not win somehow,
2630 this alternative loses. */
2631 if (! win)
2632 lose = 1;
2634 /* This alternative won; the operands are ok.
2635 Change whichever operands this alternative says to change. */
2636 if (! lose)
2638 int opno, eopno;
2640 /* See if any earlyclobber operand conflicts with some other
2641 operand. */
2643 if (strict > 0 && seen_earlyclobber_at >= 0)
2644 for (eopno = seen_earlyclobber_at;
2645 eopno < recog_data.n_operands;
2646 eopno++)
2647 /* Ignore earlyclobber operands now in memory,
2648 because we would often report failure when we have
2649 two memory operands, one of which was formerly a REG. */
2650 if (earlyclobber[eopno]
2651 && REG_P (recog_data.operand[eopno]))
2652 for (opno = 0; opno < recog_data.n_operands; opno++)
2653 if ((MEM_P (recog_data.operand[opno])
2654 || recog_data.operand_type[opno] != OP_OUT)
2655 && opno != eopno
2656 /* Ignore things like match_operator operands. */
2657 && *recog_data.constraints[opno] != 0
2658 && ! (matching_operands[opno] == eopno
2659 && operands_match_p (recog_data.operand[opno],
2660 recog_data.operand[eopno]))
2661 && ! safe_from_earlyclobber (recog_data.operand[opno],
2662 recog_data.operand[eopno]))
2663 lose = 1;
2665 if (! lose)
2667 while (--funny_match_index >= 0)
2669 recog_data.operand[funny_match[funny_match_index].other]
2670 = recog_data.operand[funny_match[funny_match_index].this_op];
2673 return 1;
2677 which_alternative++;
2679 while (which_alternative < recog_data.n_alternatives);
2681 which_alternative = -1;
2682 /* If we are about to reject this, but we are not to test strictly,
2683 try a very loose test. Only return failure if it fails also. */
2684 if (strict == 0)
2685 return constrain_operands (-1);
2686 else
2687 return 0;
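/* A minimal sketch of the standard recheck idiom built on the function
   above: after editing an insn's operands in place, verify that some
   alternative still accepts them, with strictness appropriate to the
   current phase.  Hypothetical helper; note that it clobbers
   recog_data.  */
#if 0
static int
insn_still_valid_p (rtx insn)
{
  extract_insn (insn);
  return constrain_operands (reload_completed);
}
#endif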
2690 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2691 is a hard reg in class CLASS when its regno is offset by OFFSET
2692 and changed to mode MODE.
2693 If REG occupies multiple hard regs, all of them must be in CLASS. */
2695 int
2696 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2697 enum machine_mode mode)
2699 int regno = REGNO (operand);
2701 if (cl == NO_REGS)
2702 return 0;
2704 return (regno < FIRST_PSEUDO_REGISTER
2705 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2706 mode, regno + offset));
2709 /* Split single instruction. Helper function for split_all_insns and
2710 split_all_insns_noflow. Return last insn in the sequence if successful,
2711 or NULL if unsuccessful. */
2713 static rtx
2714 split_insn (rtx insn)
2716 /* Split insns here to get max fine-grain parallelism. */
2717 rtx first = PREV_INSN (insn);
2718 rtx last = try_split (PATTERN (insn), insn, 1);
2719 rtx insn_set, last_set, note;
2721 if (last == insn)
2722 return NULL_RTX;
2724 /* If the original instruction was a single set that was known to be
2725 equivalent to a constant, see if we can say the same about the last
2726 instruction in the split sequence. The two instructions must set
2727 the same destination. */
2728 insn_set = single_set (insn);
2729 if (insn_set)
2731 last_set = single_set (last);
2732 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2734 note = find_reg_equal_equiv_note (insn);
2735 if (note && CONSTANT_P (XEXP (note, 0)))
2736 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2737 else if (CONSTANT_P (SET_SRC (insn_set)))
2738 set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
2742 /* try_split returns the NOTE that INSN became. */
2743 SET_INSN_DELETED (insn);
2745 /* ??? Coddle to md files that generate subregs in post-reload
2746 splitters instead of computing the proper hard register. */
2747 if (reload_completed && first != last)
2749 first = NEXT_INSN (first);
2750 for (;;)
2752 if (INSN_P (first))
2753 cleanup_subreg_operands (first);
2754 if (first == last)
2755 break;
2756 first = NEXT_INSN (first);
2760 return last;
2763 /* Split all insns in the function. */
2765 void
2766 split_all_insns (void)
2768 sbitmap blocks;
2769 bool changed;
2770 basic_block bb;
2772 blocks = sbitmap_alloc (last_basic_block);
2773 sbitmap_zero (blocks);
2774 changed = false;
2776 FOR_EACH_BB_REVERSE (bb)
2778 rtx insn, next;
2779 bool finish = false;
2781 rtl_profile_for_bb (bb);
2782 for (insn = BB_HEAD (bb); !finish ; insn = next)
2784 /* Can't use `next_real_insn' because that might go across
2785 CODE_LABELS and short-out basic blocks. */
2786 next = NEXT_INSN (insn);
2787 finish = (insn == BB_END (bb));
2788 if (INSN_P (insn))
2790 rtx set = single_set (insn);
2792 /* Don't split no-op move insns. These should silently
2793 disappear later in final. Splitting such insns would
2794 break the code that handles LIBCALL blocks. */
2795 if (set && set_noop_p (set))
2797 /* Nops get in the way while scheduling, so delete them
2798 now if register allocation has already been done. It
2799 is too risky to try to do this before register
2800 allocation, and there are unlikely to be very many
2801 nops then anyway. */
2802 if (reload_completed)
2803 delete_insn_and_edges (insn);
2805 else
2807 rtx last = split_insn (insn);
2808 if (last)
2810 /* The split sequence may include a barrier, but the
2811 BB boundary we are interested in will be set to the
2812 previous one. */
2814 while (BARRIER_P (last))
2815 last = PREV_INSN (last);
2816 SET_BIT (blocks, bb->index);
2817 changed = true;
2824 default_rtl_profile ();
2825 if (changed)
2826 find_many_sub_basic_blocks (blocks);
2828 #ifdef ENABLE_CHECKING
2829 verify_flow_info ();
2830 #endif
2832 sbitmap_free (blocks);
2835 /* Same as split_all_insns, but do not expect CFG to be available.
2836 Used by machine dependent reorg passes. */
2838 unsigned int
2839 split_all_insns_noflow (void)
2841 rtx next, insn;
2843 for (insn = get_insns (); insn; insn = next)
2845 next = NEXT_INSN (insn);
2846 if (INSN_P (insn))
2848 /* Don't split no-op move insns. These should silently
2849 disappear later in final. Splitting such insns would
2850 break the code that handles LIBCALL blocks. */
2851 rtx set = single_set (insn);
2852 if (set && set_noop_p (set))
2854 /* Nops get in the way while scheduling, so delete them
2855 now if register allocation has already been done. It
2856 is too risky to try to do this before register
2857 allocation, and there are unlikely to be very many
2858 nops then anyway.
2860 ??? Should we use delete_insn when the CFG isn't valid? */
2861 if (reload_completed)
2862 delete_insn_and_edges (insn);
2864 else
2865 split_insn (insn);
2868 return 0;
2871 #ifdef HAVE_peephole2
2872 struct peep2_insn_data
2874 rtx insn;
2875 regset live_before;
2878 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2879 static int peep2_current;
2880 /* The number of instructions available to match a peep2. */
2881 int peep2_current_count;
2883 /* A non-insn marker indicating the last insn of the block.
2884 The live_before regset for this element is correct, indicating
2885 DF_LIVE_OUT for the block. */
2886 #define PEEP2_EOB pc_rtx
2888 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2889 does not exist. Used by the recognizer to find the next insn to match
2890 in a multi-insn pattern. */
2892 rtx
2893 peep2_next_insn (int n)
2895 gcc_assert (n <= peep2_current_count);
2897 n += peep2_current;
2898 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2899 n -= MAX_INSNS_PER_PEEP2 + 1;
2901 return peep2_insn_data[n].insn;
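/* A minimal sketch of the wrap-around arithmetic used throughout this
   peep2 code: peep2_insn_data is a ring of MAX_INSNS_PER_PEEP2 + 1
   slots, so every offset is reduced modulo that size.  A hypothetical
   helper making the reduction explicit:  */
#if 0
static int
peep2_buf_position (int n)
{
  /* Callers never advance by more than one full ring, so a single
     subtraction suffices.  */
  if (n >= MAX_INSNS_PER_PEEP2 + 1)
    n -= MAX_INSNS_PER_PEEP2 + 1;
  return n;
}
#endif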
2904 /* Return true if REGNO is dead before the Nth non-note insn
2905 after `current'. */
2907 int
2908 peep2_regno_dead_p (int ofs, int regno)
2910 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2912 ofs += peep2_current;
2913 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2914 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2916 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2918 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2921 /* Similarly for a REG. */
2923 int
2924 peep2_reg_dead_p (int ofs, rtx reg)
2926 int regno, n;
2928 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2930 ofs += peep2_current;
2931 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2932 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2934 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2936 regno = REGNO (reg);
2937 n = hard_regno_nregs[regno][GET_MODE (reg)];
2938 while (--n >= 0)
2939 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2940 return 0;
2941 return 1;
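/* A minimal sketch of how target code consumes the two liveness queries
   above: the C condition of a define_peephole2 typically gates the
   transformation on a death within the matched window, schematically
   (the pattern itself is hypothetical):

     (define_peephole2
       [... two matched insns ...]
       "peep2_reg_dead_p (1, operands[0])"
       [... replacement insns ...])

   i.e. operand 0 must be dead before the second insn of the window.  */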
2944 /* Try to find a hard register of mode MODE, matching the register class in
2945 CLASS_STR, which is available at the beginning of the insn at peep2
2946 window offset FROM and remains available until the end of the insn at
2947 window offset TO.
2949 Registers that already have bits set in REG_SET will not be considered.
2951 If an appropriate register is available, it will be returned and the
2952 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2953 returned. */
2955 rtx
2956 peep2_find_free_register (int from, int to, const char *class_str,
2957 enum machine_mode mode, HARD_REG_SET *reg_set)
2959 static int search_ofs;
2960 enum reg_class cl;
2961 HARD_REG_SET live;
2962 int i;
2964 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2965 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2967 from += peep2_current;
2968 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2969 from -= MAX_INSNS_PER_PEEP2 + 1;
2970 to += peep2_current;
2971 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2972 to -= MAX_INSNS_PER_PEEP2 + 1;
2974 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2975 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2977 while (from != to)
2979 HARD_REG_SET this_live;
2981 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2982 from = 0;
2983 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2984 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2985 IOR_HARD_REG_SET (live, this_live);
2988 cl = (class_str[0] == 'r' ? GENERAL_REGS
2989 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2991 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2993 int raw_regno, regno, success, j;
2995 /* Distribute the free registers as much as possible. */
2996 raw_regno = search_ofs + i;
2997 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2998 raw_regno -= FIRST_PSEUDO_REGISTER;
2999 #ifdef REG_ALLOC_ORDER
3000 regno = reg_alloc_order[raw_regno];
3001 #else
3002 regno = raw_regno;
3003 #endif
3005 /* Don't allocate fixed registers. */
3006 if (fixed_regs[regno])
3007 continue;
3008 /* Don't allocate global registers. */
3009 if (global_regs[regno])
3010 continue;
3011 /* Make sure the register is of the right class. */
3012 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3013 continue;
3014 /* And can support the mode we need. */
3015 if (! HARD_REGNO_MODE_OK (regno, mode))
3016 continue;
3017 /* And that we don't create an extra save/restore. */
3018 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
3019 continue;
3020 if (! targetm.hard_regno_scratch_ok (regno))
3021 continue;
3023 /* And we don't clobber traceback for noreturn functions. */
3024 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3025 && (! reload_completed || frame_pointer_needed))
3026 continue;
3028 success = 1;
3029 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3031 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3032 || TEST_HARD_REG_BIT (live, regno + j))
3034 success = 0;
3035 break;
3038 if (success)
3040 add_to_hard_reg_set (reg_set, mode, regno);
3042 /* Start the next search with the next register. */
3043 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3044 raw_regno = 0;
3045 search_ofs = raw_regno;
3047 return gen_rtx_REG (mode, regno);
3051 search_ofs = 0;
3052 return NULL_RTX;
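/* A minimal sketch of scavenging a scratch register during peephole2,
   as a target's define_peephole2 preparation code might do; the helper
   and its use are hypothetical.  */
#if 0
static rtx
scavenge_peep2_scratch (enum machine_mode mode)
{
  HARD_REG_SET excluded;
  CLEAR_HARD_REG_SET (excluded);
  /* "r": any GENERAL_REGS register, free across window offsets 0..2.
     Returns NULL_RTX when nothing suitable is available.  */
  return peep2_find_free_register (0, 2, "r", mode, &excluded);
}
#endif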
3055 /* Perform the peephole2 optimization pass. */
3057 static void
3058 peephole2_optimize (void)
3060 rtx insn, prev;
3061 bitmap live;
3062 int i;
3063 basic_block bb;
3064 bool do_cleanup_cfg = false;
3065 bool do_rebuild_jump_labels = false;
3067 df_set_flags (DF_LR_RUN_DCE);
3068 df_analyze ();
3070 /* Initialize the regsets we're going to use. */
3071 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3072 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3073 live = BITMAP_ALLOC (&reg_obstack);
3075 FOR_EACH_BB_REVERSE (bb)
3077 rtl_profile_for_bb (bb);
3078 /* Indicate that all slots except the last hold invalid data. */
3079 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3080 peep2_insn_data[i].insn = NULL_RTX;
3081 peep2_current_count = 0;
3083 /* Indicate that the last slot contains live_after data. */
3084 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3085 peep2_current = MAX_INSNS_PER_PEEP2;
3087 /* Start up propagation. */
3088 bitmap_copy (live, DF_LR_OUT (bb));
3089 df_simulate_initialize_backwards (bb, live);
3090 bitmap_copy (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3092 for (insn = BB_END (bb); ; insn = prev)
3094 prev = PREV_INSN (insn);
3095 if (INSN_P (insn))
3097 rtx attempt, before_try, x;
3098 int match_len;
3099 rtx note;
3100 bool was_call = false;
3102 /* Record this insn. */
3103 if (--peep2_current < 0)
3104 peep2_current = MAX_INSNS_PER_PEEP2;
3105 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3106 && peep2_insn_data[peep2_current].insn == NULL_RTX)
3107 peep2_current_count++;
3108 peep2_insn_data[peep2_current].insn = insn;
3109 df_simulate_one_insn_backwards (bb, insn, live);
3110 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3112 if (RTX_FRAME_RELATED_P (insn))
3114 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3115 substitution would lose the
3116 REG_FRAME_RELATED_EXPR that is attached. */
3117 peep2_current_count = 0;
3118 attempt = NULL;
3120 else
3121 /* Match the peephole. */
3122 attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
3124 if (attempt != NULL)
3126 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3127 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3128 cfg-related call notes. */
3129 for (i = 0; i <= match_len; ++i)
3131 int j;
3132 rtx old_insn, new_insn, note;
3134 j = i + peep2_current;
3135 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3136 j -= MAX_INSNS_PER_PEEP2 + 1;
3137 old_insn = peep2_insn_data[j].insn;
3138 if (!CALL_P (old_insn))
3139 continue;
3140 was_call = true;
3142 new_insn = attempt;
3143 while (new_insn != NULL_RTX)
3145 if (CALL_P (new_insn))
3146 break;
3147 new_insn = NEXT_INSN (new_insn);
3150 gcc_assert (new_insn != NULL_RTX);
3152 CALL_INSN_FUNCTION_USAGE (new_insn)
3153 = CALL_INSN_FUNCTION_USAGE (old_insn);
3155 for (note = REG_NOTES (old_insn);
3156 note;
3157 note = XEXP (note, 1))
3158 switch (REG_NOTE_KIND (note))
3160 case REG_NORETURN:
3161 case REG_SETJMP:
3162 add_reg_note (new_insn, REG_NOTE_KIND (note),
3163 XEXP (note, 0));
3164 break;
3165 default:
3166 /* Discard all other reg notes. */
3167 break;
3170 /* Croak if there is another call in the sequence. */
3171 while (++i <= match_len)
3173 j = i + peep2_current;
3174 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3175 j -= MAX_INSNS_PER_PEEP2 + 1;
3176 old_insn = peep2_insn_data[j].insn;
3177 gcc_assert (!CALL_P (old_insn));
3179 break;
3182 i = match_len + peep2_current;
3183 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3184 i -= MAX_INSNS_PER_PEEP2 + 1;
3186 note = find_reg_note (peep2_insn_data[i].insn,
3187 REG_EH_REGION, NULL_RTX);
3189 /* Replace the old sequence with the new. */
3190 attempt = emit_insn_after_setloc (attempt,
3191 peep2_insn_data[i].insn,
3192 INSN_LOCATOR (peep2_insn_data[i].insn));
3193 before_try = PREV_INSN (insn);
3194 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3196 /* Re-insert the EH_REGION notes. */
3197 if (note || (was_call && nonlocal_goto_handler_labels))
3199 edge eh_edge;
3200 edge_iterator ei;
3202 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3203 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3204 break;
3206 for (x = attempt ; x != before_try ; x = PREV_INSN (x))
3207 if (CALL_P (x)
3208 || (flag_non_call_exceptions
3209 && may_trap_p (PATTERN (x))
3210 && !find_reg_note (x, REG_EH_REGION, NULL)))
3212 if (note)
3213 add_reg_note (x, REG_EH_REGION, XEXP (note, 0));
3215 if (x != BB_END (bb) && eh_edge)
3217 edge nfte, nehe;
3218 int flags;
3220 nfte = split_block (bb, x);
3221 flags = (eh_edge->flags
3222 & (EDGE_EH | EDGE_ABNORMAL));
3223 if (CALL_P (x))
3224 flags |= EDGE_ABNORMAL_CALL;
3225 nehe = make_edge (nfte->src, eh_edge->dest,
3226 flags);
3228 nehe->probability = eh_edge->probability;
3229 nfte->probability
3230 = REG_BR_PROB_BASE - nehe->probability;
3232 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3233 bb = nfte->src;
3234 eh_edge = nehe;
3238 /* A possibly trapping insn may have been converted into a
3239 non-trapping one; zap any now-dummy outgoing edges. */
3240 do_cleanup_cfg |= purge_dead_edges (bb);
3243 #ifdef HAVE_conditional_execution
3244 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3245 peep2_insn_data[i].insn = NULL_RTX;
3246 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3247 peep2_current_count = 0;
3248 #else
3249 /* Back up lifetime information past the end of the
3250 newly created sequence. */
3251 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3252 i = 0;
3253 bitmap_copy (live, peep2_insn_data[i].live_before);
3255 /* Update life information for the new sequence. */
3256 x = attempt;
3259 if (INSN_P (x))
3261 if (--i < 0)
3262 i = MAX_INSNS_PER_PEEP2;
3263 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3264 && peep2_insn_data[i].insn == NULL_RTX)
3265 peep2_current_count++;
3266 peep2_insn_data[i].insn = x;
3267 df_insn_rescan (x);
3268 df_simulate_one_insn_backwards (bb, x, live);
3269 bitmap_copy (peep2_insn_data[i].live_before, live);
3271 x = PREV_INSN (x);
3273 while (x != prev);
3275 peep2_current = i;
3276 #endif
3278 /* If we generated a jump instruction, it won't have
3279 JUMP_LABEL set. Recompute after we're done. */
3280 for (x = attempt; x != before_try; x = PREV_INSN (x))
3281 if (JUMP_P (x))
3283 do_rebuild_jump_labels = true;
3284 break;
3289 if (insn == BB_HEAD (bb))
3290 break;
3294 default_rtl_profile ();
3295 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3296 BITMAP_FREE (peep2_insn_data[i].live_before);
3297 BITMAP_FREE (live);
3298 if (do_rebuild_jump_labels)
3299 rebuild_jump_labels (get_insns ());
3301 #endif /* HAVE_peephole2 */
3303 /* Common predicates for use with define_bypass. */
3305 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3306 data, not the address operand(s), of the store. IN_INSN and OUT_INSN
3307 must be either a single_set or a PARALLEL with SETs inside. */
3309 int
3310 store_data_bypass_p (rtx out_insn, rtx in_insn)
3312 rtx out_set, in_set;
3313 rtx out_pat, in_pat;
3314 rtx out_exp, in_exp;
3315 int i, j;
3317 in_set = single_set (in_insn);
3318 if (in_set)
3320 if (!MEM_P (SET_DEST (in_set)))
3321 return false;
3323 out_set = single_set (out_insn);
3324 if (out_set)
3326 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3327 return false;
3329 else
3331 out_pat = PATTERN (out_insn);
3333 if (GET_CODE (out_pat) != PARALLEL)
3334 return false;
3336 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3338 out_exp = XVECEXP (out_pat, 0, i);
3340 if (GET_CODE (out_exp) == CLOBBER)
3341 continue;
3343 gcc_assert (GET_CODE (out_exp) == SET);
3345 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3346 return false;
3350 else
3352 in_pat = PATTERN (in_insn);
3353 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3355 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3357 in_exp = XVECEXP (in_pat, 0, i);
3359 if (GET_CODE (in_exp) == CLOBBER)
3360 continue;
3362 gcc_assert (GET_CODE (in_exp) == SET);
3364 if (!MEM_P (SET_DEST (in_exp)))
3365 return false;
3367 out_set = single_set (out_insn);
3368 if (out_set)
3370 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3371 return false;
3373 else
3375 out_pat = PATTERN (out_insn);
3376 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3378 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3380 out_exp = XVECEXP (out_pat, 0, j);
3382 if (GET_CODE (out_exp) == CLOBBER)
3383 continue;
3385 gcc_assert (GET_CODE (out_exp) == SET);
3387 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3388 return false;
3394 return true;
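/* A minimal sketch of wiring the predicate above into a scheduler
   description; the reservation names are hypothetical:

     (define_bypass 1 "cpu_load" "cpu_store" "store_data_bypass_p")

   i.e. the load-to-store latency drops to 1 cycle when the store
   depends on the loaded value as its data, while the default latency
   applies when the dependence is through an address operand.  */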
3397 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3398 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3399 or multiple set; IN_INSN should be single_set for truth, but for convenience
3400 of insn categorization may be any JUMP or CALL insn. */
3402 int
3403 if_test_bypass_p (rtx out_insn, rtx in_insn)
3405 rtx out_set, in_set;
3407 in_set = single_set (in_insn);
3408 if (! in_set)
3410 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3411 return false;
3414 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3415 return false;
3416 in_set = SET_SRC (in_set);
3418 out_set = single_set (out_insn);
3419 if (out_set)
3421 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3422 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3423 return false;
3425 else
3427 rtx out_pat;
3428 int i;
3430 out_pat = PATTERN (out_insn);
3431 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3433 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3435 rtx exp = XVECEXP (out_pat, 0, i);
3437 if (GET_CODE (exp) == CLOBBER)
3438 continue;
3440 gcc_assert (GET_CODE (exp) == SET);
3442 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3443 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3444 return false;
3448 return true;
3451 static bool
3452 gate_handle_peephole2 (void)
3454 return (optimize > 0 && flag_peephole2);
3457 static unsigned int
3458 rest_of_handle_peephole2 (void)
3460 #ifdef HAVE_peephole2
3461 peephole2_optimize ();
3462 #endif
3463 return 0;
3466 struct rtl_opt_pass pass_peephole2 =
3469 RTL_PASS,
3470 "peephole2", /* name */
3471 gate_handle_peephole2, /* gate */
3472 rest_of_handle_peephole2, /* execute */
3473 NULL, /* sub */
3474 NULL, /* next */
3475 0, /* static_pass_number */
3476 TV_PEEPHOLE2, /* tv_id */
3477 0, /* properties_required */
3478 0, /* properties_provided */
3479 0, /* properties_destroyed */
3480 0, /* todo_flags_start */
3481 TODO_df_finish | TODO_verify_rtl_sharing |
3482 TODO_dump_func /* todo_flags_finish */
3486 static unsigned int
3487 rest_of_handle_split_all_insns (void)
3489 split_all_insns ();
3490 return 0;
3493 struct rtl_opt_pass pass_split_all_insns =
3496 RTL_PASS,
3497 "split1", /* name */
3498 NULL, /* gate */
3499 rest_of_handle_split_all_insns, /* execute */
3500 NULL, /* sub */
3501 NULL, /* next */
3502 0, /* static_pass_number */
3503 TV_NONE, /* tv_id */
3504 0, /* properties_required */
3505 0, /* properties_provided */
3506 0, /* properties_destroyed */
3507 0, /* todo_flags_start */
3508 TODO_dump_func /* todo_flags_finish */
3512 static unsigned int
3513 rest_of_handle_split_after_reload (void)
3515 /* If optimizing, then go ahead and split insns now. */
3516 #ifndef STACK_REGS
3517 if (optimize > 0)
3518 #endif
3519 split_all_insns ();
3520 return 0;
3523 struct rtl_opt_pass pass_split_after_reload =
3526 RTL_PASS,
3527 "split2", /* name */
3528 NULL, /* gate */
3529 rest_of_handle_split_after_reload, /* execute */
3530 NULL, /* sub */
3531 NULL, /* next */
3532 0, /* static_pass_number */
3533 TV_NONE, /* tv_id */
3534 0, /* properties_required */
3535 0, /* properties_provided */
3536 0, /* properties_destroyed */
3537 0, /* todo_flags_start */
3538 TODO_dump_func /* todo_flags_finish */
3542 static bool
3543 gate_handle_split_before_regstack (void)
3545 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3546 /* If flow2 creates new instructions which need splitting
3547 and scheduling after reload is not done, they might not be
3548 split until final which doesn't allow splitting
3549 if HAVE_ATTR_length. */
3550 # ifdef INSN_SCHEDULING
3551 return (optimize && !flag_schedule_insns_after_reload);
3552 # else
3553 return (optimize);
3554 # endif
3555 #else
3556 return 0;
3557 #endif
3560 static unsigned int
3561 rest_of_handle_split_before_regstack (void)
3563 split_all_insns ();
3564 return 0;
3567 struct rtl_opt_pass pass_split_before_regstack =
3570 RTL_PASS,
3571 "split3", /* name */
3572 gate_handle_split_before_regstack, /* gate */
3573 rest_of_handle_split_before_regstack, /* execute */
3574 NULL, /* sub */
3575 NULL, /* next */
3576 0, /* static_pass_number */
3577 TV_NONE, /* tv_id */
3578 0, /* properties_required */
3579 0, /* properties_provided */
3580 0, /* properties_destroyed */
3581 0, /* todo_flags_start */
3582 TODO_dump_func /* todo_flags_finish */
3586 static bool
3587 gate_handle_split_before_sched2 (void)
3589 #ifdef INSN_SCHEDULING
3590 return optimize > 0 && flag_schedule_insns_after_reload;
3591 #else
3592 return 0;
3593 #endif
3596 static unsigned int
3597 rest_of_handle_split_before_sched2 (void)
3599 #ifdef INSN_SCHEDULING
3600 split_all_insns ();
3601 #endif
3602 return 0;
3605 struct rtl_opt_pass pass_split_before_sched2 =
3608 RTL_PASS,
3609 "split4", /* name */
3610 gate_handle_split_before_sched2, /* gate */
3611 rest_of_handle_split_before_sched2, /* execute */
3612 NULL, /* sub */
3613 NULL, /* next */
3614 0, /* static_pass_number */
3615 TV_NONE, /* tv_id */
3616 0, /* properties_required */
3617 0, /* properties_provided */
3618 0, /* properties_destroyed */
3619 0, /* todo_flags_start */
3620 TODO_verify_flow |
3621 TODO_dump_func /* todo_flags_finish */
3625 /* The placement of the splitting that we do for shorten_branches
3626 depends on whether regstack is used by the target or not. */
3627 static bool
3628 gate_do_final_split (void)
3630 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3631 return 1;
3632 #else
3633 return 0;
3634 #endif
3637 struct rtl_opt_pass pass_split_for_shorten_branches =
3640 RTL_PASS,
3641 "split5", /* name */
3642 gate_do_final_split, /* gate */
3643 split_all_insns_noflow, /* execute */
3644 NULL, /* sub */
3645 NULL, /* next */
3646 0, /* static_pass_number */
3647 TV_NONE, /* tv_id */
3648 0, /* properties_required */
3649 0, /* properties_provided */
3650 0, /* properties_destroyed */
3651 0, /* todo_flags_start */
3652 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */