/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}

/* Wrapper for validate_change_1 without the UNSHARE argument,
   defaulting UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument,
   defaulting UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
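
/* Usage sketch (INSN, X and NEW_SRC are hypothetical, not names from
   this file): a transformation typically queues several changes with
   IN_GROUP nonzero and then validates them all at once:

       validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
       validate_change (insn, &XEXP (x, 1), new_src, 1);
       if (! apply_change_group ())
         return;

   On failure, apply_change_group calls cancel_changes (0), so the
   caller never sees a half-applied group.  */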
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

bool
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (DEBUG_INSN_P (object))
        continue;
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER
                   || GET_CODE (pat) == VAR_LOCATION)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_INT_P (XEXP (x, 1))
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside an already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}
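
/* Usage sketch (REG and INSN are hypothetical): a pass that has proved
   a register holds a known constant can try

       if (validate_replace_rtx (reg, GEN_INT (42), insn))
         ...

   INSN is rewritten only if the substituted pattern still matches some
   insn pattern; otherwise every queued change is cancelled and INSN is
   left untouched.  */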
/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
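
/* Usage sketch: general_operand is the usual predicate behind plain
   match_operand expressions in a machine description, e.g. this
   hypothetical move pattern:

       (define_insn "..."
         [(set (match_operand:SI 0 "general_operand" "=r,m")
               (match_operand:SI 1 "general_operand" "rm,r"))]
         "..."
         "...")

   Each operand is accepted only if general_operand returns 1 for it
   in SImode.  */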
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (CONST_INT_P (op)
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
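
/* Illustrative rtl shape accepted by push_operand (assuming a 32-bit
   target where STACK_GROWS_DOWNWARD holds, so STACK_PUSH_CODE is
   PRE_DEC, and PUSH_ROUNDING adds no padding):

       (mem:SI (pre_dec:SI (reg:SI sp)))

   When PUSH_ROUNDING pads the size, the PRE_MODIFY form checked above
   is required instead.  */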
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.legitimate_address_p (mode, addr, 0);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;		/* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
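
/* Usage sketch: check_asm_operands above shows the typical calling
   sequence; a caller sizes the output vectors with asm_noperands
   first:

       noperands = asm_noperands (body);
       operands = XALLOCAVEC (rtx, noperands);
       constraints = XALLOCAVEC (const char *, noperands);
       decode_asm_operands (body, operands, NULL, constraints, NULL, NULL);

   Any of the output vectors may be NULL when that information is not
   needed.  */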
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
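
/* Worked example (hypothetical target): for MODE == SImode (size 4)
   and Y == (plus (reg) (const_int 100)), the constant term is
   temporarily rewritten to (const_int 103), i.e. Y plus mode_sz - 1,
   and the address is rechecked in QImode; the original rtl is
   restored before returning.  */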
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Like extract_insn, but save insn extracted and don't extract again, when
   called again for the same insn expecting that recog_data still contains the
   valid information.  This is used primarily by the gen_attr infrastructure
   that often does extract insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}

/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
2024 /* Analyze INSN and fill in recog_data. */
2026 void
2027 extract_insn (rtx insn)
2029 int i;
2030 int icode;
2031 int noperands;
2032 rtx body = PATTERN (insn);
2034 recog_data.n_operands = 0;
2035 recog_data.n_alternatives = 0;
2036 recog_data.n_dups = 0;
2038 switch (GET_CODE (body))
2040 case USE:
2041 case CLOBBER:
2042 case ASM_INPUT:
2043 case ADDR_VEC:
2044 case ADDR_DIFF_VEC:
2045 case VAR_LOCATION:
2046 return;
2048 case SET:
2049 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2050 goto asm_insn;
2051 else
2052 goto normal_insn;
2053 case PARALLEL:
2054 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2055 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2056 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2057 goto asm_insn;
2058 else
2059 goto normal_insn;
2060 case ASM_OPERANDS:
2061 asm_insn:
2062 recog_data.n_operands = noperands = asm_noperands (body);
2063 if (noperands >= 0)
2065 /* This insn is an `asm' with operands. */
2067 /* expand_asm_operands makes sure there aren't too many operands. */
2068 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2070 /* Now get the operand values and constraints out of the insn. */
2071 decode_asm_operands (body, recog_data.operand,
2072 recog_data.operand_loc,
2073 recog_data.constraints,
2074 recog_data.operand_mode, NULL);
2075 if (noperands > 0)
2077 const char *p = recog_data.constraints[0];
2078 recog_data.n_alternatives = 1;
2079 while (*p)
2080 recog_data.n_alternatives += (*p++ == ',');
2082 break;
2084 fatal_insn_not_found (insn);
2086 default:
2087 normal_insn:
2088 /* Ordinary insn: recognize it, get the operands via insn_extract
2089 and get the constraints. */
2091 icode = recog_memoized (insn);
2092 if (icode < 0)
2093 fatal_insn_not_found (insn);
2095 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2096 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2097 recog_data.n_dups = insn_data[icode].n_dups;
2099 insn_extract (insn);
2101 for (i = 0; i < noperands; i++)
2103 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2104 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2105 /* A VOIDmode match_operand gets its mode from the real operand. */
2106 if (recog_data.operand_mode[i] == VOIDmode)
2107 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2110 for (i = 0; i < noperands; i++)
2111 recog_data.operand_type[i]
2112 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2113 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2114 : OP_IN);
2116 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2118 if (INSN_CODE (insn) < 0)
2119 for (i = 0; i < recog_data.n_alternatives; i++)
2120 recog_data.alternative_enabled_p[i] = true;
2121 else
2123 recog_data.insn = insn;
2124 for (i = 0; i < recog_data.n_alternatives; i++)
2126 which_alternative = i;
2127 recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
2131 recog_data.insn = NULL;
2132 which_alternative = -1;
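/* Editor's sketch of a typical consumer: after extract_insn, a pass can
   walk the operand array that was just filled in, e.g.

     extract_insn (insn);
     for (i = 0; i < recog_data.n_operands; i++)
       if (recog_data.operand_type[i] == OP_OUT)
         mark_output (recog_data.operand[i]);

   where mark_output is a hypothetical callback and I a loop index.  */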
2135 /* After calling extract_insn, you can use this function to extract some
2136 information from the constraint strings into a more usable form.
2137 The collected data is stored in recog_op_alt. */
2138 void
2139 preprocess_constraints (void)
2141 int i;
2143 for (i = 0; i < recog_data.n_operands; i++)
2144 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2145 * sizeof (struct operand_alternative)));
2147 for (i = 0; i < recog_data.n_operands; i++)
2149 int j;
2150 struct operand_alternative *op_alt;
2151 const char *p = recog_data.constraints[i];
2153 op_alt = recog_op_alt[i];
2155 for (j = 0; j < recog_data.n_alternatives; j++)
2157 op_alt[j].cl = NO_REGS;
2158 op_alt[j].constraint = p;
2159 op_alt[j].matches = -1;
2160 op_alt[j].matched = -1;
2162 if (!recog_data.alternative_enabled_p[j])
2164 p = skip_alternative (p);
2165 continue;
2168 if (*p == '\0' || *p == ',')
2170 op_alt[j].anything_ok = 1;
2171 continue;
2174 for (;;)
2176 char c = *p;
2177 if (c == '#')
2179 c = *++p;
2180 while (c != ',' && c != '\0');
2181 if (c == ',' || c == '\0')
2183 p++;
2184 break;
2187 switch (c)
2189 case '=': case '+': case '*': case '%':
2190 case 'E': case 'F': case 'G': case 'H':
2191 case 's': case 'i': case 'n':
2192 case 'I': case 'J': case 'K': case 'L':
2193 case 'M': case 'N': case 'O': case 'P':
2194 /* These don't say anything we care about. */
2195 break;
2197 case '?':
2198 op_alt[j].reject += 6;
2199 break;
2200 case '!':
2201 op_alt[j].reject += 600;
2202 break;
2203 case '&':
2204 op_alt[j].earlyclobber = 1;
2205 break;
2207 case '0': case '1': case '2': case '3': case '4':
2208 case '5': case '6': case '7': case '8': case '9':
2210 char *end;
2211 op_alt[j].matches = strtoul (p, &end, 10);
2212 recog_op_alt[op_alt[j].matches][j].matched = i;
2213 p = end;
2215 continue;
2217 case TARGET_MEM_CONSTRAINT:
2218 op_alt[j].memory_ok = 1;
2219 break;
2220 case '<':
2221 op_alt[j].decmem_ok = 1;
2222 break;
2223 case '>':
2224 op_alt[j].incmem_ok = 1;
2225 break;
2226 case 'V':
2227 op_alt[j].nonoffmem_ok = 1;
2228 break;
2229 case 'o':
2230 op_alt[j].offmem_ok = 1;
2231 break;
2232 case 'X':
2233 op_alt[j].anything_ok = 1;
2234 break;
2236 case 'p':
2237 op_alt[j].is_address = 1;
2238 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2239 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2240 break;
2242 case 'g':
2243 case 'r':
2244 op_alt[j].cl =
2245 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2246 break;
2248 default:
2249 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2251 op_alt[j].memory_ok = 1;
2252 break;
2254 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2256 op_alt[j].is_address = 1;
2257 op_alt[j].cl
2258 = (reg_class_subunion
2259 [(int) op_alt[j].cl]
2260 [(int) base_reg_class (VOIDmode, ADDRESS,
2261 SCRATCH)]);
2262 break;
2265 op_alt[j].cl
2266 = (reg_class_subunion
2267 [(int) op_alt[j].cl]
2268 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2269 break;
2271 p += CONSTRAINT_LEN (c, p);
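/* Editor's sketch: once preprocess_constraints has run, per-alternative
   queries become simple table lookups instead of constraint re-parsing:

     preprocess_constraints ();
     if (recog_op_alt[opno][alt].memory_ok
         || recog_op_alt[opno][alt].cl != NO_REGS)
       ...

   OPNO and ALT are a hypothetical operand index and alternative number.  */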
2277 /* Check the operands of an insn against the insn's operand constraints
2278 and return 1 if they are valid.
2279 The information about the insn's operands, constraints, operand modes
2280 etc. is obtained from the global variables set up by extract_insn.
2282 WHICH_ALTERNATIVE is set to a number which indicates which
2283 alternative of constraints was matched: 0 for the first alternative,
2284 1 for the next, etc.
2286 In addition, when two operands are required to match
2287 and it happens that the output operand is (reg) while the
2288 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2289 make the output operand look like the input.
2290 This is because the output operand is the one the template will print.
2292 This is used in final, just before printing the assembler code and by
2293 the routines that determine an insn's attribute.
2295 If STRICT is positive, it means that we have been
2296 called after reload has been completed. In that case, we must
2297 do all checks strictly. If it is zero, it means that we have been called
2298 before reload has completed. In that case, we first try to see if we can
2299 find an alternative that matches strictly. If not, we try again, this
2300 time assuming that reload will fix up the insn. This provides a "best
2301 guess" for the alternative and is used to compute attributes of insns prior
2302 to reload. A negative value of STRICT is used for this internal call. */
2304 struct funny_match
2306 int this_op, other;
2310 constrain_operands (int strict)
2312 const char *constraints[MAX_RECOG_OPERANDS];
2313 int matching_operands[MAX_RECOG_OPERANDS];
2314 int earlyclobber[MAX_RECOG_OPERANDS];
2315 int c;
2317 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2318 int funny_match_index;
2320 which_alternative = 0;
2321 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2322 return 1;
2324 for (c = 0; c < recog_data.n_operands; c++)
2326 constraints[c] = recog_data.constraints[c];
2327 matching_operands[c] = -1;
2332 int seen_earlyclobber_at = -1;
2333 int opno;
2334 int lose = 0;
2335 funny_match_index = 0;
2337 if (!recog_data.alternative_enabled_p[which_alternative])
2339 int i;
2341 for (i = 0; i < recog_data.n_operands; i++)
2342 constraints[i] = skip_alternative (constraints[i]);
2344 which_alternative++;
2345 continue;
2348 for (opno = 0; opno < recog_data.n_operands; opno++)
2350 rtx op = recog_data.operand[opno];
2351 enum machine_mode mode = GET_MODE (op);
2352 const char *p = constraints[opno];
2353 int offset = 0;
2354 int win = 0;
2355 int val;
2356 int len;
2358 earlyclobber[opno] = 0;
2360 /* A unary operator may be accepted by the predicate, but it
2361 is irrelevant for matching constraints. */
2362 if (UNARY_P (op))
2363 op = XEXP (op, 0);
2365 if (GET_CODE (op) == SUBREG)
2367 if (REG_P (SUBREG_REG (op))
2368 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2369 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2370 GET_MODE (SUBREG_REG (op)),
2371 SUBREG_BYTE (op),
2372 GET_MODE (op));
2373 op = SUBREG_REG (op);
2376 /* An empty constraint or empty alternative
2377 allows anything which matched the pattern. */
2378 if (*p == 0 || *p == ',')
2379 win = 1;
2382 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2384 case '\0':
2385 len = 0;
2386 break;
2387 case ',':
2388 c = '\0';
2389 break;
2391 case '?': case '!': case '*': case '%':
2392 case '=': case '+':
2393 break;
2395 case '#':
2396 /* Ignore rest of this alternative as far as
2397 constraint checking is concerned. */
2399 p++;
2400 while (*p && *p != ',');
2401 len = 0;
2402 break;
2404 case '&':
2405 earlyclobber[opno] = 1;
2406 if (seen_earlyclobber_at < 0)
2407 seen_earlyclobber_at = opno;
2408 break;
2410 case '0': case '1': case '2': case '3': case '4':
2411 case '5': case '6': case '7': case '8': case '9':
2413 /* This operand must be the same as a previous one.
2414 This kind of constraint is used for instructions such
2415 as add when they take only two operands.
2417 Note that the lower-numbered operand is passed first.
2419 If we are not testing strictly, assume that this
2420 constraint will be satisfied. */
2422 char *end;
2423 int match;
2425 match = strtoul (p, &end, 10);
2426 p = end;
2428 if (strict < 0)
2429 val = 1;
2430 else
2432 rtx op1 = recog_data.operand[match];
2433 rtx op2 = recog_data.operand[opno];
2435 /* A unary operator may be accepted by the predicate,
2436 but it is irrelevant for matching constraints. */
2437 if (UNARY_P (op1))
2438 op1 = XEXP (op1, 0);
2439 if (UNARY_P (op2))
2440 op2 = XEXP (op2, 0);
2442 val = operands_match_p (op1, op2);
2445 matching_operands[opno] = match;
2446 matching_operands[match] = opno;
2448 if (val != 0)
2449 win = 1;
2451 /* If output is *x and input is *--x, arrange later
2452 to change the output to *--x as well, since the
2453 output op is the one that will be printed. */
2454 if (val == 2 && strict > 0)
2456 funny_match[funny_match_index].this_op = opno;
2457 funny_match[funny_match_index++].other = match;
2460 len = 0;
2461 break;
2463 case 'p':
2464 /* p is used for address_operands. When we are called by
2465 gen_reload, no one will have checked that the address is
2466 strictly valid, i.e., that all pseudos requiring hard regs
2467 have gotten them. */
2468 if (strict <= 0
2469 || (strict_memory_address_p (recog_data.operand_mode[opno],
2470 op)))
2471 win = 1;
2472 break;
2474 /* No need to check general_operand again;
2475 it was done in insn-recog.c. Well, except that reload
2476 doesn't check the validity of its replacements, but
2477 that should only matter when there's a bug. */
2478 case 'g':
2479 /* Anything goes unless it is a REG and really has a hard reg
2480 but the hard reg is not in the class GENERAL_REGS. */
2481 if (REG_P (op))
2483 if (strict < 0
2484 || GENERAL_REGS == ALL_REGS
2485 || (reload_in_progress
2486 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2487 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2488 win = 1;
2490 else if (strict < 0 || general_operand (op, mode))
2491 win = 1;
2492 break;
2494 case 'X':
2495 /* This is used for a MATCH_SCRATCH in the cases when
2496 we don't actually need anything. So anything goes
2497 any time. */
2498 win = 1;
2499 break;
2501 case TARGET_MEM_CONSTRAINT:
2502 /* Memory operands must be valid, to the extent
2503 required by STRICT. */
2504 if (MEM_P (op))
2506 if (strict > 0
2507 && !strict_memory_address_p (GET_MODE (op),
2508 XEXP (op, 0)))
2509 break;
2510 if (strict == 0
2511 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2512 break;
2513 win = 1;
2515 /* Before reload, accept what reload can turn into mem. */
2516 else if (strict < 0 && CONSTANT_P (op))
2517 win = 1;
2518 /* During reload, accept a pseudo */
2519 else if (reload_in_progress && REG_P (op)
2520 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2521 win = 1;
2522 break;
2524 case '<':
2525 if (MEM_P (op)
2526 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2527 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2528 win = 1;
2529 break;
2531 case '>':
2532 if (MEM_P (op)
2533 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2534 || GET_CODE (XEXP (op, 0)) == POST_INC))
2535 win = 1;
2536 break;
2538 case 'E':
2539 case 'F':
2540 if (GET_CODE (op) == CONST_DOUBLE
2541 || (GET_CODE (op) == CONST_VECTOR
2542 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2543 win = 1;
2544 break;
2546 case 'G':
2547 case 'H':
2548 if (GET_CODE (op) == CONST_DOUBLE
2549 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2550 win = 1;
2551 break;
2553 case 's':
2554 if (CONST_INT_P (op)
2555 || (GET_CODE (op) == CONST_DOUBLE
2556 && GET_MODE (op) == VOIDmode))
2557 break;
2558 case 'i':
2559 if (CONSTANT_P (op))
2560 win = 1;
2561 break;
2563 case 'n':
2564 if (CONST_INT_P (op)
2565 || (GET_CODE (op) == CONST_DOUBLE
2566 && GET_MODE (op) == VOIDmode))
2567 win = 1;
2568 break;
2570 case 'I':
2571 case 'J':
2572 case 'K':
2573 case 'L':
2574 case 'M':
2575 case 'N':
2576 case 'O':
2577 case 'P':
2578 if (CONST_INT_P (op)
2579 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2580 win = 1;
2581 break;
2583 case 'V':
2584 if (MEM_P (op)
2585 && ((strict > 0 && ! offsettable_memref_p (op))
2586 || (strict < 0
2587 && !(CONSTANT_P (op) || MEM_P (op)))
2588 || (reload_in_progress
2589 && !(REG_P (op)
2590 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2591 win = 1;
2592 break;
2594 case 'o':
2595 if ((strict > 0 && offsettable_memref_p (op))
2596 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2597 /* Before reload, accept what reload can handle. */
2598 || (strict < 0
2599 && (CONSTANT_P (op) || MEM_P (op)))
2600 /* During reload, accept a pseudo */
2601 || (reload_in_progress && REG_P (op)
2602 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2603 win = 1;
2604 break;
2606 default:
2608 enum reg_class cl;
2610 cl = (c == 'r'
2611 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2612 if (cl != NO_REGS)
2614 if (strict < 0
2615 || (strict == 0
2616 && REG_P (op)
2617 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2618 || (strict == 0 && GET_CODE (op) == SCRATCH)
2619 || (REG_P (op)
2620 && reg_fits_class_p (op, cl, offset, mode)))
2621 win = 1;
2623 #ifdef EXTRA_CONSTRAINT_STR
2624 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2625 win = 1;
2627 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2628 /* Every memory operand can be reloaded to fit. */
2629 && ((strict < 0 && MEM_P (op))
2630 /* Before reload, accept what reload can turn
2631 into mem. */
2632 || (strict < 0 && CONSTANT_P (op))
2633 /* During reload, accept a pseudo */
2634 || (reload_in_progress && REG_P (op)
2635 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2636 win = 1;
2637 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2638 /* Every address operand can be reloaded to fit. */
2639 && strict < 0)
2640 win = 1;
2641 #endif
2642 break;
2645 while (p += len, c);
2647 constraints[opno] = p;
2648 /* If this operand did not win somehow,
2649 this alternative loses. */
2650 if (! win)
2651 lose = 1;
2653 /* This alternative won; the operands are ok.
2654 Change whichever operands this alternative says to change. */
2655 if (! lose)
2657 int opno, eopno;
2659 /* See if any earlyclobber operand conflicts with some other
2660 operand. */
2662 if (strict > 0 && seen_earlyclobber_at >= 0)
2663 for (eopno = seen_earlyclobber_at;
2664 eopno < recog_data.n_operands;
2665 eopno++)
2666 /* Ignore earlyclobber operands now in memory,
2667 because we would often report failure when we have
2668 two memory operands, one of which was formerly a REG. */
2669 if (earlyclobber[eopno]
2670 && REG_P (recog_data.operand[eopno]))
2671 for (opno = 0; opno < recog_data.n_operands; opno++)
2672 if ((MEM_P (recog_data.operand[opno])
2673 || recog_data.operand_type[opno] != OP_OUT)
2674 && opno != eopno
2675 /* Ignore things like match_operator operands. */
2676 && *recog_data.constraints[opno] != 0
2677 && ! (matching_operands[opno] == eopno
2678 && operands_match_p (recog_data.operand[opno],
2679 recog_data.operand[eopno]))
2680 && ! safe_from_earlyclobber (recog_data.operand[opno],
2681 recog_data.operand[eopno]))
2682 lose = 1;
2684 if (! lose)
2686 while (--funny_match_index >= 0)
2688 recog_data.operand[funny_match[funny_match_index].other]
2689 = recog_data.operand[funny_match[funny_match_index].this_op];
2692 return 1;
2696 which_alternative++;
2698 while (which_alternative < recog_data.n_alternatives);
2700 which_alternative = -1;
2701 /* If we are about to reject this, but we are not to test strictly,
2702 try a very loose test. Only return failure if it fails also. */
2703 if (strict == 0)
2704 return constrain_operands (-1);
2705 else
2706 return 0;
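/* Editor's sketch of the usual calling sequence (cf. the cached wrappers
   above): strict checking once reload has completed, loose checking
   before:

     extract_insn (insn);
     if (!constrain_operands (reload_completed))
       fatal_insn_not_found (insn);  */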
2709 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2710 is a hard reg in class CL when its regno is offset by OFFSET
2711 and changed to mode MODE.
2712 If OPERAND occupies multiple hard regs, all of them must be in CL. */
2715 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2716 enum machine_mode mode)
2718 int regno = REGNO (operand);
2720 if (cl == NO_REGS)
2721 return 0;
2723 return (regno < FIRST_PSEUDO_REGISTER
2724 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2725 mode, regno + offset));
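/* For example (editor's illustration, hypothetical register numbers):
   with OPERAND = (reg:SI 1), OFFSET = 0 and MODE = DImode on a target
   where DImode occupies two word registers, the test succeeds only if
   both hard regs 1 and 2 are members of CL.  */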
2728 /* Split a single instruction.  Helper function for split_all_insns and
2729 split_all_insns_noflow.  Return the last insn in the sequence if
2730 successful, or NULL if unsuccessful. */
2732 static rtx
2733 split_insn (rtx insn)
2735 /* Split insns here to get max fine-grain parallelism. */
2736 rtx first = PREV_INSN (insn);
2737 rtx last = try_split (PATTERN (insn), insn, 1);
2738 rtx insn_set, last_set, note;
2740 if (last == insn)
2741 return NULL_RTX;
2743 /* If the original instruction was a single set that was known to be
2744 equivalent to a constant, see if we can say the same about the last
2745 instruction in the split sequence. The two instructions must set
2746 the same destination. */
2747 insn_set = single_set (insn);
2748 if (insn_set)
2750 last_set = single_set (last);
2751 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2753 note = find_reg_equal_equiv_note (insn);
2754 if (note && CONSTANT_P (XEXP (note, 0)))
2755 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2756 else if (CONSTANT_P (SET_SRC (insn_set)))
2757 set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
2761 /* try_split returns the NOTE that INSN became. */
2762 SET_INSN_DELETED (insn);
2764 /* ??? Coddle to md files that generate subregs in post-reload
2765 splitters instead of computing the proper hard register. */
2766 if (reload_completed && first != last)
2768 first = NEXT_INSN (first);
2769 for (;;)
2771 if (INSN_P (first))
2772 cleanup_subreg_operands (first);
2773 if (first == last)
2774 break;
2775 first = NEXT_INSN (first);
2779 return last;
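/* Editor's note: the REG_EQUAL handling above means that if INSN was,
   say, (set (reg:SI 1) (const_int 0x12345678)) and the splitter turned
   it into a high-part/low-part pair, the constant stays visible as a
   REG_EQUAL note on the last insn of the split sequence.  */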
2782 /* Split all insns in the function. */
2784 void
2785 split_all_insns (void)
2787 sbitmap blocks;
2788 bool changed;
2789 basic_block bb;
2791 blocks = sbitmap_alloc (last_basic_block);
2792 sbitmap_zero (blocks);
2793 changed = false;
2795 FOR_EACH_BB_REVERSE (bb)
2797 rtx insn, next;
2798 bool finish = false;
2800 rtl_profile_for_bb (bb);
2801 for (insn = BB_HEAD (bb); !finish ; insn = next)
2803 /* Can't use `next_real_insn' because that might go across
2804 CODE_LABELS and short-out basic blocks. */
2805 next = NEXT_INSN (insn);
2806 finish = (insn == BB_END (bb));
2807 if (INSN_P (insn))
2809 rtx set = single_set (insn);
2811 /* Don't split no-op move insns. These should silently
2812 disappear later in final. Splitting such insns would
2813 break the code that handles LIBCALL blocks. */
2814 if (set && set_noop_p (set))
2816 /* Nops get in the way while scheduling, so delete them
2817 now if register allocation has already been done. It
2818 is too risky to try to do this before register
2819 allocation, and there are unlikely to be very many
2820 nops then anyway. */
2821 if (reload_completed)
2822 delete_insn_and_edges (insn);
2824 else
2826 rtx last = split_insn (insn);
2827 if (last)
2829 /* The split sequence may include a barrier, but the
2830 BB boundary we are interested in will be set to the
2831 previous one. */
2833 while (BARRIER_P (last))
2834 last = PREV_INSN (last);
2835 SET_BIT (blocks, bb->index);
2836 changed = true;
2843 default_rtl_profile ();
2844 if (changed)
2845 find_many_sub_basic_blocks (blocks);
2847 #ifdef ENABLE_CHECKING
2848 verify_flow_info ();
2849 #endif
2851 sbitmap_free (blocks);
2854 /* Same as split_all_insns, but do not expect the CFG to be available.
2855 Used by machine-dependent reorg passes. */
2857 unsigned int
2858 split_all_insns_noflow (void)
2860 rtx next, insn;
2862 for (insn = get_insns (); insn; insn = next)
2864 next = NEXT_INSN (insn);
2865 if (INSN_P (insn))
2867 /* Don't split no-op move insns. These should silently
2868 disappear later in final. Splitting such insns would
2869 break the code that handles LIBCALL blocks. */
2870 rtx set = single_set (insn);
2871 if (set && set_noop_p (set))
2873 /* Nops get in the way while scheduling, so delete them
2874 now if register allocation has already been done. It
2875 is too risky to try to do this before register
2876 allocation, and there are unlikely to be very many
2877 nops then anyway.
2879 ??? Should we use delete_insn when the CFG isn't valid? */
2880 if (reload_completed)
2881 delete_insn_and_edges (insn);
2883 else
2884 split_insn (insn);
2887 return 0;
2890 #ifdef HAVE_peephole2
2891 struct peep2_insn_data
2893 rtx insn;
2894 regset live_before;
2897 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2898 static int peep2_current;
2899 /* The number of instructions available to match a peep2. */
2900 int peep2_current_count;
2902 /* A non-insn marker indicating the last insn of the block.
2903 The live_before regset for this element is correct, indicating
2904 DF_LIVE_OUT for the block. */
2905 #define PEEP2_EOB pc_rtx
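/* Editor's note: peep2_insn_data is used as a ring buffer of
   MAX_INSNS_PER_PEEP2 + 1 slots, so every index computed from
   peep2_current below wraps modulo the buffer size, in the style of

     n += peep2_current;
     if (n >= MAX_INSNS_PER_PEEP2 + 1)
       n -= MAX_INSNS_PER_PEEP2 + 1;  */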
2907 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2908 does not exist. Used by the recognizer to find the next insn to match
2909 in a multi-insn pattern. */
2912 peep2_next_insn (int n)
2914 gcc_assert (n <= peep2_current_count);
2916 n += peep2_current;
2917 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2918 n -= MAX_INSNS_PER_PEEP2 + 1;
2920 return peep2_insn_data[n].insn;
2923 /* Return true if REGNO is dead before the Nth non-note insn
2924 after `current'. */
2927 peep2_regno_dead_p (int ofs, int regno)
2929 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2931 ofs += peep2_current;
2932 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2933 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2935 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2937 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2940 /* Similarly for a REG. */
2943 peep2_reg_dead_p (int ofs, rtx reg)
2945 int regno, n;
2947 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2949 ofs += peep2_current;
2950 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2951 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2953 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2955 regno = REGNO (reg);
2956 n = hard_regno_nregs[regno][GET_MODE (reg)];
2957 while (--n >= 0)
2958 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2959 return 0;
2960 return 1;
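/* Editor's sketch: these liveness predicates are normally invoked from
   the condition string of a define_peephole2 in a target's md file, as
   in the hypothetical condition

     "peep2_reg_dead_p (2, operands[0])"

   which, for a two-insn sequence, asks whether operand 0 is no longer
   live after the matched insns.  */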
2963 /* Try to find a hard register of mode MODE, matching the register class in
2964 CLASS_STR, which is available at the beginning of the insn FROM slots
2965 after `current' and remains available until the end of the insn TO slots
2966 after `current' (FROM and TO are ring-buffer offsets, like the OFS
2967 argument of peep2_regno_dead_p).
2968 Registers that already have bits set in REG_SET will not be considered.
2970 If an appropriate register is available, it will be returned and the
2971 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2972 returned. */
2975 peep2_find_free_register (int from, int to, const char *class_str,
2976 enum machine_mode mode, HARD_REG_SET *reg_set)
2978 static int search_ofs;
2979 enum reg_class cl;
2980 HARD_REG_SET live;
2981 int i;
2983 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2984 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2986 from += peep2_current;
2987 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2988 from -= MAX_INSNS_PER_PEEP2 + 1;
2989 to += peep2_current;
2990 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2991 to -= MAX_INSNS_PER_PEEP2 + 1;
2993 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2994 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2996 while (from != to)
2998 HARD_REG_SET this_live;
3000 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3001 from = 0;
3002 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
3003 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3004 IOR_HARD_REG_SET (live, this_live);
3007 cl = (class_str[0] == 'r' ? GENERAL_REGS
3008 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3010 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3012 int raw_regno, regno, success, j;
3014 /* Distribute the free registers as much as possible. */
3015 raw_regno = search_ofs + i;
3016 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3017 raw_regno -= FIRST_PSEUDO_REGISTER;
3018 #ifdef REG_ALLOC_ORDER
3019 regno = reg_alloc_order[raw_regno];
3020 #else
3021 regno = raw_regno;
3022 #endif
3024 /* Don't allocate fixed registers. */
3025 if (fixed_regs[regno])
3026 continue;
3027 /* Don't allocate global registers. */
3028 if (global_regs[regno])
3029 continue;
3030 /* Make sure the register is of the right class. */
3031 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3032 continue;
3033 /* And can support the mode we need. */
3034 if (! HARD_REGNO_MODE_OK (regno, mode))
3035 continue;
3036 /* And that we don't create an extra save/restore. */
3037 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
3038 continue;
3039 if (! targetm.hard_regno_scratch_ok (regno))
3040 continue;
3042 /* And we don't clobber traceback for noreturn functions. */
3043 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3044 && (! reload_completed || frame_pointer_needed))
3045 continue;
3047 success = 1;
3048 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3050 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3051 || TEST_HARD_REG_BIT (live, regno + j))
3053 success = 0;
3054 break;
3057 if (success)
3059 add_to_hard_reg_set (reg_set, mode, regno);
3061 /* Start the next search with the next register. */
3062 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3063 raw_regno = 0;
3064 search_ofs = raw_regno;
3066 return gen_rtx_REG (mode, regno);
3070 search_ofs = 0;
3071 return NULL_RTX;
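/* Editor's sketch of a typical call from a peephole2 generator, asking
   for an SImode scratch in GENERAL_REGS that stays free across the two
   matched insns (SCRATCH and SCRATCH_SET are hypothetical locals):

     HARD_REG_SET scratch_set;
     rtx scratch;
     CLEAR_HARD_REG_SET (scratch_set);
     scratch = peep2_find_free_register (0, 1, "r", SImode,
                                         &scratch_set);  */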
3074 /* Forget all currently tracked instructions; only remember the current
3075 LIVE regset. */
3077 static void
3078 peep2_reinit_state (regset live)
3080 int i;
3082 /* Indicate that all slots except the last hold invalid data. */
3083 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3084 peep2_insn_data[i].insn = NULL_RTX;
3085 peep2_current_count = 0;
3087 /* Indicate that the last slot contains live_after data. */
3088 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3089 peep2_current = MAX_INSNS_PER_PEEP2;
3091 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3094 /* Perform the peephole2 optimization pass. */
3096 static void
3097 peephole2_optimize (void)
3099 rtx insn, prev;
3100 bitmap live;
3101 int i;
3102 basic_block bb;
3103 bool do_cleanup_cfg = false;
3104 bool do_rebuild_jump_labels = false;
3106 df_set_flags (DF_LR_RUN_DCE);
3107 df_analyze ();
3109 /* Initialize the regsets we're going to use. */
3110 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3111 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3112 live = BITMAP_ALLOC (&reg_obstack);
3114 FOR_EACH_BB_REVERSE (bb)
3116 rtl_profile_for_bb (bb);
3118 /* Start up propagation. */
3119 bitmap_copy (live, DF_LR_OUT (bb));
3120 df_simulate_initialize_backwards (bb, live);
3121 peep2_reinit_state (live);
3123 for (insn = BB_END (bb); ; insn = prev)
3125 prev = PREV_INSN (insn);
3126 if (NONDEBUG_INSN_P (insn))
3128 rtx attempt, before_try, x;
3129 int match_len;
3130 rtx note;
3131 bool was_call = false;
3133 /* Record this insn. */
3134 if (--peep2_current < 0)
3135 peep2_current = MAX_INSNS_PER_PEEP2;
3136 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3137 && peep2_insn_data[peep2_current].insn == NULL_RTX)
3138 peep2_current_count++;
3139 peep2_insn_data[peep2_current].insn = insn;
3140 df_simulate_one_insn_backwards (bb, insn, live);
3141 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3143 if (RTX_FRAME_RELATED_P (insn))
3145 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3146 substitution would lose the
3147 REG_FRAME_RELATED_EXPR that is attached. */
3148 peep2_reinit_state (live);
3149 attempt = NULL;
3151 else
3152 /* Match the peephole. */
3153 attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
3155 if (attempt != NULL)
3157 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3158 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3159 cfg-related call notes. */
3160 for (i = 0; i <= match_len; ++i)
3162 int j;
3163 rtx old_insn, new_insn, note;
3165 j = i + peep2_current;
3166 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3167 j -= MAX_INSNS_PER_PEEP2 + 1;
3168 old_insn = peep2_insn_data[j].insn;
3169 if (!CALL_P (old_insn))
3170 continue;
3171 was_call = true;
3173 new_insn = attempt;
3174 while (new_insn != NULL_RTX)
3176 if (CALL_P (new_insn))
3177 break;
3178 new_insn = NEXT_INSN (new_insn);
3181 gcc_assert (new_insn != NULL_RTX);
3183 CALL_INSN_FUNCTION_USAGE (new_insn)
3184 = CALL_INSN_FUNCTION_USAGE (old_insn);
3186 for (note = REG_NOTES (old_insn);
3187 note;
3188 note = XEXP (note, 1))
3189 switch (REG_NOTE_KIND (note))
3191 case REG_NORETURN:
3192 case REG_SETJMP:
3193 add_reg_note (new_insn, REG_NOTE_KIND (note),
3194 XEXP (note, 0));
3195 break;
3196 default:
3197 /* Discard all other reg notes. */
3198 break;
3201 /* Croak if there is another call in the sequence. */
3202 while (++i <= match_len)
3204 j = i + peep2_current;
3205 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3206 j -= MAX_INSNS_PER_PEEP2 + 1;
3207 old_insn = peep2_insn_data[j].insn;
3208 gcc_assert (!CALL_P (old_insn));
3210 break;
3213 i = match_len + peep2_current;
3214 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3215 i -= MAX_INSNS_PER_PEEP2 + 1;
3217 note = find_reg_note (peep2_insn_data[i].insn,
3218 REG_EH_REGION, NULL_RTX);
3220 /* Replace the old sequence with the new. */
3221 attempt = emit_insn_after_setloc (attempt,
3222 peep2_insn_data[i].insn,
3223 INSN_LOCATOR (peep2_insn_data[i].insn));
3224 before_try = PREV_INSN (insn);
3225 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3227 /* Re-insert the EH_REGION notes. */
3228 if (note || (was_call && nonlocal_goto_handler_labels))
3230 edge eh_edge;
3231 edge_iterator ei;
3233 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3234 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3235 break;
3237 for (x = attempt ; x != before_try ; x = PREV_INSN (x))
3238 if (CALL_P (x)
3239 || (flag_non_call_exceptions
3240 && may_trap_p (PATTERN (x))
3241 && !find_reg_note (x, REG_EH_REGION, NULL)))
3243 if (note)
3244 add_reg_note (x, REG_EH_REGION, XEXP (note, 0));
3246 if (x != BB_END (bb) && eh_edge)
3248 edge nfte, nehe;
3249 int flags;
3251 nfte = split_block (bb, x);
3252 flags = (eh_edge->flags
3253 & (EDGE_EH | EDGE_ABNORMAL));
3254 if (CALL_P (x))
3255 flags |= EDGE_ABNORMAL_CALL;
3256 nehe = make_edge (nfte->src, eh_edge->dest,
3257 flags);
3259 nehe->probability = eh_edge->probability;
3260 nfte->probability
3261 = REG_BR_PROB_BASE - nehe->probability;
3263 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3264 bb = nfte->src;
3265 eh_edge = nehe;
3269 /* Converting a possibly trapping insn to a non-trapping one is
3270 possible.  Zap dummy outgoing edges. */
3271 do_cleanup_cfg |= purge_dead_edges (bb);
3274 #ifdef HAVE_conditional_execution
3275 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3276 peep2_insn_data[i].insn = NULL_RTX;
3277 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3278 peep2_current_count = 0;
3279 #else
3280 /* Back up lifetime information past the end of the
3281 newly created sequence. */
3282 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3283 i = 0;
3284 bitmap_copy (live, peep2_insn_data[i].live_before);
3286 /* Update life information for the new sequence. */
3287 x = attempt;
3290 if (INSN_P (x))
3292 if (--i < 0)
3293 i = MAX_INSNS_PER_PEEP2;
3294 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3295 && peep2_insn_data[i].insn == NULL_RTX)
3296 peep2_current_count++;
3297 peep2_insn_data[i].insn = x;
3298 df_insn_rescan (x);
3299 df_simulate_one_insn_backwards (bb, x, live);
3300 bitmap_copy (peep2_insn_data[i].live_before, live);
3302 x = PREV_INSN (x);
3304 while (x != prev);
3306 peep2_current = i;
3307 #endif
3309 /* If we generated a jump instruction, it won't have
3310 JUMP_LABEL set. Recompute after we're done. */
3311 for (x = attempt; x != before_try; x = PREV_INSN (x))
3312 if (JUMP_P (x))
3314 do_rebuild_jump_labels = true;
3315 break;
3320 if (insn == BB_HEAD (bb))
3321 break;
3325 default_rtl_profile ();
3326 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3327 BITMAP_FREE (peep2_insn_data[i].live_before);
3328 BITMAP_FREE (live);
3329 if (do_rebuild_jump_labels)
3330 rebuild_jump_labels (get_insns ());
3332 #endif /* HAVE_peephole2 */
3334 /* Common predicates for use with define_bypass. */
3336 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3337 data, not the address operand(s) of the store.  IN_INSN and OUT_INSN
3338 must each be either a single_set or a PARALLEL with SETs inside. */
3341 store_data_bypass_p (rtx out_insn, rtx in_insn)
3343 rtx out_set, in_set;
3344 rtx out_pat, in_pat;
3345 rtx out_exp, in_exp;
3346 int i, j;
3348 in_set = single_set (in_insn);
3349 if (in_set)
3351 if (!MEM_P (SET_DEST (in_set)))
3352 return false;
3354 out_set = single_set (out_insn);
3355 if (out_set)
3357 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3358 return false;
3360 else
3362 out_pat = PATTERN (out_insn);
3364 if (GET_CODE (out_pat) != PARALLEL)
3365 return false;
3367 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3369 out_exp = XVECEXP (out_pat, 0, i);
3371 if (GET_CODE (out_exp) == CLOBBER)
3372 continue;
3374 gcc_assert (GET_CODE (out_exp) == SET);
3376 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3377 return false;
3381 else
3383 in_pat = PATTERN (in_insn);
3384 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3386 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3388 in_exp = XVECEXP (in_pat, 0, i);
3390 if (GET_CODE (in_exp) == CLOBBER)
3391 continue;
3393 gcc_assert (GET_CODE (in_exp) == SET);
3395 if (!MEM_P (SET_DEST (in_exp)))
3396 return false;
3398 out_set = single_set (out_insn);
3399 if (out_set)
3401 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3402 return false;
3404 else
3406 out_pat = PATTERN (out_insn);
3407 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3409 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3411 out_exp = XVECEXP (out_pat, 0, j);
3413 if (GET_CODE (out_exp) == CLOBBER)
3414 continue;
3416 gcc_assert (GET_CODE (out_exp) == SET);
3418 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3419 return false;
3425 return true;
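/* For example (editor's illustration):

     OUT_INSN: (set (reg:SI 1) (plus:SI (reg:SI 2) (reg:SI 3)))
     IN_INSN:  (set (mem:SI (reg:SI 4)) (reg:SI 1))

   depends only on the store data, so the function returns true.  If
   reg 1 instead appeared inside the address, as in
   (set (mem:SI (reg:SI 1)) (reg:SI 5)), it would return false.  */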
3428 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3429 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3430 or multiple set; IN_INSN should be single_set for truth, but for convenience
3431 of insn categorization may be any JUMP or CALL insn. */
3434 if_test_bypass_p (rtx out_insn, rtx in_insn)
3436 rtx out_set, in_set;
3438 in_set = single_set (in_insn);
3439 if (! in_set)
3441 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3442 return false;
3445 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3446 return false;
3447 in_set = SET_SRC (in_set);
3449 out_set = single_set (out_insn);
3450 if (out_set)
3452 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3453 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3454 return false;
3456 else
3458 rtx out_pat;
3459 int i;
3461 out_pat = PATTERN (out_insn);
3462 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3464 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3466 rtx exp = XVECEXP (out_pat, 0, i);
3468 if (GET_CODE (exp) == CLOBBER)
3469 continue;
3471 gcc_assert (GET_CODE (exp) == SET);
3473 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3474 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3475 return false;
3479 return true;
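/* For example (editor's illustration):

     OUT_INSN: (set (reg:CC 17) (compare:CC (reg:SI 1) (const_int 0)))
     IN_INSN:  (set (reg:SI 2)
                    (if_then_else:SI (ne (reg:CC 17) (const_int 0))
                                     (reg:SI 3)
                                     (reg:SI 4)))

   Here reg 17 feeds only the IF_THEN_ELSE condition, so the function
   returns true; if it were mentioned in the THEN or ELSE arm instead,
   the function would return false.  */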
3482 static bool
3483 gate_handle_peephole2 (void)
3485 return (optimize > 0 && flag_peephole2);
3488 static unsigned int
3489 rest_of_handle_peephole2 (void)
3491 #ifdef HAVE_peephole2
3492 peephole2_optimize ();
3493 #endif
3494 return 0;
3497 struct rtl_opt_pass pass_peephole2 =
3500 RTL_PASS,
3501 "peephole2", /* name */
3502 gate_handle_peephole2, /* gate */
3503 rest_of_handle_peephole2, /* execute */
3504 NULL, /* sub */
3505 NULL, /* next */
3506 0, /* static_pass_number */
3507 TV_PEEPHOLE2, /* tv_id */
3508 0, /* properties_required */
3509 0, /* properties_provided */
3510 0, /* properties_destroyed */
3511 0, /* todo_flags_start */
3512 TODO_df_finish | TODO_verify_rtl_sharing |
3513 TODO_dump_func /* todo_flags_finish */
3517 static unsigned int
3518 rest_of_handle_split_all_insns (void)
3520 split_all_insns ();
3521 return 0;
3524 struct rtl_opt_pass pass_split_all_insns =
3527 RTL_PASS,
3528 "split1", /* name */
3529 NULL, /* gate */
3530 rest_of_handle_split_all_insns, /* execute */
3531 NULL, /* sub */
3532 NULL, /* next */
3533 0, /* static_pass_number */
3534 TV_NONE, /* tv_id */
3535 0, /* properties_required */
3536 0, /* properties_provided */
3537 0, /* properties_destroyed */
3538 0, /* todo_flags_start */
3539 TODO_dump_func /* todo_flags_finish */
3543 static unsigned int
3544 rest_of_handle_split_after_reload (void)
3546 /* If optimizing, then go ahead and split insns now. */
3547 #ifndef STACK_REGS
3548 if (optimize > 0)
3549 #endif
3550 split_all_insns ();
3551 return 0;
3554 struct rtl_opt_pass pass_split_after_reload =
3557 RTL_PASS,
3558 "split2", /* name */
3559 NULL, /* gate */
3560 rest_of_handle_split_after_reload, /* execute */
3561 NULL, /* sub */
3562 NULL, /* next */
3563 0, /* static_pass_number */
3564 TV_NONE, /* tv_id */
3565 0, /* properties_required */
3566 0, /* properties_provided */
3567 0, /* properties_destroyed */
3568 0, /* todo_flags_start */
3569 TODO_dump_func /* todo_flags_finish */
3573 static bool
3574 gate_handle_split_before_regstack (void)
3576 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3577 /* If flow2 creates new instructions which need splitting
3578 and scheduling after reload is not done, they might not be
3579 split until final which doesn't allow splitting
3580 if HAVE_ATTR_length. */
3581 # ifdef INSN_SCHEDULING
3582 return (optimize && !flag_schedule_insns_after_reload);
3583 # else
3584 return (optimize);
3585 # endif
3586 #else
3587 return 0;
3588 #endif
3591 static unsigned int
3592 rest_of_handle_split_before_regstack (void)
3594 split_all_insns ();
3595 return 0;
3598 struct rtl_opt_pass pass_split_before_regstack =
3601 RTL_PASS,
3602 "split3", /* name */
3603 gate_handle_split_before_regstack, /* gate */
3604 rest_of_handle_split_before_regstack, /* execute */
3605 NULL, /* sub */
3606 NULL, /* next */
3607 0, /* static_pass_number */
3608 TV_NONE, /* tv_id */
3609 0, /* properties_required */
3610 0, /* properties_provided */
3611 0, /* properties_destroyed */
3612 0, /* todo_flags_start */
3613 TODO_dump_func /* todo_flags_finish */
3617 static bool
3618 gate_handle_split_before_sched2 (void)
3620 #ifdef INSN_SCHEDULING
3621 return optimize > 0 && flag_schedule_insns_after_reload;
3622 #else
3623 return 0;
3624 #endif
3627 static unsigned int
3628 rest_of_handle_split_before_sched2 (void)
3630 #ifdef INSN_SCHEDULING
3631 split_all_insns ();
3632 #endif
3633 return 0;
3636 struct rtl_opt_pass pass_split_before_sched2 =
3639 RTL_PASS,
3640 "split4", /* name */
3641 gate_handle_split_before_sched2, /* gate */
3642 rest_of_handle_split_before_sched2, /* execute */
3643 NULL, /* sub */
3644 NULL, /* next */
3645 0, /* static_pass_number */
3646 TV_NONE, /* tv_id */
3647 0, /* properties_required */
3648 0, /* properties_provided */
3649 0, /* properties_destroyed */
3650 0, /* todo_flags_start */
3651 TODO_verify_flow |
3652 TODO_dump_func /* todo_flags_finish */
3656 /* The placement of the splitting that we do for shorten_branches
3657 depends on whether regstack is used by the target or not. */
3658 static bool
3659 gate_do_final_split (void)
3661 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3662 return 1;
3663 #else
3664 return 0;
3665 #endif
3668 struct rtl_opt_pass pass_split_for_shorten_branches =
3671 RTL_PASS,
3672 "split5", /* name */
3673 gate_do_final_split, /* gate */
3674 split_all_insns_noflow, /* execute */
3675 NULL, /* sub */
3676 NULL, /* next */
3677 0, /* static_pass_number */
3678 TV_NONE, /* tv_id */
3679 0, /* properties_required */
3680 0, /* properties_provided */
3681 0, /* properties_destroyed */
3682 0, /* todo_flags_start */
3683 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */