/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "addresses.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#include "target.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
#ifndef HAVE_ATTR_enabled
static inline bool
get_attr_enabled (rtx insn ATTRIBUTE_UNUSED)
{
  return true;
}
#endif
static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx, bool);
static void validate_replace_src_1 (rtx *, void *);
static rtx split_insn (rtx);
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in reginfo.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;
struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Nonzero after thread_prologue_and_epilogue_insns has run.  */
int epilogue_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile (void)
{
  volatile_ok = 0;
}

void
init_recog (void)
{
  volatile_ok = 1;
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (rtx x)
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = XALLOCAVEC (rtx, noperands);
  constraints = XALLOCAVEC (const char *, noperands);

  decode_asm_operands (x, operands, NULL, constraints, NULL, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (! asm_operand_ok (operands[i], c, constraints))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
  bool unshare;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW_RTX will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is nonzero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */
static bool
validate_change_1 (rtx object, rtx *loc, rtx new_rtx, bool in_group, bool unshare)
{
  rtx old = *loc;

  if (old == new_rtx || rtx_equal_p (old, new_rtx))
    return 1;

  gcc_assert (in_group != 0 || num_changes == 0);

  *loc = new_rtx;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes = XRESIZEVEC (change_t, changes, changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;
  changes[num_changes].unshare = unshare;

  if (object && !MEM_P (object))
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
/* Wrapper for validate_change_1 without the UNSHARE argument,
   defaulting UNSHARE to false.  */

bool
validate_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, false);
}

/* Wrapper for validate_change_1 without the UNSHARE argument,
   defaulting UNSHARE to true.  */

bool
validate_unshare_change (rtx object, rtx *loc, rtx new_rtx, bool in_group)
{
  return validate_change_1 (object, loc, new_rtx, in_group, true);
}
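
/* Usage sketch (illustrative; INSN, NEW_SRC and NEW_DEST are hypothetical):
   queue several changes as one group, then let apply_change_group
   re-recognize the insn and keep or roll back all of them together:

     rtx set = single_set (insn);
     validate_change (insn, &SET_SRC (set), new_src, 1);
     validate_change (insn, &SET_DEST (set), new_dest, 1);
     if (! apply_change_group ())
       ;  (both changes have been backed out)
*/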
/* Keep X canonicalized if some changes have made it non-canonical; only
   modifies the operands of X, not (for example) its code.  Simplifications
   are not the job of this routine.

   Return true if anything was changed.  */
bool
canonicalize_change_group (rtx insn, rtx x)
{
  if (COMMUTATIVE_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      /* Oops, the caller has made X no longer canonical.
         Let's redo the changes in the correct order.  */
      rtx tem = XEXP (x, 0);
      validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
      validate_change (insn, &XEXP (x, 1), tem, 1);
      return true;
    }
  else
    return false;
}
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (rtx insn)
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending (void)
{
  return num_changes;
}
/* Tentatively apply the changes numbered NUM and up.
   Return 1 if all changes are valid, zero otherwise.  */

bool
verify_changes (int num)
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = num; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (MEM_P (object))
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (REG_P (changes[i].old)
               && asm_noperands (PATTERN (object)) > 0
               && REG_EXPR (changes[i].old) != NULL_TREE
               && DECL_ASSEMBLER_NAME_SET_P (REG_EXPR (changes[i].old))
               && DECL_REGISTER (REG_EXPR (changes[i].old)))
        {
          /* Don't allow changes of hard register operands to inline
             assemblies if they have been defined as register asm ("x").  */
          break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  return (i == num_changes);
}
/* A group of changes has previously been issued with validate_change
   and verified with verify_changes.  Call df_insn_rescan for each of
   the insns changed and clear num_changes.  */

void
confirm_change_group (void)
{
  int i;
  rtx last_object = NULL;

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (changes[i].unshare)
        *changes[i].loc = copy_rtx (*changes[i].loc);

      /* Avoid unnecessary rescanning when multiple changes to same instruction
         are made.  */
      if (object)
        {
          if (object != last_object && last_object && INSN_P (last_object))
            df_insn_rescan (last_object);
          last_object = object;
        }
    }

  if (last_object && INSN_P (last_object))
    df_insn_rescan (last_object);
  num_changes = 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   If all changes are valid, call confirm_change_group and return 1,
   otherwise, call cancel_changes and return 0.  */

int
apply_change_group (void)
{
  if (verify_changes (0))
    {
      confirm_change_group ();
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
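
/* Usage sketch (illustrative): apply_change_group above is equivalent to
   the explicit two-step sequence, which callers can use when they need to
   examine the changed insns between validation and commitment:

     if (verify_changes (0))
       confirm_change_group ();   (keep changes, queue df rescans)
     else
       cancel_changes (0);        (restore every *loc and INSN_CODE)
*/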
/* Return the number of changes so far in the current group.  */

int
num_validated_changes (void)
{
  return num_changes;
}
/* Retract the changes numbered NUM and up.  */

void
cancel_changes (int num)
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && !MEM_P (changes[i].object))
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
/* A subroutine of validate_replace_rtx_1 that tries to simplify the resulting
   rtx.  */

static void
simplify_while_replacing (rtx *loc, rtx to, rtx object,
                          enum machine_mode op0_mode)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx new_rtx;

  if (SWAPPABLE_OPERANDS_P (x)
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_unshare_change (object, loc,
                               gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
                                               : swap_condition (code),
                                               GET_MODE (x), XEXP (x, 1),
                                               XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         simplify_gen_binary to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (CONST_INT_P (XEXP (x, 1)) && XEXP (x, 1) == to)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (CONST_INT_P (XEXP (x, 1))
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new_rtx = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                        op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new_rtx)
            new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new_rtx, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new_rtx = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                                 SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new_rtx && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new_rtx = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new_rtx)
        validate_change (object, loc, new_rtx, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (MEM_P (XEXP (x, 0))
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (x, 2))
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object,
                        bool simplify)
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (REG_P (x) && REG_P (from)
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_unshare_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.
     We must not replace inside already replaced expression, otherwise we
     get infinite recursion for replacements like (reg X)->(subreg (reg X))
     done by regmove, so we must special case shared ASM_OPERANDS.  */

  if (GET_CODE (x) == PARALLEL)
    {
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        {
          if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
              && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
            {
              /* Verify that operands are really shared.  */
              gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
                          == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
                                                              (x, 0, j))));
              validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
                                      from, to, object, simplify);
            }
          else
            validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object,
                                    simplify);
        }
    }
  else
    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          validate_replace_rtx_1 (&XEXP (x, i), from, to, object, simplify);
        else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object,
                                    simplify);
      }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */
  if (simplify)
    simplify_while_replacing (loc, to, object, op0_mode);
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
{
  validate_replace_rtx_1 (loc, from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in WHERE with TO.  Assume that WHERE
   is a part of INSN.  After all changes have been made, validate by seeing if
   INSN is still valid.
   validate_replace_rtx (from, to, insn) is equivalent to
   validate_replace_rtx_part (from, to, &PATTERN (insn), insn).  */

int
validate_replace_rtx_part (rtx from, rtx to, rtx *where, rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, true);
  return apply_change_group ();
}

/* Same as above, but do not simplify rtx afterwards.  */
int
validate_replace_rtx_part_nosimplify (rtx from, rtx to, rtx *where,
                                      rtx insn)
{
  validate_replace_rtx_1 (where, from, to, insn, false);
  return apply_change_group ();
}
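
/* Usage sketch (illustrative; INSN, OLD_REG and NEW_REG are hypothetical):

     if (validate_replace_rtx (old_reg, new_reg, insn))
       ;  (INSN re-recognized with NEW_REG substituted everywhere)
     else
       ;  (the substitutions were rolled back by cancel_changes)
*/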
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (rtx from, rtx to, rtx insn)
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn, true);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (rtx *x, void *data)
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn, true);
}
/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (rtx from, rtx to, rtx insn)
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}
/* Try to simplify INSN.
   Invoke simplify_rtx () on every SET_SRC and SET_DEST inside the INSN's
   pattern and return true if something was simplified.  */

bool
validate_simplify_insn (rtx insn)
{
  int i;
  rtx pat = NULL;
  rtx newpat = NULL;

  pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    {
      newpat = simplify_rtx (SET_SRC (pat));
      if (newpat && !rtx_equal_p (SET_SRC (pat), newpat))
        validate_change (insn, &SET_SRC (pat), newpat, 1);
      newpat = simplify_rtx (SET_DEST (pat));
      if (newpat && !rtx_equal_p (SET_DEST (pat), newpat))
        validate_change (insn, &SET_DEST (pat), newpat, 1);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx s = XVECEXP (pat, 0, i);

        if (GET_CODE (XVECEXP (pat, 0, i)) == SET)
          {
            newpat = simplify_rtx (SET_SRC (s));
            if (newpat && !rtx_equal_p (SET_SRC (s), newpat))
              validate_change (insn, &SET_SRC (s), newpat, 1);
            newpat = simplify_rtx (SET_DEST (s));
            if (newpat && !rtx_equal_p (SET_DEST (s), newpat))
              validate_change (insn, &SET_DEST (s), newpat, 1);
          }
      }
  return ((num_changes_pending () > 0) && (apply_change_group () > 0));
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (rtx insn)
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return (INSN_P (next)
          && ! inequality_comparisons_p (PATTERN (next)));
}
#endif
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */
int
general_operand (rtx op, enum machine_mode mode)
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.
         However, we must allow them after reload so that they can
         get cleaned up by cleanup_subreg_operands.  */
      if (!reload_completed && MEM_P (sub)
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && MEM_P (sub))
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      /* Use the mem's mode, since it will be reloaded thus.  */
      if (memory_address_p (GET_MODE (op), y))
        return 1;
    }

  return 0;
}
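
/* Example (illustrative): in a machine description, general_operand is
   usually referenced by name from a match_operand, as in

     (match_operand:SI 1 "general_operand" "g")

   which is equivalent to calling general_operand (op, SImode) on the
   candidate rtx from C.  */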
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (rtx op, enum machine_mode mode)
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (sub))
        return general_operand (op, mode);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if (REG_P (sub)
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (op))
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (rtx op, enum machine_mode mode)
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (REG_P (op)
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONST_INT_P (op)
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (rtx op, enum machine_mode mode)
{
  if (!CONST_INT_P (op))
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (rtx op, enum machine_mode mode)
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || CONST_INT_P (op))
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (rtx op, enum machine_mode mode)
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (rtx op, enum machine_mode mode)
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (CONST_INT_P (op)
          && mode != VOIDmode
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && MEM_P (SUBREG_REG (op)))
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (REG_P (op)
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (rtx op, enum machine_mode mode)
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || !CONST_INT_P (XEXP (XEXP (op, 1), 1))
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
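
/* Example (illustrative): on a target where the stack grows downward and
   PUSH_ROUNDING does not pad MODE, STACK_PUSH_CODE is PRE_DEC and
   push_operand accepts

     (mem:SI (pre_dec:SI (reg:SI sp)))

   provided the decremented register is stack_pointer_rtx.  */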
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (rtx op, enum machine_mode mode)
{
  if (!MEM_P (op))
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
{
#ifdef GO_IF_LEGITIMATE_ADDRESS
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
#else
  return targetm.legitimate_address_p (mode, addr, 0);
#endif
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (rtx op, enum machine_mode mode)
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return MEM_P (op) && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (MEM_P (inner) && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (rtx op, enum machine_mode mode)
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && CONST_INT_P (XEXP (XEXP (inner, 0), 1))
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (MEM_P (op)
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is an ordered comparison operator (not including
   ORDERED and UNORDERED).  */

int
ordered_comparison_operator (rtx op, enum machine_mode mode)
{
  if (mode != VOIDmode && GET_MODE (op) != mode)
    return false;
  switch (GET_CODE (op))
    {
    case EQ:
    case NE:
    case LT:
    case LTU:
    case LE:
    case LEU:
    case GT:
    case GTU:
    case GE:
    case GEU:
      return true;
    default:
      return false;
    }
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (rtx op, enum machine_mode mode)
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && COMPARISON_P (op));
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (const_rtx body)
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
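
/* Example (illustrative): for a two-output asm with one clobber,

     (parallel [(set (reg:SI 100) (asm_operands ...))
                (set (reg:SI 101) (asm_operands ...))
                (clobber (reg:CC flags))])

   asm_noperands returns 2 plus the shared input count, provided both
   ASM_OPERANDS share one input vector; otherwise it returns -1.  */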
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
                     const char **constraints, enum machine_mode *modes,
                     location_t *loc)
{
  int i;
  int noperands;
  rtx asmop = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin;
      int nout = 0;		/* Does not include CLOBBERs.  */

      asmop = SET_SRC (XVECEXP (body, 0, 0));
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      int nin;

      asmop = XVECEXP (body, 0, 0);
      nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
    }

  if (loc)
    *loc = ASM_OPERANDS_SOURCE_LOCATION (asmop);

  return ASM_OPERANDS_TEMPLATE (asmop);
}
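
/* Usage sketch (illustrative; mirrors check_asm_operands above).  N must
   come from asm_noperands (body):

     int n = asm_noperands (body);
     rtx *ops = XALLOCAVEC (rtx, n);
     const char **cons = XALLOCAVEC (const char *, n);
     const char *templ
       = decode_asm_operands (body, ops, NULL, cons, NULL, NULL);

   Afterwards ops[] holds the outputs (if any) followed by the inputs,
   and TEMPL is the assembler template string.  */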
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (rtx op, const char *constraint, const char **constraints)
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  gcc_assert (!reload_completed);

  while (*constraint)
    {
      char c = *constraint;
      int len;
      switch (c)
        {
        case ',':
          constraint++;
          continue;
        case '=':
        case '+':
        case '*':
        case '%':
        case '!':
        case '#':
        case '&':
        case '?':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* If caller provided constraints pointer, look up
             the matching constraint.  Otherwise, our caller should have
             given us the proper matching constraint, but we can't
             actually fail the check if they didn't.  Indicate that
             results are inconclusive.  */
          if (constraints)
            {
              char *end;
              unsigned long match;

              match = strtoul (constraint, &end, 10);
              if (!result)
                result = asm_operand_ok (op, constraints[match], NULL);
              constraint = (const char *) end;
            }
          else
            {
              do
                constraint++;
              while (ISDIGIT (*constraint));
              if (! result)
                result = -1;
            }
          continue;

        case 'p':
          if (address_operand (op, VOIDmode))
            result = 1;
          break;

        case TARGET_MEM_CONSTRAINT:
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            result = 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            result = 1;
          break;

        case '<':
          /* ??? Before auto-inc-dec, auto inc/dec insns are not supposed
             to exist, excepting those that expand_call created.  Further,
             on some machines which do not have generalized auto inc/dec,
             an inc/dec is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            result = 1;
          break;

        case '>':
          if (MEM_P (op)
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            result = 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            result = 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
            result = 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
            result = 1;
          break;

        case 's':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* Fall through.  */

        case 'i':
          if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
            result = 1;
          break;

        case 'n':
          if (CONST_INT_P (op)
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            result = 1;
          break;

        case 'I':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
            result = 1;
          break;
        case 'J':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
            result = 1;
          break;
        case 'K':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
            result = 1;
          break;
        case 'L':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
            result = 1;
          break;
        case 'M':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
            result = 1;
          break;
        case 'N':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
            result = 1;
          break;
        case 'O':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
            result = 1;
          break;
        case 'P':
          if (CONST_INT_P (op)
              && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
            result = 1;
          break;

        case 'X':
          result = 1;
          break;

        case 'g':
          if (general_operand (op, VOIDmode))
            result = 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                result = 1;
            }
#ifdef EXTRA_CONSTRAINT_STR
          else if (EXTRA_MEMORY_CONSTRAINT (c, constraint))
            /* Every memory operand can be reloaded to fit.  */
            result = result || memory_operand (op, VOIDmode);
          else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint))
            /* Every address operand can be reloaded to fit.  */
            result = result || address_operand (op, VOIDmode);
          else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
            result = 1;
#endif
          break;
        }
      len = CONSTRAINT_LEN (c, constraint);
      do
        constraint++;
      while (--len && *constraint);
      if (len)
        return 0;
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (rtx *p)
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
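
/* Example (illustrative): with *P == (plus:SI (reg:SI 100) (const_int 4)),
   the recursion ends in the second summand and returns &XEXP (*p, 1),
   the location of the CONST_INT; for a bare (reg:SI 100) the result is
   a null pointer.  */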
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (rtx op)
{
  return ((MEM_P (op))
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) (enum machine_mode, rtx) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (rtx addr)
{
  /* Auto-increment addressing with anything other than post_modify
     or pre_modify always introduces a mode dependency.  Catch such
     cases now instead of deferring to the target.  */
  if (GET_CODE (addr) == PRE_INC
      || GET_CODE (addr) == POST_INC
      || GET_CODE (addr) == PRE_DEC
      || GET_CODE (addr) == POST_DEC)
    return 1;

  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
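
/* Example (illustrative): (post_inc:SI (reg:SI 100)) is always flagged as
   mode-dependent by the early return above, because the amount added by
   the side effect equals the size of the enclosing memory reference's
   mode; a plain (plus (reg) (const_int)) address is mode-dependent only
   if GO_IF_MODE_DEPENDENT_ADDRESS jumps for it on the target.  */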
/* Like extract_insn, but save the insn extracted and don't extract again
   when called again for the same insn, expecting that recog_data still
   contains valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (rtx insn)
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}
/* Do cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (rtx insn)
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
/* Do cached constrain_operands and complain about failures.  */
int
constrain_operands_cached (int strict)
{
  if (which_alternative == -1)
    return constrain_operands (strict);
  else
    return 1;
}
2021 /* Analyze INSN and fill in recog_data. */
2023 void
2024 extract_insn (rtx insn)
2026 int i;
2027 int icode;
2028 int noperands;
2029 rtx body = PATTERN (insn);
2031 recog_data.n_operands = 0;
2032 recog_data.n_alternatives = 0;
2033 recog_data.n_dups = 0;
2035 switch (GET_CODE (body))
2037 case USE:
2038 case CLOBBER:
2039 case ASM_INPUT:
2040 case ADDR_VEC:
2041 case ADDR_DIFF_VEC:
2042 return;
2044 case SET:
2045 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2046 goto asm_insn;
2047 else
2048 goto normal_insn;
2049 case PARALLEL:
2050 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2051 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2052 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2053 goto asm_insn;
2054 else
2055 goto normal_insn;
2056 case ASM_OPERANDS:
2057 asm_insn:
2058 recog_data.n_operands = noperands = asm_noperands (body);
2059 if (noperands >= 0)
2061 /* This insn is an `asm' with operands. */
2063 /* expand_asm_operands makes sure there aren't too many operands. */
2064 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2066 /* Now get the operand values and constraints out of the insn. */
2067 decode_asm_operands (body, recog_data.operand,
2068 recog_data.operand_loc,
2069 recog_data.constraints,
2070 recog_data.operand_mode, NULL);
2071 if (noperands > 0)
2073 const char *p = recog_data.constraints[0];
2074 recog_data.n_alternatives = 1;
2075 while (*p)
2076 recog_data.n_alternatives += (*p++ == ',');
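/* E.g. an asm constraint string of "=r,m" has one comma and
   therefore describes two alternatives.  */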
2078 break;
2080 fatal_insn_not_found (insn);
2082 default:
2083 normal_insn:
2084 /* Ordinary insn: recognize it, get the operands via insn_extract
2085 and get the constraints. */
2087 icode = recog_memoized (insn);
2088 if (icode < 0)
2089 fatal_insn_not_found (insn);
2091 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2092 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2093 recog_data.n_dups = insn_data[icode].n_dups;
2095 insn_extract (insn);
2097 for (i = 0; i < noperands; i++)
2099 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2100 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2101 /* A VOIDmode match_operand gets its mode from its real operand. */
2102 if (recog_data.operand_mode[i] == VOIDmode)
2103 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2106 for (i = 0; i < noperands; i++)
2107 recog_data.operand_type[i]
2108 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2109 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2110 : OP_IN);
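/* E.g. "=r" marks a pure output (OP_OUT), "+r" a read-write
   operand (OP_INOUT), and plain "r" an input (OP_IN).  */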
2112 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2114 if (INSN_CODE (insn) < 0)
2115 for (i = 0; i < recog_data.n_alternatives; i++)
2116 recog_data.alternative_enabled_p[i] = true;
2117 else
2119 recog_data.insn = insn;
2120 for (i = 0; i < recog_data.n_alternatives; i++)
2122 which_alternative = i;
2123 recog_data.alternative_enabled_p[i] = get_attr_enabled (insn);
2127 recog_data.insn = NULL;
2128 which_alternative = -1;
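/* Usage sketch (editorial, not part of GCC): a pass that wants to walk
   the operands of an arbitrary recognizable insn can do

     int i;
     extract_insn (insn);
     for (i = 0; i < recog_data.n_operands; i++)
       if (MEM_P (recog_data.operand[i]))
         ;                         // inspect memory operands here

   Unrecognizable insns do not return: extract_insn calls
   fatal_insn_not_found on them.  */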
2131 /* After calling extract_insn, you can use this function to extract some
2132 information from the constraint strings into a more usable form.
2133 The collected data is stored in recog_op_alt. */
2134 void
2135 preprocess_constraints (void)
2137 int i;
2139 for (i = 0; i < recog_data.n_operands; i++)
2140 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2141 * sizeof (struct operand_alternative)));
2143 for (i = 0; i < recog_data.n_operands; i++)
2145 int j;
2146 struct operand_alternative *op_alt;
2147 const char *p = recog_data.constraints[i];
2149 op_alt = recog_op_alt[i];
2151 for (j = 0; j < recog_data.n_alternatives; j++)
2153 op_alt[j].cl = NO_REGS;
2154 op_alt[j].constraint = p;
2155 op_alt[j].matches = -1;
2156 op_alt[j].matched = -1;
2158 if (!recog_data.alternative_enabled_p[j])
2160 p = skip_alternative (p);
2161 continue;
2164 if (*p == '\0' || *p == ',')
2166 op_alt[j].anything_ok = 1;
2167 continue;
2170 for (;;)
2172 char c = *p;
2173 if (c == '#')
2174 do
2175 c = *++p;
2176 while (c != ',' && c != '\0');
2177 if (c == ',' || c == '\0')
2179 p++;
2180 break;
2183 switch (c)
2185 case '=': case '+': case '*': case '%':
2186 case 'E': case 'F': case 'G': case 'H':
2187 case 's': case 'i': case 'n':
2188 case 'I': case 'J': case 'K': case 'L':
2189 case 'M': case 'N': case 'O': case 'P':
2190 /* These don't say anything we care about. */
2191 break;
2193 case '?':
2194 op_alt[j].reject += 6;
2195 break;
2196 case '!':
2197 op_alt[j].reject += 600;
2198 break;
2199 case '&':
2200 op_alt[j].earlyclobber = 1;
2201 break;
2203 case '0': case '1': case '2': case '3': case '4':
2204 case '5': case '6': case '7': case '8': case '9':
2206 char *end;
2207 op_alt[j].matches = strtoul (p, &end, 10);
2208 recog_op_alt[op_alt[j].matches][j].matched = i;
2209 p = end;
2211 continue;
2213 case TARGET_MEM_CONSTRAINT:
2214 op_alt[j].memory_ok = 1;
2215 break;
2216 case '<':
2217 op_alt[j].decmem_ok = 1;
2218 break;
2219 case '>':
2220 op_alt[j].incmem_ok = 1;
2221 break;
2222 case 'V':
2223 op_alt[j].nonoffmem_ok = 1;
2224 break;
2225 case 'o':
2226 op_alt[j].offmem_ok = 1;
2227 break;
2228 case 'X':
2229 op_alt[j].anything_ok = 1;
2230 break;
2232 case 'p':
2233 op_alt[j].is_address = 1;
2234 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2235 [(int) base_reg_class (VOIDmode, ADDRESS, SCRATCH)];
2236 break;
2238 case 'g':
2239 case 'r':
2240 op_alt[j].cl =
2241 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2242 break;
2244 default:
2245 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2247 op_alt[j].memory_ok = 1;
2248 break;
2250 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2252 op_alt[j].is_address = 1;
2253 op_alt[j].cl
2254 = (reg_class_subunion
2255 [(int) op_alt[j].cl]
2256 [(int) base_reg_class (VOIDmode, ADDRESS,
2257 SCRATCH)]);
2258 break;
2261 op_alt[j].cl
2262 = (reg_class_subunion
2263 [(int) op_alt[j].cl]
2264 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2265 break;
2267 p += CONSTRAINT_LEN (c, p);
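/* Usage sketch (editorial, not part of GCC): after extraction, the
   preprocessed tables save repeated constraint-string parsing.  For an
   operand whose constraint is "=r,m", alternative 0 ends up with
   cl == GENERAL_REGS and alternative 1 with memory_ok set:

     extract_insn (insn);
     preprocess_constraints ();
     if (recog_op_alt[opno][alt].memory_ok)
       ;                          // alternative alt allows a MEM here

   where opno and alt are operand and alternative indices supplied by
   the caller.  */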
2273 /* Check the operands of an insn against the insn's operand constraints
2274 and return 1 if they are valid.
2275 The information about the insn's operands, constraints, operand modes
2276 etc. is obtained from the global variables set up by extract_insn.
2278 WHICH_ALTERNATIVE is set to a number which indicates which
2279 alternative of constraints was matched: 0 for the first alternative,
2280 1 for the next, etc.
2282 In addition, when two operands are required to match
2283 and it happens that the output operand is (reg) while the
2284 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2285 make the output operand look like the input.
2286 This is because the output operand is the one the template will print.
2288 This is used in final, just before printing the assembler code and by
2289 the routines that determine an insn's attribute.
2291 If STRICT is a positive nonzero value, it means that we have been
2292 called after reload has been completed. In that case, we must
2293 do all checks strictly. If it is zero, it means that we have been called
2294 before reload has completed. In that case, we first try to see if we can
2295 find an alternative that matches strictly. If not, we try again, this
2296 time assuming that reload will fix up the insn. This provides a "best
2297 guess" for the alternative and is used to compute attributes of insns prior
2298 to reload. A negative value of STRICT is used for this internal call. */
2300 struct funny_match
2302 int this_op, other;
2305 int
2306 constrain_operands (int strict)
2308 const char *constraints[MAX_RECOG_OPERANDS];
2309 int matching_operands[MAX_RECOG_OPERANDS];
2310 int earlyclobber[MAX_RECOG_OPERANDS];
2311 int c;
2313 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2314 int funny_match_index;
2316 which_alternative = 0;
2317 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2318 return 1;
2320 for (c = 0; c < recog_data.n_operands; c++)
2322 constraints[c] = recog_data.constraints[c];
2323 matching_operands[c] = -1;
2328 int seen_earlyclobber_at = -1;
2329 int opno;
2330 int lose = 0;
2331 funny_match_index = 0;
2333 if (!recog_data.alternative_enabled_p[which_alternative])
2335 int i;
2337 for (i = 0; i < recog_data.n_operands; i++)
2338 constraints[i] = skip_alternative (constraints[i]);
2340 which_alternative++;
2341 continue;
2344 for (opno = 0; opno < recog_data.n_operands; opno++)
2346 rtx op = recog_data.operand[opno];
2347 enum machine_mode mode = GET_MODE (op);
2348 const char *p = constraints[opno];
2349 int offset = 0;
2350 int win = 0;
2351 int val;
2352 int len;
2354 earlyclobber[opno] = 0;
2356 /* A unary operator may be accepted by the predicate, but it
2357 is irrelevant for matching constraints. */
2358 if (UNARY_P (op))
2359 op = XEXP (op, 0);
2361 if (GET_CODE (op) == SUBREG)
2363 if (REG_P (SUBREG_REG (op))
2364 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2365 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2366 GET_MODE (SUBREG_REG (op)),
2367 SUBREG_BYTE (op),
2368 GET_MODE (op));
2369 op = SUBREG_REG (op);
2372 /* An empty constraint or empty alternative
2373 allows anything which matched the pattern. */
2374 if (*p == 0 || *p == ',')
2375 win = 1;
2377 do
2378 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2380 case '\0':
2381 len = 0;
2382 break;
2383 case ',':
2384 c = '\0';
2385 break;
2387 case '?': case '!': case '*': case '%':
2388 case '=': case '+':
2389 break;
2391 case '#':
2392 /* Ignore rest of this alternative as far as
2393 constraint checking is concerned. */
2394 do
2395 p++;
2396 while (*p && *p != ',');
2397 len = 0;
2398 break;
2400 case '&':
2401 earlyclobber[opno] = 1;
2402 if (seen_earlyclobber_at < 0)
2403 seen_earlyclobber_at = opno;
2404 break;
2406 case '0': case '1': case '2': case '3': case '4':
2407 case '5': case '6': case '7': case '8': case '9':
2409 /* This operand must be the same as a previous one.
2410 This kind of constraint is used for instructions such
2411 as add when they take only two operands.
2413 Note that the lower-numbered operand is passed first.
2415 If we are not testing strictly, assume that this
2416 constraint will be satisfied. */
2418 char *end;
2419 int match;
2421 match = strtoul (p, &end, 10);
2422 p = end;
2424 if (strict < 0)
2425 val = 1;
2426 else
2428 rtx op1 = recog_data.operand[match];
2429 rtx op2 = recog_data.operand[opno];
2431 /* A unary operator may be accepted by the predicate,
2432 but it is irrelevant for matching constraints. */
2433 if (UNARY_P (op1))
2434 op1 = XEXP (op1, 0);
2435 if (UNARY_P (op2))
2436 op2 = XEXP (op2, 0);
2438 val = operands_match_p (op1, op2);
2441 matching_operands[opno] = match;
2442 matching_operands[match] = opno;
2444 if (val != 0)
2445 win = 1;
2447 /* If output is *x and input is *--x, arrange later
2448 to change the output to *--x as well, since the
2449 output op is the one that will be printed. */
2450 if (val == 2 && strict > 0)
2452 funny_match[funny_match_index].this_op = opno;
2453 funny_match[funny_match_index++].other = match;
2456 len = 0;
2457 break;
2459 case 'p':
2460 /* p is used for address_operands. When we are called by
2461 gen_reload, no one will have checked that the address is
2462 strictly valid, i.e., that all pseudos requiring hard regs
2463 have gotten them. */
2464 if (strict <= 0
2465 || (strict_memory_address_p (recog_data.operand_mode[opno],
2466 op)))
2467 win = 1;
2468 break;
2470 /* No need to check general_operand again;
2471 it was done in insn-recog.c. Well, except that reload
2472 doesn't check the validity of its replacements, but
2473 that should only matter when there's a bug. */
2474 case 'g':
2475 /* Anything goes unless it is a REG and really has a hard reg
2476 but the hard reg is not in the class GENERAL_REGS. */
2477 if (REG_P (op))
2479 if (strict < 0
2480 || GENERAL_REGS == ALL_REGS
2481 || (reload_in_progress
2482 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2483 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2484 win = 1;
2486 else if (strict < 0 || general_operand (op, mode))
2487 win = 1;
2488 break;
2490 case 'X':
2491 /* This is used for a MATCH_SCRATCH in the cases when
2492 we don't actually need anything. So anything goes
2493 any time. */
2494 win = 1;
2495 break;
2497 case TARGET_MEM_CONSTRAINT:
2498 /* Memory operands must be valid, to the extent
2499 required by STRICT. */
2500 if (MEM_P (op))
2502 if (strict > 0
2503 && !strict_memory_address_p (GET_MODE (op),
2504 XEXP (op, 0)))
2505 break;
2506 if (strict == 0
2507 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2508 break;
2509 win = 1;
2511 /* Before reload, accept what reload can turn into mem. */
2512 else if (strict < 0 && CONSTANT_P (op))
2513 win = 1;
2514 /* During reload, accept a pseudo. */
2515 else if (reload_in_progress && REG_P (op)
2516 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2517 win = 1;
2518 break;
2520 case '<':
2521 if (MEM_P (op)
2522 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2523 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2524 win = 1;
2525 break;
2527 case '>':
2528 if (MEM_P (op)
2529 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2530 || GET_CODE (XEXP (op, 0)) == POST_INC))
2531 win = 1;
2532 break;
2534 case 'E':
2535 case 'F':
2536 if (GET_CODE (op) == CONST_DOUBLE
2537 || (GET_CODE (op) == CONST_VECTOR
2538 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2539 win = 1;
2540 break;
2542 case 'G':
2543 case 'H':
2544 if (GET_CODE (op) == CONST_DOUBLE
2545 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2546 win = 1;
2547 break;
2549 case 's':
2550 if (CONST_INT_P (op)
2551 || (GET_CODE (op) == CONST_DOUBLE
2552 && GET_MODE (op) == VOIDmode))
2553 break;
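/* If OP is not a numeric constant, fall through: 's' accepts
   any other CONSTANT_P rtx, just as 'i' does.  */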
2554 case 'i':
2555 if (CONSTANT_P (op))
2556 win = 1;
2557 break;
2559 case 'n':
2560 if (CONST_INT_P (op)
2561 || (GET_CODE (op) == CONST_DOUBLE
2562 && GET_MODE (op) == VOIDmode))
2563 win = 1;
2564 break;
2566 case 'I':
2567 case 'J':
2568 case 'K':
2569 case 'L':
2570 case 'M':
2571 case 'N':
2572 case 'O':
2573 case 'P':
2574 if (CONST_INT_P (op)
2575 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2576 win = 1;
2577 break;
2579 case 'V':
2580 if (MEM_P (op)
2581 && ((strict > 0 && ! offsettable_memref_p (op))
2582 || (strict < 0
2583 && !(CONSTANT_P (op) || MEM_P (op)))
2584 || (reload_in_progress
2585 && !(REG_P (op)
2586 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2587 win = 1;
2588 break;
2590 case 'o':
2591 if ((strict > 0 && offsettable_memref_p (op))
2592 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2593 /* Before reload, accept what reload can handle. */
2594 || (strict < 0
2595 && (CONSTANT_P (op) || MEM_P (op)))
2596 /* During reload, accept a pseudo. */
2597 || (reload_in_progress && REG_P (op)
2598 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2599 win = 1;
2600 break;
2602 default:
2604 enum reg_class cl;
2606 cl = (c == 'r'
2607 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2608 if (cl != NO_REGS)
2610 if (strict < 0
2611 || (strict == 0
2612 && REG_P (op)
2613 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2614 || (strict == 0 && GET_CODE (op) == SCRATCH)
2615 || (REG_P (op)
2616 && reg_fits_class_p (op, cl, offset, mode)))
2617 win = 1;
2619 #ifdef EXTRA_CONSTRAINT_STR
2620 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2621 win = 1;
2623 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2624 /* Every memory operand can be reloaded to fit. */
2625 && ((strict < 0 && MEM_P (op))
2626 /* Before reload, accept what reload can turn
2627 into mem. */
2628 || (strict < 0 && CONSTANT_P (op))
2629 /* During reload, accept a pseudo. */
2630 || (reload_in_progress && REG_P (op)
2631 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2632 win = 1;
2633 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2634 /* Every address operand can be reloaded to fit. */
2635 && strict < 0)
2636 win = 1;
2637 #endif
2638 break;
2641 while (p += len, c);
2643 constraints[opno] = p;
2644 /* If this operand did not win somehow,
2645 this alternative loses. */
2646 if (! win)
2647 lose = 1;
2649 /* This alternative won; the operands are ok.
2650 Change whichever operands this alternative says to change. */
2651 if (! lose)
2653 int opno, eopno;
2655 /* See if any earlyclobber operand conflicts with some other
2656 operand. */
2658 if (strict > 0 && seen_earlyclobber_at >= 0)
2659 for (eopno = seen_earlyclobber_at;
2660 eopno < recog_data.n_operands;
2661 eopno++)
2662 /* Ignore earlyclobber operands now in memory,
2663 because we would often report failure when we have
2664 two memory operands, one of which was formerly a REG. */
2665 if (earlyclobber[eopno]
2666 && REG_P (recog_data.operand[eopno]))
2667 for (opno = 0; opno < recog_data.n_operands; opno++)
2668 if ((MEM_P (recog_data.operand[opno])
2669 || recog_data.operand_type[opno] != OP_OUT)
2670 && opno != eopno
2671 /* Ignore things like match_operator operands. */
2672 && *recog_data.constraints[opno] != 0
2673 && ! (matching_operands[opno] == eopno
2674 && operands_match_p (recog_data.operand[opno],
2675 recog_data.operand[eopno]))
2676 && ! safe_from_earlyclobber (recog_data.operand[opno],
2677 recog_data.operand[eopno]))
2678 lose = 1;
2680 if (! lose)
2682 while (--funny_match_index >= 0)
2684 recog_data.operand[funny_match[funny_match_index].other]
2685 = recog_data.operand[funny_match[funny_match_index].this_op];
2688 return 1;
2692 which_alternative++;
2694 while (which_alternative < recog_data.n_alternatives);
2696 which_alternative = -1;
2697 /* If we are about to reject this, but we are not to test strictly,
2698 try a very loose test. Only return failure if it fails also. */
2699 if (strict == 0)
2700 return constrain_operands (-1);
2701 else
2702 return 0;
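/* Usage sketch (editorial): the canonical pairing appears in
   extract_constrain_insn_cached above:

     extract_insn (insn);
     if (!constrain_operands (reload_completed))
       fatal_insn_not_found (insn);

   After this, which_alternative indexes the matched alternative,
   or is -1 if no alternative matched.  */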
2705 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2706 is a hard reg in class CL when its regno is offset by OFFSET
2707 and changed to mode MODE.
2708 If OPERAND occupies multiple hard regs, all of them must be in CL. */
2710 int
2711 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2712 enum machine_mode mode)
2714 int regno = REGNO (operand);
2716 if (cl == NO_REGS)
2717 return 0;
2719 return (regno < FIRST_PSEUDO_REGISTER
2720 && in_hard_reg_set_p (reg_class_contents[(int) cl],
2721 mode, regno + offset));
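/* Illustrative sketch (editorial, not part of GCC): assuming hard
   register 1 is in GENERAL_REGS on the target,

     reg_fits_class_p (gen_rtx_REG (SImode, 1), GENERAL_REGS, 0, SImode)

   returns 1 only if every hard register occupied by an SImode value
   starting at register 1 is in GENERAL_REGS.  */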
2724 /* Split single instruction. Helper function for split_all_insns and
2725 split_all_insns_noflow. Return last insn in the sequence if successful,
2726 or NULL if unsuccessful. */
2728 static rtx
2729 split_insn (rtx insn)
2731 /* Split insns here to get max fine-grain parallelism. */
2732 rtx first = PREV_INSN (insn);
2733 rtx last = try_split (PATTERN (insn), insn, 1);
2734 rtx insn_set, last_set, note;
2736 if (last == insn)
2737 return NULL_RTX;
2739 /* If the original instruction was a single set that was known to be
2740 equivalent to a constant, see if we can say the same about the last
2741 instruction in the split sequence. The two instructions must set
2742 the same destination. */
2743 insn_set = single_set (insn);
2744 if (insn_set)
2746 last_set = single_set (last);
2747 if (last_set && rtx_equal_p (SET_DEST (last_set), SET_DEST (insn_set)))
2749 note = find_reg_equal_equiv_note (insn);
2750 if (note && CONSTANT_P (XEXP (note, 0)))
2751 set_unique_reg_note (last, REG_EQUAL, XEXP (note, 0));
2752 else if (CONSTANT_P (SET_SRC (insn_set)))
2753 set_unique_reg_note (last, REG_EQUAL, SET_SRC (insn_set));
2757 /* try_split returns the NOTE that INSN became. */
2758 SET_INSN_DELETED (insn);
2760 /* ??? Coddle to md files that generate subregs in post-reload
2761 splitters instead of computing the proper hard register. */
2762 if (reload_completed && first != last)
2764 first = NEXT_INSN (first);
2765 for (;;)
2767 if (INSN_P (first))
2768 cleanup_subreg_operands (first);
2769 if (first == last)
2770 break;
2771 first = NEXT_INSN (first);
2775 return last;
2778 /* Split all insns in the function. */
2780 void
2781 split_all_insns (void)
2783 sbitmap blocks;
2784 bool changed;
2785 basic_block bb;
2787 blocks = sbitmap_alloc (last_basic_block);
2788 sbitmap_zero (blocks);
2789 changed = false;
2791 FOR_EACH_BB_REVERSE (bb)
2793 rtx insn, next;
2794 bool finish = false;
2796 rtl_profile_for_bb (bb);
2797 for (insn = BB_HEAD (bb); !finish ; insn = next)
2799 /* Can't use `next_real_insn' because that might cross
2800 CODE_LABELs and short-circuit basic blocks. */
2801 next = NEXT_INSN (insn);
2802 finish = (insn == BB_END (bb));
2803 if (INSN_P (insn))
2805 rtx set = single_set (insn);
2807 /* Don't split no-op move insns. These should silently
2808 disappear later in final. Splitting such insns would
2809 break the code that handles LIBCALL blocks. */
2810 if (set && set_noop_p (set))
2812 /* Nops get in the way while scheduling, so delete them
2813 now if register allocation has already been done. It
2814 is too risky to try to do this before register
2815 allocation, and there are unlikely to be very many
2816 nops then anyway. */
2817 if (reload_completed)
2818 delete_insn_and_edges (insn);
2820 else
2822 rtx last = split_insn (insn);
2823 if (last)
2825 /* The split sequence may include a barrier, but the
2826 BB boundary we are interested in will be set to the
2827 previous one. */
2829 while (BARRIER_P (last))
2830 last = PREV_INSN (last);
2831 SET_BIT (blocks, bb->index);
2832 changed = true;
2839 default_rtl_profile ();
2840 if (changed)
2841 find_many_sub_basic_blocks (blocks);
2843 #ifdef ENABLE_CHECKING
2844 verify_flow_info ();
2845 #endif
2847 sbitmap_free (blocks);
2850 /* Same as split_all_insns, but do not expect CFG to be available.
2851 Used by machine dependent reorg passes. */
2853 unsigned int
2854 split_all_insns_noflow (void)
2856 rtx next, insn;
2858 for (insn = get_insns (); insn; insn = next)
2860 next = NEXT_INSN (insn);
2861 if (INSN_P (insn))
2863 /* Don't split no-op move insns. These should silently
2864 disappear later in final. Splitting such insns would
2865 break the code that handles LIBCALL blocks. */
2866 rtx set = single_set (insn);
2867 if (set && set_noop_p (set))
2869 /* Nops get in the way while scheduling, so delete them
2870 now if register allocation has already been done. It
2871 is too risky to try to do this before register
2872 allocation, and there are unlikely to be very many
2873 nops then anyway.
2875 ??? Should we use delete_insn when the CFG isn't valid? */
2876 if (reload_completed)
2877 delete_insn_and_edges (insn);
2879 else
2880 split_insn (insn);
2883 return 0;
2886 #ifdef HAVE_peephole2
2887 struct peep2_insn_data
2889 rtx insn;
2890 regset live_before;
2893 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2894 static int peep2_current;
2895 /* The number of instructions available to match a peep2. */
2896 int peep2_current_count;
2898 /* A non-insn marker indicating the last insn of the block.
2899 The live_before regset for this element is correct, indicating
2900 DF_LIVE_OUT for the block. */
2901 #define PEEP2_EOB pc_rtx
2903 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2904 does not exist. Used by the recognizer to find the next insn to match
2905 in a multi-insn pattern. */
2907 rtx
2908 peep2_next_insn (int n)
2910 gcc_assert (n <= peep2_current_count);
2912 n += peep2_current;
2913 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2914 n -= MAX_INSNS_PER_PEEP2 + 1;
2916 return peep2_insn_data[n].insn;
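/* Illustrative note (editorial): peep2_insn_data is used as a circular
   buffer of MAX_INSNS_PER_PEEP2 + 1 slots.  With, say, a 6-slot buffer
   and peep2_current == 4, peep2_next_insn (3) reads slot
   (4 + 3) - 6 == 1; indices simply wrap around modulo the buffer
   size.  */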
2919 /* Return true if REGNO is dead before the Nth non-note insn
2920 after `current'. */
2922 int
2923 peep2_regno_dead_p (int ofs, int regno)
2925 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2927 ofs += peep2_current;
2928 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2929 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2931 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2933 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2936 /* Similarly for a REG. */
2938 int
2939 peep2_reg_dead_p (int ofs, rtx reg)
2941 int regno, n;
2943 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2945 ofs += peep2_current;
2946 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2947 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2949 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2951 regno = REGNO (reg);
2952 n = hard_regno_nregs[regno][GET_MODE (reg)];
2953 while (--n >= 0)
2954 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2955 return 0;
2956 return 1;
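/* Usage sketch (editorial, not part of GCC): machine descriptions
   commonly invoke this from a define_peephole2 condition, e.g. the C
   fragment

     peep2_reg_dead_p (1, operands[0])

   asks whether operands[0] is dead before the insn at buffer offset 1,
   i.e. immediately after the first matched insn, and hence whether the
   replacement sequence may safely clobber it there.  */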
2959 /* Try to find a hard register of mode MODE, matching the register class in
2960 CLASS_STR, which is available at the beginning of the insn at peep2 buffer
2961 offset FROM and remains available until the end of the insn at buffer
2962 offset TO. If TO equals FROM, the only condition is that the register
2963 must be available before the insn at offset FROM.
2964 Registers that already have bits set in REG_SET will not be considered.
2966 If an appropriate register is available, it will be returned and the
2967 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2968 returned. */
2970 rtx
2971 peep2_find_free_register (int from, int to, const char *class_str,
2972 enum machine_mode mode, HARD_REG_SET *reg_set)
2974 static int search_ofs;
2975 enum reg_class cl;
2976 HARD_REG_SET live;
2977 int i;
2979 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2980 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2982 from += peep2_current;
2983 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2984 from -= MAX_INSNS_PER_PEEP2 + 1;
2985 to += peep2_current;
2986 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2987 to -= MAX_INSNS_PER_PEEP2 + 1;
2989 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2990 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2992 while (from != to)
2994 HARD_REG_SET this_live;
2996 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2997 from = 0;
2998 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2999 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3000 IOR_HARD_REG_SET (live, this_live);
3003 cl = (class_str[0] == 'r' ? GENERAL_REGS
3004 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
3006 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3008 int raw_regno, regno, success, j;
3010 /* Distribute the free registers as much as possible. */
3011 raw_regno = search_ofs + i;
3012 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3013 raw_regno -= FIRST_PSEUDO_REGISTER;
3014 #ifdef REG_ALLOC_ORDER
3015 regno = reg_alloc_order[raw_regno];
3016 #else
3017 regno = raw_regno;
3018 #endif
3020 /* Don't allocate fixed registers. */
3021 if (fixed_regs[regno])
3022 continue;
3023 /* Don't allocate global registers. */
3024 if (global_regs[regno])
3025 continue;
3026 /* Make sure the register is of the right class. */
3027 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
3028 continue;
3029 /* And can support the mode we need. */
3030 if (! HARD_REGNO_MODE_OK (regno, mode))
3031 continue;
3032 /* And that we don't create an extra save/restore. */
3033 if (! call_used_regs[regno] && ! df_regs_ever_live_p (regno))
3034 continue;
3035 if (! targetm.hard_regno_scratch_ok (regno))
3036 continue;
3038 /* And we don't clobber traceback for noreturn functions. */
3039 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3040 && (! reload_completed || frame_pointer_needed))
3041 continue;
3043 success = 1;
3044 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
3046 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3047 || TEST_HARD_REG_BIT (live, regno + j))
3049 success = 0;
3050 break;
3053 if (success)
3055 add_to_hard_reg_set (reg_set, mode, regno);
3057 /* Start the next search with the next register. */
3058 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3059 raw_regno = 0;
3060 search_ofs = raw_regno;
3062 return gen_rtx_REG (mode, regno);
3066 search_ofs = 0;
3067 return NULL_RTX;
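/* Usage sketch (editorial, not part of GCC): a define_peephole2 that
   needs a scratch register might run something like the following in
   its preparation statement (regs and scratch are local names for this
   sketch):

     HARD_REG_SET regs;
     rtx scratch;

     CLEAR_HARD_REG_SET (regs);
     scratch = peep2_find_free_register (0, 1, "r", SImode, &regs);
     if (scratch == NULL_RTX)
       FAIL;   // no GENERAL_REGS register is free across insns 0..1
*/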
3070 /* Forget all currently tracked instructions; remember only the current
3071 LIVE regset. */
3073 static void
3074 peep2_reinit_state (regset live)
3076 int i;
3078 /* Indicate that all slots except the last hold invalid data. */
3079 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3080 peep2_insn_data[i].insn = NULL_RTX;
3081 peep2_current_count = 0;
3083 /* Indicate that the last slot contains live_after data. */
3084 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3085 peep2_current = MAX_INSNS_PER_PEEP2;
3087 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3090 /* Perform the peephole2 optimization pass. */
3092 static void
3093 peephole2_optimize (void)
3095 rtx insn, prev;
3096 bitmap live;
3097 int i;
3098 basic_block bb;
3099 bool do_cleanup_cfg = false;
3100 bool do_rebuild_jump_labels = false;
3102 df_set_flags (DF_LR_RUN_DCE);
3103 df_analyze ();
3105 /* Initialize the regsets we're going to use. */
3106 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3107 peep2_insn_data[i].live_before = BITMAP_ALLOC (&reg_obstack);
3108 live = BITMAP_ALLOC (&reg_obstack);
3110 FOR_EACH_BB_REVERSE (bb)
3112 rtl_profile_for_bb (bb);
3114 /* Start up propagation. */
3115 bitmap_copy (live, DF_LR_OUT (bb));
3116 df_simulate_initialize_backwards (bb, live);
3117 peep2_reinit_state (live);
3119 for (insn = BB_END (bb); ; insn = prev)
3121 prev = PREV_INSN (insn);
3122 if (INSN_P (insn))
3124 rtx attempt, before_try, x;
3125 int match_len;
3126 rtx note;
3127 bool was_call = false;
3129 /* Record this insn. */
3130 if (--peep2_current < 0)
3131 peep2_current = MAX_INSNS_PER_PEEP2;
3132 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3133 && peep2_insn_data[peep2_current].insn == NULL_RTX)
3134 peep2_current_count++;
3135 peep2_insn_data[peep2_current].insn = insn;
3136 df_simulate_one_insn_backwards (bb, insn, live);
3137 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3139 if (RTX_FRAME_RELATED_P (insn))
3141 /* If an insn has RTX_FRAME_RELATED_P set, peephole
3142 substitution would lose the
3143 REG_FRAME_RELATED_EXPR that is attached. */
3144 peep2_reinit_state (live);
3145 attempt = NULL;
3147 else
3148 /* Match the peephole. */
3149 attempt = peephole2_insns (PATTERN (insn), insn, &match_len);
3151 if (attempt != NULL)
3153 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3154 in the new sequence and copy our CALL_INSN_FUNCTION_USAGE
3155 and other cfg-related call notes to it. */
3156 for (i = 0; i <= match_len; ++i)
3158 int j;
3159 rtx old_insn, new_insn, note;
3161 j = i + peep2_current;
3162 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3163 j -= MAX_INSNS_PER_PEEP2 + 1;
3164 old_insn = peep2_insn_data[j].insn;
3165 if (!CALL_P (old_insn))
3166 continue;
3167 was_call = true;
3169 new_insn = attempt;
3170 while (new_insn != NULL_RTX)
3172 if (CALL_P (new_insn))
3173 break;
3174 new_insn = NEXT_INSN (new_insn);
3177 gcc_assert (new_insn != NULL_RTX);
3179 CALL_INSN_FUNCTION_USAGE (new_insn)
3180 = CALL_INSN_FUNCTION_USAGE (old_insn);
3182 for (note = REG_NOTES (old_insn);
3183 note;
3184 note = XEXP (note, 1))
3185 switch (REG_NOTE_KIND (note))
3187 case REG_NORETURN:
3188 case REG_SETJMP:
3189 add_reg_note (new_insn, REG_NOTE_KIND (note),
3190 XEXP (note, 0));
3191 break;
3192 default:
3193 /* Discard all other reg notes. */
3194 break;
3197 /* Croak if there is another call in the sequence. */
3198 while (++i <= match_len)
3200 j = i + peep2_current;
3201 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3202 j -= MAX_INSNS_PER_PEEP2 + 1;
3203 old_insn = peep2_insn_data[j].insn;
3204 gcc_assert (!CALL_P (old_insn));
3206 break;
3209 i = match_len + peep2_current;
3210 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3211 i -= MAX_INSNS_PER_PEEP2 + 1;
3213 note = find_reg_note (peep2_insn_data[i].insn,
3214 REG_EH_REGION, NULL_RTX);
3216 /* Replace the old sequence with the new. */
3217 attempt = emit_insn_after_setloc (attempt,
3218 peep2_insn_data[i].insn,
3219 INSN_LOCATOR (peep2_insn_data[i].insn));
3220 before_try = PREV_INSN (insn);
3221 delete_insn_chain (insn, peep2_insn_data[i].insn, false);
3223 /* Re-insert the EH_REGION notes. */
3224 if (note || (was_call && nonlocal_goto_handler_labels))
3226 edge eh_edge;
3227 edge_iterator ei;
3229 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3230 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3231 break;
3233 for (x = attempt ; x != before_try ; x = PREV_INSN (x))
3234 if (CALL_P (x)
3235 || (flag_non_call_exceptions
3236 && may_trap_p (PATTERN (x))
3237 && !find_reg_note (x, REG_EH_REGION, NULL)))
3239 if (note)
3240 add_reg_note (x, REG_EH_REGION, XEXP (note, 0));
3242 if (x != BB_END (bb) && eh_edge)
3244 edge nfte, nehe;
3245 int flags;
3247 nfte = split_block (bb, x);
3248 flags = (eh_edge->flags
3249 & (EDGE_EH | EDGE_ABNORMAL));
3250 if (CALL_P (x))
3251 flags |= EDGE_ABNORMAL_CALL;
3252 nehe = make_edge (nfte->src, eh_edge->dest,
3253 flags);
3255 nehe->probability = eh_edge->probability;
3256 nfte->probability
3257 = REG_BR_PROB_BASE - nehe->probability;
3259 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3260 bb = nfte->src;
3261 eh_edge = nehe;
3265 /* We may have converted a possibly trapping insn into a
3266 non-trapping one; zap any dummy outgoing edges. */
3267 do_cleanup_cfg |= purge_dead_edges (bb);
3270 #ifdef HAVE_conditional_execution
3271 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3272 peep2_insn_data[i].insn = NULL_RTX;
3273 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3274 peep2_current_count = 0;
3275 #else
3276 /* Back up lifetime information past the end of the
3277 newly created sequence. */
3278 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3279 i = 0;
3280 bitmap_copy (live, peep2_insn_data[i].live_before);
3282 /* Update life information for the new sequence. */
3283 x = attempt;
3284 do
3286 if (INSN_P (x))
3288 if (--i < 0)
3289 i = MAX_INSNS_PER_PEEP2;
3290 if (peep2_current_count < MAX_INSNS_PER_PEEP2
3291 && peep2_insn_data[i].insn == NULL_RTX)
3292 peep2_current_count++;
3293 peep2_insn_data[i].insn = x;
3294 df_insn_rescan (x);
3295 df_simulate_one_insn_backwards (bb, x, live);
3296 bitmap_copy (peep2_insn_data[i].live_before, live);
3298 x = PREV_INSN (x);
3300 while (x != prev);
3302 peep2_current = i;
3303 #endif
3305 /* If we generated a jump instruction, it won't have
3306 JUMP_LABEL set. Recompute after we're done. */
3307 for (x = attempt; x != before_try; x = PREV_INSN (x))
3308 if (JUMP_P (x))
3310 do_rebuild_jump_labels = true;
3311 break;
3316 if (insn == BB_HEAD (bb))
3317 break;
3321 default_rtl_profile ();
3322 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3323 BITMAP_FREE (peep2_insn_data[i].live_before);
3324 BITMAP_FREE (live);
3325 if (do_rebuild_jump_labels)
3326 rebuild_jump_labels (get_insns ());
3328 #endif /* HAVE_peephole2 */
3330 /* Common predicates for use with define_bypass. */
3332 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3333 data, not the address operand(s), of the store. IN_INSN and OUT_INSN
3334 must each be either a single_set or a PARALLEL with SETs inside. */
3336 int
3337 store_data_bypass_p (rtx out_insn, rtx in_insn)
3339 rtx out_set, in_set;
3340 rtx out_pat, in_pat;
3341 rtx out_exp, in_exp;
3342 int i, j;
3344 in_set = single_set (in_insn);
3345 if (in_set)
3347 if (!MEM_P (SET_DEST (in_set)))
3348 return false;
3350 out_set = single_set (out_insn);
3351 if (out_set)
3353 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3354 return false;
3356 else
3358 out_pat = PATTERN (out_insn);
3360 if (GET_CODE (out_pat) != PARALLEL)
3361 return false;
3363 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3365 out_exp = XVECEXP (out_pat, 0, i);
3367 if (GET_CODE (out_exp) == CLOBBER)
3368 continue;
3370 gcc_assert (GET_CODE (out_exp) == SET);
3372 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_set)))
3373 return false;
3377 else
3379 in_pat = PATTERN (in_insn);
3380 gcc_assert (GET_CODE (in_pat) == PARALLEL);
3382 for (i = 0; i < XVECLEN (in_pat, 0); i++)
3384 in_exp = XVECEXP (in_pat, 0, i);
3386 if (GET_CODE (in_exp) == CLOBBER)
3387 continue;
3389 gcc_assert (GET_CODE (in_exp) == SET);
3391 if (!MEM_P (SET_DEST (in_exp)))
3392 return false;
3394 out_set = single_set (out_insn);
3395 if (out_set)
3397 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_exp)))
3398 return false;
3400 else
3402 out_pat = PATTERN (out_insn);
3403 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3405 for (j = 0; j < XVECLEN (out_pat, 0); j++)
3407 out_exp = XVECEXP (out_pat, 0, j);
3409 if (GET_CODE (out_exp) == CLOBBER)
3410 continue;
3412 gcc_assert (GET_CODE (out_exp) == SET);
3414 if (reg_mentioned_p (SET_DEST (out_exp), SET_DEST (in_exp)))
3415 return false;
3421 return true;
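/* Illustrative note (editorial, not part of GCC): with

     out_insn:  (set (reg A) (plus (reg B) (reg C)))
     in_insn:   (set (mem (reg A)) (reg D))

   the dependency runs through the store address (reg A), so the
   function returns false; had in_insn been

     (set (mem (reg E)) (reg A))

   the producer would feed only the stored data, and the function
   would return true.  */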
3424 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3425 condition, and not the THEN or ELSE branch. OUT_INSN may be either a
3426 single set or multiple sets; strictly, IN_INSN should be a single_set, but
3427 for convenience of insn categorization it may be any JUMP or CALL insn. */
3429 int
3430 if_test_bypass_p (rtx out_insn, rtx in_insn)
3432 rtx out_set, in_set;
3434 in_set = single_set (in_insn);
3435 if (! in_set)
3437 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3438 return false;
3441 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3442 return false;
3443 in_set = SET_SRC (in_set);
3445 out_set = single_set (out_insn);
3446 if (out_set)
3448 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3449 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3450 return false;
3452 else
3454 rtx out_pat;
3455 int i;
3457 out_pat = PATTERN (out_insn);
3458 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3460 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3462 rtx exp = XVECEXP (out_pat, 0, i);
3464 if (GET_CODE (exp) == CLOBBER)
3465 continue;
3467 gcc_assert (GET_CODE (exp) == SET);
3469 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3470 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3471 return false;
3475 return true;
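/* Illustrative note (editorial, not part of GCC): for

     out_insn:  (set (reg F) (compare (reg G) (const_int 0)))
     in_insn:   (set (pc) (if_then_else (ne (reg F) (const_int 0))
                                        (label_ref L) (pc)))

   the producer feeds only the IF_THEN_ELSE condition, so the function
   returns true; if (reg F) also appeared in one of the arms, it would
   return false.  */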
3478 static bool
3479 gate_handle_peephole2 (void)
3481 return (optimize > 0 && flag_peephole2);
3484 static unsigned int
3485 rest_of_handle_peephole2 (void)
3487 #ifdef HAVE_peephole2
3488 peephole2_optimize ();
3489 #endif
3490 return 0;
3493 struct rtl_opt_pass pass_peephole2 =
3496 RTL_PASS,
3497 "peephole2", /* name */
3498 gate_handle_peephole2, /* gate */
3499 rest_of_handle_peephole2, /* execute */
3500 NULL, /* sub */
3501 NULL, /* next */
3502 0, /* static_pass_number */
3503 TV_PEEPHOLE2, /* tv_id */
3504 0, /* properties_required */
3505 0, /* properties_provided */
3506 0, /* properties_destroyed */
3507 0, /* todo_flags_start */
3508 TODO_df_finish | TODO_verify_rtl_sharing |
3509 TODO_dump_func /* todo_flags_finish */
3513 static unsigned int
3514 rest_of_handle_split_all_insns (void)
3516 split_all_insns ();
3517 return 0;
3520 struct rtl_opt_pass pass_split_all_insns =
3523 RTL_PASS,
3524 "split1", /* name */
3525 NULL, /* gate */
3526 rest_of_handle_split_all_insns, /* execute */
3527 NULL, /* sub */
3528 NULL, /* next */
3529 0, /* static_pass_number */
3530 TV_NONE, /* tv_id */
3531 0, /* properties_required */
3532 0, /* properties_provided */
3533 0, /* properties_destroyed */
3534 0, /* todo_flags_start */
3535 TODO_dump_func /* todo_flags_finish */
3539 static unsigned int
3540 rest_of_handle_split_after_reload (void)
3542 /* If optimizing, then go ahead and split insns now. */
3543 #ifndef STACK_REGS
3544 if (optimize > 0)
3545 #endif
3546 split_all_insns ();
3547 return 0;
3550 struct rtl_opt_pass pass_split_after_reload =
3553 RTL_PASS,
3554 "split2", /* name */
3555 NULL, /* gate */
3556 rest_of_handle_split_after_reload, /* execute */
3557 NULL, /* sub */
3558 NULL, /* next */
3559 0, /* static_pass_number */
3560 TV_NONE, /* tv_id */
3561 0, /* properties_required */
3562 0, /* properties_provided */
3563 0, /* properties_destroyed */
3564 0, /* todo_flags_start */
3565 TODO_dump_func /* todo_flags_finish */
3569 static bool
3570 gate_handle_split_before_regstack (void)
3572 #if defined (HAVE_ATTR_length) && defined (STACK_REGS)
3573 /* If flow2 creates new instructions which need splitting, and
3574 scheduling after reload is not done, they might not be split
3575 until final, which does not allow splitting when
3576 HAVE_ATTR_length is defined. */
3577 # ifdef INSN_SCHEDULING
3578 return (optimize && !flag_schedule_insns_after_reload);
3579 # else
3580 return (optimize);
3581 # endif
3582 #else
3583 return 0;
3584 #endif
3587 static unsigned int
3588 rest_of_handle_split_before_regstack (void)
3590 split_all_insns ();
3591 return 0;
3594 struct rtl_opt_pass pass_split_before_regstack =
3597 RTL_PASS,
3598 "split3", /* name */
3599 gate_handle_split_before_regstack, /* gate */
3600 rest_of_handle_split_before_regstack, /* execute */
3601 NULL, /* sub */
3602 NULL, /* next */
3603 0, /* static_pass_number */
3604 TV_NONE, /* tv_id */
3605 0, /* properties_required */
3606 0, /* properties_provided */
3607 0, /* properties_destroyed */
3608 0, /* todo_flags_start */
3609 TODO_dump_func /* todo_flags_finish */
3613 static bool
3614 gate_handle_split_before_sched2 (void)
3616 #ifdef INSN_SCHEDULING
3617 return optimize > 0 && flag_schedule_insns_after_reload;
3618 #else
3619 return 0;
3620 #endif
3623 static unsigned int
3624 rest_of_handle_split_before_sched2 (void)
3626 #ifdef INSN_SCHEDULING
3627 split_all_insns ();
3628 #endif
3629 return 0;
3632 struct rtl_opt_pass pass_split_before_sched2 =
3635 RTL_PASS,
3636 "split4", /* name */
3637 gate_handle_split_before_sched2, /* gate */
3638 rest_of_handle_split_before_sched2, /* execute */
3639 NULL, /* sub */
3640 NULL, /* next */
3641 0, /* static_pass_number */
3642 TV_NONE, /* tv_id */
3643 0, /* properties_required */
3644 0, /* properties_provided */
3645 0, /* properties_destroyed */
3646 0, /* todo_flags_start */
3647 TODO_verify_flow |
3648 TODO_dump_func /* todo_flags_finish */
3652 /* The placement of the splitting that we do for shorten_branches
3653 depends on whether regstack is used by the target or not. */
3654 static bool
3655 gate_do_final_split (void)
3657 #if defined (HAVE_ATTR_length) && !defined (STACK_REGS)
3658 return 1;
3659 #else
3660 return 0;
3661 #endif
3664 struct rtl_opt_pass pass_split_for_shorten_branches =
3667 RTL_PASS,
3668 "split5", /* name */
3669 gate_do_final_split, /* gate */
3670 split_all_insns_noflow, /* execute */
3671 NULL, /* sub */
3672 NULL, /* next */
3673 0, /* static_pass_number */
3674 TV_NONE, /* tv_id */
3675 0, /* properties_required */
3676 0, /* properties_provided */
3677 0, /* properties_destroyed */
3678 0, /* todo_flags_start */
3679 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */