gcc/recog.c
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "insn-config.h"
30 #include "insn-attr.h"
31 #include "hard-reg-set.h"
32 #include "recog.h"
33 #include "regs.h"
34 #include "expr.h"
35 #include "function.h"
36 #include "flags.h"
37 #include "real.h"
38 #include "toplev.h"
39 #include "basic-block.h"
40 #include "output.h"
41 #include "reload.h"
43 #ifndef STACK_PUSH_CODE
44 #ifdef STACK_GROWS_DOWNWARD
45 #define STACK_PUSH_CODE PRE_DEC
46 #else
47 #define STACK_PUSH_CODE PRE_INC
48 #endif
49 #endif
51 #ifndef STACK_POP_CODE
52 #ifdef STACK_GROWS_DOWNWARD
53 #define STACK_POP_CODE POST_INC
54 #else
55 #define STACK_POP_CODE POST_DEC
56 #endif
57 #endif
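
The effect of these macros can be reproduced with ordinary pointer arithmetic: on a downward-growing stack, a push stores through a pre-decremented pointer (PRE_DEC) and a pop loads through a post-incremented one (POST_INC). A minimal standalone sketch, with a simulated eight-slot stack as an illustrative assumption:

#include <stdio.h>

int
main (void)
{
  int stack[8];
  int *sp = stack + 8;            /* Downward-growing stack: start at the top.  */

  *--sp = 42;                     /* PRE_DEC push.  */
  *--sp = 7;                      /* PRE_DEC push.  */

  printf ("pop: %d\n", *sp++);    /* POST_INC pop, prints 7.  */
  printf ("pop: %d\n", *sp++);    /* POST_INC pop, prints 42.  */
  return 0;
}
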
59 static void validate_replace_rtx_1 (rtx *, rtx, rtx, rtx);
60 static rtx *find_single_use_1 (rtx, rtx *);
61 static void validate_replace_src_1 (rtx *, void *);
62 static rtx split_insn (rtx);
64 /* Nonzero means allow operands to be volatile.
65 This should be 0 if you are generating rtl, such as if you are calling
66 the functions in optabs.c and expmed.c (most of the time).
67 This should be 1 if all valid insns need to be recognized,
68 such as in regclass.c and final.c and reload.c.
70 init_recog and init_recog_no_volatile are responsible for setting this. */
72 int volatile_ok;
74 struct recog_data recog_data;
76 /* Contains a vector of operand_alternative structures for every operand.
77 Set up by preprocess_constraints. */
78 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
80 /* On return from `constrain_operands', indicate which alternative
81 was satisfied. */
83 int which_alternative;
85 /* Nonzero after end of reload pass.
86 Set to 1 or 0 by toplev.c.
87 Controls the significance of (SUBREG (MEM)). */
89 int reload_completed;
91 /* Nonzero after thread_prologue_and_epilogue_insns has run. */
92 int epilogue_completed;
94 /* Initialize data used by the function `recog'.
95 This must be called once in the compilation of a function
96 before any insn recognition may be done in the function. */
98 void
99 init_recog_no_volatile (void)
101 volatile_ok = 0;
104 void
105 init_recog (void)
107 volatile_ok = 1;
110 /* Try recognizing the instruction INSN,
111 and return the code number that results.
112 Remember the code so that repeated calls do not
113 need to spend time on actual rerecognition.
115 This function is the normal interface to instruction recognition.
116 The automatically-generated function `recog' is normally called
117 through this one. (The only exception is in combine.c.) */
120 recog_memoized_1 (rtx insn)
122 if (INSN_CODE (insn) < 0)
123 INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
124 return INSN_CODE (insn);
127 /* Check that X is an insn-body for an `asm' with operands
128 and that the operands mentioned in it are legitimate. */
131 check_asm_operands (rtx x)
133 int noperands;
134 rtx *operands;
135 const char **constraints;
136 int i;
138 /* Post-reload, be more strict with things. */
139 if (reload_completed)
141 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
142 extract_insn (make_insn_raw (x));
143 constrain_operands (1);
144 return which_alternative >= 0;
147 noperands = asm_noperands (x);
148 if (noperands < 0)
149 return 0;
150 if (noperands == 0)
151 return 1;
153 operands = alloca (noperands * sizeof (rtx));
154 constraints = alloca (noperands * sizeof (char *));
156 decode_asm_operands (x, operands, NULL, constraints, NULL);
158 for (i = 0; i < noperands; i++)
160 const char *c = constraints[i];
161 if (c[0] == '%')
162 c++;
163 if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
164 c = constraints[c[0] - '0'];
166 if (! asm_operand_ok (operands[i], c))
167 return 0;
170 return 1;
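
The loop above resolves a single-digit matching constraint such as "0" to the constraint of the operand it must duplicate. A standalone sketch of that lookup; the sample constraint strings are invented for illustration:

#include <ctype.h>
#include <stdio.h>

int
main (void)
{
  const char *constraints[] = { "=r", "0", "%m" };
  int i;

  for (i = 0; i < 3; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')                  /* Skip the commutativity marker.  */
        c++;
      if (isdigit ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];    /* Follow the matching operand.  */
      printf ("operand %d checked against \"%s\"\n", i, c);
    }
  return 0;
}
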
173 /* Static data for the next two routines. */
175 typedef struct change_t
177 rtx object;
178 int old_code;
179 rtx *loc;
180 rtx old;
181 } change_t;
183 static change_t *changes;
184 static int changes_allocated;
186 static int num_changes = 0;
188 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
189 at which NEW will be placed. If OBJECT is zero, no validation is done,
190 the change is simply made.
192 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
193 will be called with the address and mode as parameters. If OBJECT is
194 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
195 the change in place.
197 IN_GROUP is nonzero if this is part of a group of changes that must be
198 performed as a group. In that case, the changes will be stored. The
199 function `apply_change_group' will validate and apply the changes.
201 If IN_GROUP is zero, this is a single change. Try to recognize the insn
202 or validate the memory reference with the change applied. If the result
203 is not valid for the machine, suppress the change and return zero.
204 Otherwise, perform the change and return 1. */
207 validate_change (rtx object, rtx *loc, rtx new, int in_group)
209 rtx old = *loc;
211 if (old == new || rtx_equal_p (old, new))
212 return 1;
214 gcc_assert (in_group != 0 || num_changes == 0);
216 *loc = new;
218 /* Save the information describing this change. */
219 if (num_changes >= changes_allocated)
221 if (changes_allocated == 0)
222 /* This value allows for repeated substitutions inside complex
223 indexed addresses, or changes in up to 5 insns. */
224 changes_allocated = MAX_RECOG_OPERANDS * 5;
225 else
226 changes_allocated *= 2;
228 changes = xrealloc (changes, sizeof (change_t) * changes_allocated);
231 changes[num_changes].object = object;
232 changes[num_changes].loc = loc;
233 changes[num_changes].old = old;
235 if (object && !MEM_P (object))
237 /* Set INSN_CODE to force rerecognition of insn. Save old code in
238 case invalid. */
239 changes[num_changes].old_code = INSN_CODE (object);
240 INSN_CODE (object) = -1;
243 num_changes++;
245 /* If we are making a group of changes, return 1. Otherwise, validate the
246 change group we made. */
248 if (in_group)
249 return 1;
250 else
251 return apply_change_group ();
254 /* This subroutine of apply_change_group verifies whether the changes to INSN
255 were valid; i.e. whether INSN can still be recognized. */
258 insn_invalid_p (rtx insn)
260 rtx pat = PATTERN (insn);
261 int num_clobbers = 0;
262 /* If we are before reload and the pattern is a SET, see if we can add
263 clobbers. */
264 int icode = recog (pat, insn,
265 (GET_CODE (pat) == SET
266 && ! reload_completed && ! reload_in_progress)
267 ? &num_clobbers : 0);
268 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
271 /* If this is an asm and the operands aren't legal, then fail. Likewise if
272 this is not an asm and the insn wasn't recognized. */
273 if ((is_asm && ! check_asm_operands (PATTERN (insn)))
274 || (!is_asm && icode < 0))
275 return 1;
277 /* If we have to add CLOBBERs, fail if we have to add ones that reference
278 hard registers since our callers can't know if they are live or not.
279 Otherwise, add them. */
280 if (num_clobbers > 0)
282 rtx newpat;
284 if (added_clobbers_hard_reg_p (icode))
285 return 1;
287 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
288 XVECEXP (newpat, 0, 0) = pat;
289 add_clobbers (newpat, icode);
290 PATTERN (insn) = pat = newpat;
293 /* After reload, verify that all constraints are satisfied. */
294 if (reload_completed)
296 extract_insn (insn);
298 if (! constrain_operands (1))
299 return 1;
302 INSN_CODE (insn) = icode;
303 return 0;
306 /* Return number of changes made and not validated yet. */
308 num_changes_pending (void)
310 return num_changes;
313 /* Apply a group of changes previously issued with `validate_change'.
314 Return 1 if all changes are valid, zero otherwise. */
317 apply_change_group (void)
319 int i;
320 rtx last_validated = NULL_RTX;
322 /* The changes have been applied and all INSN_CODEs have been reset to force
323 rerecognition.
325 The changes are valid if we aren't given an object, or if we are
326 given a MEM and it still is a valid address, or if this is an insn
327 and it is recognized. In the latter case, if reload has completed,
328 we also require that the operands meet the constraints for
329 the insn. */
331 for (i = 0; i < num_changes; i++)
333 rtx object = changes[i].object;
335 /* If there is no object to test or if it is the same as the one we
336 already tested, ignore it. */
337 if (object == 0 || object == last_validated)
338 continue;
340 if (MEM_P (object))
342 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
343 break;
345 else if (insn_invalid_p (object))
347 rtx pat = PATTERN (object);
349 /* Perhaps we couldn't recognize the insn because there were
350 extra CLOBBERs at the end. If so, try to re-recognize
351 without the last CLOBBER (later iterations will cause each of
352 them to be eliminated, in turn). But don't do this if we
353 have an ASM_OPERAND. */
354 if (GET_CODE (pat) == PARALLEL
355 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
356 && asm_noperands (PATTERN (object)) < 0)
358 rtx newpat;
360 if (XVECLEN (pat, 0) == 2)
361 newpat = XVECEXP (pat, 0, 0);
362 else
364 int j;
366 newpat
367 = gen_rtx_PARALLEL (VOIDmode,
368 rtvec_alloc (XVECLEN (pat, 0) - 1));
369 for (j = 0; j < XVECLEN (newpat, 0); j++)
370 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
373 /* Add a new change to this group to replace the pattern
374 with this new pattern. Then consider this change
375 as having succeeded. The change we added will
376 cause the entire call to fail if things remain invalid.
378 Note that this can lose if a later change than the one
379 we are processing specified &XVECEXP (PATTERN (object), 0, X)
380 but this shouldn't occur. */
382 validate_change (object, &PATTERN (object), newpat, 1);
383 continue;
385 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
386 /* If this insn is a CLOBBER or USE, it is always valid, but is
387 never recognized. */
388 continue;
389 else
390 break;
392 last_validated = object;
395 if (i == num_changes)
397 basic_block bb;
399 for (i = 0; i < num_changes; i++)
400 if (changes[i].object
401 && INSN_P (changes[i].object)
402 && (bb = BLOCK_FOR_INSN (changes[i].object)))
403 bb->flags |= BB_DIRTY;
405 num_changes = 0;
406 return 1;
408 else
410 cancel_changes (0);
411 return 0;
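
The retry in apply_change_group peels one trailing CLOBBER per iteration until recognition succeeds. A toy standalone version of that loop, where recognize() is an invented stand-in for the machine description's recognizer:

#include <stdio.h>

static int
recognize (int n_elts, int n_clobbers)
{
  /* Toy stand-in for recog: accept a pattern with at most one CLOBBER.  */
  return n_elts > 0 && n_clobbers <= 1;
}

int
main (void)
{
  int n_elts = 4, n_clobbers = 3;       /* One SET plus three CLOBBERs.  */

  while (!recognize (n_elts, n_clobbers) && n_clobbers > 0)
    {
      n_elts--;                         /* Drop the trailing CLOBBER...  */
      n_clobbers--;                     /* ...and try recognition again.  */
    }
  printf ("recognized with %d elements\n", n_elts);   /* Prints 2.  */
  return 0;
}
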
415 /* Return the number of changes so far in the current group. */
418 num_validated_changes (void)
420 return num_changes;
423 /* Retract the changes numbered NUM and up. */
425 void
426 cancel_changes (int num)
428 int i;
430 /* Back out all the changes. Do this in the opposite order in which
431 they were made. */
432 for (i = num_changes - 1; i >= num; i--)
434 *changes[i].loc = changes[i].old;
435 if (changes[i].object && !MEM_P (changes[i].object))
436 INSN_CODE (changes[i].object) = changes[i].old_code;
438 num_changes = num;
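
Taken together, validate_change, apply_change_group and cancel_changes form a transactional undo log: each tentative change records the old value, the group is validated as a whole, and failure rolls everything back in reverse order. A self-contained sketch of that pattern reduced to plain ints; every name below is invented for illustration:

#include <stdio.h>
#include <stdlib.h>

typedef struct { int *loc; int old; } log_entry;

static log_entry *log_buf;
static int log_len, log_cap;

/* Tentatively store VALUE at LOC, remembering the old contents.  */
static void
try_set (int *loc, int value)
{
  if (log_len >= log_cap)
    {
      log_cap = log_cap ? log_cap * 2 : 16;     /* Doubling growth, as above.  */
      log_buf = realloc (log_buf, log_cap * sizeof *log_buf);
    }
  log_buf[log_len].loc = loc;
  log_buf[log_len].old = *loc;
  log_len++;
  *loc = value;
}

/* Back out changes NUM and up, most recent first, as cancel_changes does.  */
static void
roll_back (int num)
{
  int i;
  for (i = log_len - 1; i >= num; i--)
    *log_buf[i].loc = log_buf[i].old;
  log_len = num;
}

int
main (void)
{
  int a = 1, b = 2;

  try_set (&a, 10);
  try_set (&b, 20);
  roll_back (0);                 /* Pretend validation failed.  */
  printf ("a=%d b=%d\n", a, b);  /* Prints a=1 b=2.  */
  return 0;
}
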
441 /* Replace every occurrence of FROM in X with TO. Mark each change with
442 validate_change passing OBJECT. */
444 static void
445 validate_replace_rtx_1 (rtx *loc, rtx from, rtx to, rtx object)
447 int i, j;
448 const char *fmt;
449 rtx x = *loc;
450 enum rtx_code code;
451 enum machine_mode op0_mode = VOIDmode;
452 int prev_changes = num_changes;
453 rtx new;
455 if (!x)
456 return;
458 code = GET_CODE (x);
459 fmt = GET_RTX_FORMAT (code);
460 if (fmt[0] == 'e')
461 op0_mode = GET_MODE (XEXP (x, 0));
463 /* X matches FROM if it is the same rtx or they are both referring to the
464 same register in the same mode. Avoid calling rtx_equal_p unless the
465 operands look similar. */
467 if (x == from
468 || (REG_P (x) && REG_P (from)
469 && GET_MODE (x) == GET_MODE (from)
470 && REGNO (x) == REGNO (from))
471 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
472 && rtx_equal_p (x, from)))
474 validate_change (object, loc, to, 1);
475 return;
478 /* Call ourselves recursively to perform the replacements.
479 We must not replace inside an already replaced expression; otherwise we
480 get infinite recursion for replacements like (reg X)->(subreg (reg X))
481 done by regmove, so we must special case shared ASM_OPERANDS. */
483 if (GET_CODE (x) == PARALLEL)
485 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
487 if (j && GET_CODE (XVECEXP (x, 0, j)) == SET
488 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == ASM_OPERANDS)
490 /* Verify that operands are really shared. */
491 gcc_assert (ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (x, 0, 0)))
492 == ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP
493 (x, 0, j))));
494 validate_replace_rtx_1 (&SET_DEST (XVECEXP (x, 0, j)),
495 from, to, object);
497 else
498 validate_replace_rtx_1 (&XVECEXP (x, 0, j), from, to, object);
501 else
502 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
504 if (fmt[i] == 'e')
505 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
506 else if (fmt[i] == 'E')
507 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
508 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
511 /* If we didn't substitute, there is nothing more to do. */
512 if (num_changes == prev_changes)
513 return;
515 /* Allow the substituted expression to have a different mode. This is used by
516 regmove to change the mode of a pseudo register. */
517 if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
518 op0_mode = GET_MODE (XEXP (x, 0));
520 /* Do changes needed to keep rtx consistent. Don't do any other
521 simplifications, as it is not our job. */
523 if (SWAPPABLE_OPERANDS_P (x)
524 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
526 validate_change (object, loc,
527 gen_rtx_fmt_ee (COMMUTATIVE_ARITH_P (x) ? code
528 : swap_condition (code),
529 GET_MODE (x), XEXP (x, 1),
530 XEXP (x, 0)), 1);
531 x = *loc;
532 code = GET_CODE (x);
535 switch (code)
537 case PLUS:
538 /* If we have a PLUS whose second operand is now a CONST_INT, use
539 simplify_gen_binary to try to simplify it.
540 ??? We may want later to remove this, once simplification is
541 separated from this function. */
542 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
543 validate_change (object, loc,
544 simplify_gen_binary
545 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
546 break;
547 case MINUS:
548 if (GET_CODE (XEXP (x, 1)) == CONST_INT
549 || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
550 validate_change (object, loc,
551 simplify_gen_binary
552 (PLUS, GET_MODE (x), XEXP (x, 0),
553 simplify_gen_unary (NEG,
554 GET_MODE (x), XEXP (x, 1),
555 GET_MODE (x))), 1);
556 break;
557 case ZERO_EXTEND:
558 case SIGN_EXTEND:
559 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
561 new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
562 op0_mode);
563 /* If any of the above failed, substitute in something that
564 we know won't be recognized. */
565 if (!new)
566 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
567 validate_change (object, loc, new, 1);
569 break;
570 case SUBREG:
571 /* All subregs that can be simplified should be simplified. */
572 new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
573 SUBREG_BYTE (x));
575 /* Subregs of VOIDmode operands are incorrect. */
576 if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
577 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
578 if (new)
579 validate_change (object, loc, new, 1);
580 break;
581 case ZERO_EXTRACT:
582 case SIGN_EXTRACT:
583 /* If we are replacing a register with memory, try to change the memory
584 to be the mode required for memory in extract operations (this isn't
585 likely to be an insertion operation; if it was, nothing bad will
586 happen, we might just fail in some cases). */
588 if (MEM_P (XEXP (x, 0))
589 && GET_CODE (XEXP (x, 1)) == CONST_INT
590 && GET_CODE (XEXP (x, 2)) == CONST_INT
591 && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
592 && !MEM_VOLATILE_P (XEXP (x, 0)))
594 enum machine_mode wanted_mode = VOIDmode;
595 enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
596 int pos = INTVAL (XEXP (x, 2));
598 if (GET_CODE (x) == ZERO_EXTRACT)
600 enum machine_mode new_mode
601 = mode_for_extraction (EP_extzv, 1);
602 if (new_mode != MAX_MACHINE_MODE)
603 wanted_mode = new_mode;
605 else if (GET_CODE (x) == SIGN_EXTRACT)
607 enum machine_mode new_mode
608 = mode_for_extraction (EP_extv, 1);
609 if (new_mode != MAX_MACHINE_MODE)
610 wanted_mode = new_mode;
613 /* If we have a narrower mode, we can do something. */
614 if (wanted_mode != VOIDmode
615 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
617 int offset = pos / BITS_PER_UNIT;
618 rtx newmem;
620 /* If the bytes and bits are counted differently, we
621 must adjust the offset. */
622 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
623 offset =
624 (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
625 offset);
627 pos %= GET_MODE_BITSIZE (wanted_mode);
629 newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);
631 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
632 validate_change (object, &XEXP (x, 0), newmem, 1);
636 break;
638 default:
639 break;
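
The ZERO_EXTRACT/SIGN_EXTRACT case narrows a memory reference to a smaller mode and, when bytes and bits are numbered from opposite ends, mirrors the byte offset within the containing object. A standalone sketch of that arithmetic; the sizes and the mirror flag are illustrative assumptions:

#include <stdio.h>

#define BITS_PER_UNIT 8

/* Byte offset of bit POS when an IS_SIZE-byte reference is narrowed to
   WANTED_SIZE bytes; MIRROR is nonzero when BYTES_BIG_ENDIAN and
   BITS_BIG_ENDIAN disagree, as in the code above.  */
static int
narrow_offset (int pos, int is_size, int wanted_size, int mirror)
{
  int offset = pos / BITS_PER_UNIT;
  if (mirror)
    offset = is_size - wanted_size - offset;
  return offset;
}

int
main (void)
{
  /* Bit 16 of a 4-byte object, narrowed to a 1-byte access.  */
  printf ("same numbering:     byte %d\n", narrow_offset (16, 4, 1, 0)); /* 2 */
  printf ("opposite numbering: byte %d\n", narrow_offset (16, 4, 1, 1)); /* 1 */
  return 0;
}
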
643 /* Try replacing every occurrence of FROM in subexpression LOC of INSN
644 with TO. After all changes have been made, validate by seeing
645 if INSN is still valid. */
648 validate_replace_rtx_subexp (rtx from, rtx to, rtx insn, rtx *loc)
650 validate_replace_rtx_1 (loc, from, to, insn);
651 return apply_change_group ();
654 /* Try replacing every occurrence of FROM in INSN with TO. After all
655 changes have been made, validate by seeing if INSN is still valid. */
658 validate_replace_rtx (rtx from, rtx to, rtx insn)
660 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
661 return apply_change_group ();
664 /* Try replacing every occurrence of FROM in INSN with TO. */
666 void
667 validate_replace_rtx_group (rtx from, rtx to, rtx insn)
669 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
672 /* Function called by note_uses to replace used subexpressions. */
673 struct validate_replace_src_data
675 rtx from; /* Old RTX */
676 rtx to; /* New RTX */
677 rtx insn; /* Insn in which substitution is occurring. */
680 static void
681 validate_replace_src_1 (rtx *x, void *data)
683 struct validate_replace_src_data *d
684 = (struct validate_replace_src_data *) data;
686 validate_replace_rtx_1 (x, d->from, d->to, d->insn);
689 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
690 SET_DESTs. */
692 void
693 validate_replace_src_group (rtx from, rtx to, rtx insn)
695 struct validate_replace_src_data d;
697 d.from = from;
698 d.to = to;
699 d.insn = insn;
700 note_uses (&PATTERN (insn), validate_replace_src_1, &d);
703 #ifdef HAVE_cc0
704 /* Return 1 if the insn using CC0 set by INSN does not contain
705 any ordered tests applied to the condition codes.
706 EQ and NE tests do not count. */
709 next_insn_tests_no_inequality (rtx insn)
711 rtx next = next_cc0_user (insn);
713 /* If there is no next insn, we have to take the conservative choice. */
714 if (next == 0)
715 return 0;
717 return (INSN_P (next)
718 && ! inequality_comparisons_p (PATTERN (next)));
720 #endif
722 /* This is used by find_single_use to locate an rtx that contains exactly one
723 use of DEST, which is typically either a REG or CC0. It returns a
724 pointer to the innermost rtx expression containing DEST. Appearances of
725 DEST that are being used to totally replace it are not counted. */
727 static rtx *
728 find_single_use_1 (rtx dest, rtx *loc)
730 rtx x = *loc;
731 enum rtx_code code = GET_CODE (x);
732 rtx *result = 0;
733 rtx *this_result;
734 int i;
735 const char *fmt;
737 switch (code)
739 case CONST_INT:
740 case CONST:
741 case LABEL_REF:
742 case SYMBOL_REF:
743 case CONST_DOUBLE:
744 case CONST_VECTOR:
745 case CLOBBER:
746 return 0;
748 case SET:
749 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
750 of a REG that occupies all of the REG, the insn uses DEST if
751 it is mentioned in the destination or the source. Otherwise, we
752 need only check the source. */
753 if (GET_CODE (SET_DEST (x)) != CC0
754 && GET_CODE (SET_DEST (x)) != PC
755 && !REG_P (SET_DEST (x))
756 && ! (GET_CODE (SET_DEST (x)) == SUBREG
757 && REG_P (SUBREG_REG (SET_DEST (x)))
758 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
759 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
760 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
761 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
762 break;
764 return find_single_use_1 (dest, &SET_SRC (x));
766 case MEM:
767 case SUBREG:
768 return find_single_use_1 (dest, &XEXP (x, 0));
770 default:
771 break;
774 /* If it wasn't one of the common cases above, check each expression and
775 vector of this code. Look for a unique usage of DEST. */
777 fmt = GET_RTX_FORMAT (code);
778 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
780 if (fmt[i] == 'e')
782 if (dest == XEXP (x, i)
783 || (REG_P (dest) && REG_P (XEXP (x, i))
784 && REGNO (dest) == REGNO (XEXP (x, i))))
785 this_result = loc;
786 else
787 this_result = find_single_use_1 (dest, &XEXP (x, i));
789 if (result == 0)
790 result = this_result;
791 else if (this_result)
792 /* Duplicate usage. */
793 return 0;
795 else if (fmt[i] == 'E')
797 int j;
799 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
801 if (XVECEXP (x, i, j) == dest
802 || (REG_P (dest)
803 && REG_P (XVECEXP (x, i, j))
804 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
805 this_result = loc;
806 else
807 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
809 if (result == 0)
810 result = this_result;
811 else if (this_result)
812 return 0;
817 return result;
820 /* See if DEST, produced in INSN, is used only a single time in the
821 sequel. If so, return a pointer to the innermost rtx expression in which
822 it is used.
824 If PLOC is nonzero, *PLOC is set to the insn containing the single use.
826 This routine will usually return zero either before flow is called (because
827 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
828 note can't be trusted).
830 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
831 care about REG_DEAD notes or LOG_LINKS.
833 Otherwise, we find the single use by finding an insn that has a
834 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
835 only referenced once in that insn, we know that it must be the first
836 and last insn referencing DEST. */
838 rtx *
839 find_single_use (rtx dest, rtx insn, rtx *ploc)
841 rtx next;
842 rtx *result;
843 rtx link;
845 #ifdef HAVE_cc0
846 if (dest == cc0_rtx)
848 next = NEXT_INSN (insn);
849 if (next == 0
850 || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
851 return 0;
853 result = find_single_use_1 (dest, &PATTERN (next));
854 if (result && ploc)
855 *ploc = next;
856 return result;
858 #endif
860 if (reload_completed || reload_in_progress || !REG_P (dest))
861 return 0;
863 for (next = next_nonnote_insn (insn);
864 next != 0 && !LABEL_P (next);
865 next = next_nonnote_insn (next))
866 if (INSN_P (next) && dead_or_set_p (next, dest))
868 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
869 if (XEXP (link, 0) == insn)
870 break;
872 if (link)
874 result = find_single_use_1 (dest, &PATTERN (next));
875 if (ploc)
876 *ploc = next;
877 return result;
881 return 0;
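
The duplicate detection in find_single_use_1 is a generic tree-walk shape: remember the first location found and give up as soon as a second appears. A standalone sketch on a toy binary tree; the node type and helper are invented for illustration:

#include <stddef.h>
#include <stdio.h>

struct node { int id; struct node *kid[2]; };

/* Return the unique location holding a node with ID, or NULL if it
   occurs zero times or more than once, mirroring find_single_use_1.  */
static struct node **
single_use (int id, struct node **loc)
{
  struct node *x = *loc;
  struct node **result = NULL, **this_result;
  int i;

  if (x == NULL)
    return NULL;
  if (x->id == id)
    return loc;

  for (i = 0; i < 2; i++)
    {
      this_result = single_use (id, &x->kid[i]);
      if (result == NULL)
        result = this_result;
      else if (this_result)
        return NULL;            /* Duplicate usage.  */
    }
  return result;
}

int
main (void)
{
  struct node leaf = { 3, { NULL, NULL } };
  struct node mid  = { 2, { &leaf, NULL } };
  struct node root = { 1, { &mid, NULL } };
  struct node *top = &root;

  printf ("unique use of 3 %s\n",
          single_use (3, &top) ? "found" : "not found");
  return 0;
}
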
884 /* Return 1 if OP is a valid general operand for machine mode MODE.
885 This is either a register reference, a memory reference,
886 or a constant. In the case of a memory reference, the address
887 is checked for general validity for the target machine.
889 Register and memory references must have mode MODE in order to be valid,
890 but some constants have no machine mode and are valid for any mode.
892 If MODE is VOIDmode, OP is checked for validity for whatever mode
893 it has.
895 The main use of this function is as a predicate in match_operand
896 expressions in the machine description.
898 For an explanation of this function's behavior for registers of
899 class NO_REGS, see the comment for `register_operand'. */
902 general_operand (rtx op, enum machine_mode mode)
904 enum rtx_code code = GET_CODE (op);
906 if (mode == VOIDmode)
907 mode = GET_MODE (op);
909 /* Don't accept CONST_INT or anything similar
910 if the caller wants something floating. */
911 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
912 && GET_MODE_CLASS (mode) != MODE_INT
913 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
914 return 0;
916 if (GET_CODE (op) == CONST_INT
917 && mode != VOIDmode
918 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
919 return 0;
921 if (CONSTANT_P (op))
922 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
923 || mode == VOIDmode)
924 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
925 && LEGITIMATE_CONSTANT_P (op));
927 /* Except for certain constants with VOIDmode, already checked for,
928 OP's mode must match MODE if MODE specifies a mode. */
930 if (GET_MODE (op) != mode)
931 return 0;
933 if (code == SUBREG)
935 rtx sub = SUBREG_REG (op);
937 #ifdef INSN_SCHEDULING
938 /* On machines that have insn scheduling, we want all memory
939 references to be explicit, so outlaw paradoxical SUBREGs.
940 However, we must allow them after reload so that they can
941 get cleaned up by cleanup_subreg_operands. */
942 if (!reload_completed && MEM_P (sub)
943 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
944 return 0;
945 #endif
946 /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
947 may result in an incorrect reference. We should simplify all valid
948 subregs of MEM anyway. But allow this after reload because we
949 might be called from cleanup_subreg_operands.
951 ??? This is a kludge. */
952 if (!reload_completed && SUBREG_BYTE (op) != 0
953 && MEM_P (sub))
954 return 0;
956 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
957 create such rtl, and we must reject it. */
958 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
959 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
960 return 0;
962 op = sub;
963 code = GET_CODE (op);
966 if (code == REG)
967 /* A register whose class is NO_REGS is not a general operand. */
968 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
969 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
971 if (code == MEM)
973 rtx y = XEXP (op, 0);
975 if (! volatile_ok && MEM_VOLATILE_P (op))
976 return 0;
978 /* Use the mem's mode, since it will be reloaded thus. */
979 if (memory_address_p (GET_MODE (op), y))
980 return 1;
983 return 0;
986 /* Return 1 if OP is a valid memory address for a memory reference
987 of mode MODE.
989 The main use of this function is as a predicate in match_operand
990 expressions in the machine description. */
993 address_operand (rtx op, enum machine_mode mode)
995 return memory_address_p (mode, op);
998 /* Return 1 if OP is a register reference of mode MODE.
999 If MODE is VOIDmode, accept a register in any mode.
1001 The main use of this function is as a predicate in match_operand
1002 expressions in the machine description.
1004 As a special exception, registers whose class is NO_REGS are
1005 not accepted by `register_operand'. The reason for this change
1006 is to allow the representation of special architecture artifacts
1007 (such as a condition code register) without extending the rtl
1008 definitions. Since registers of class NO_REGS cannot be used
1009 as registers in any case where register classes are examined,
1010 it is most consistent to keep this function from accepting them. */
1013 register_operand (rtx op, enum machine_mode mode)
1015 if (GET_MODE (op) != mode && mode != VOIDmode)
1016 return 0;
1018 if (GET_CODE (op) == SUBREG)
1020 rtx sub = SUBREG_REG (op);
1022 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1023 because it is guaranteed to be reloaded into one.
1024 Just make sure the MEM is valid in itself.
1025 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1026 but currently it does result from (SUBREG (REG)...) where the
1027 reg went on the stack.) */
1028 if (! reload_completed && MEM_P (sub))
1029 return general_operand (op, mode);
1031 #ifdef CANNOT_CHANGE_MODE_CLASS
1032 if (REG_P (sub)
1033 && REGNO (sub) < FIRST_PSEUDO_REGISTER
1034 && REG_CANNOT_CHANGE_MODE_P (REGNO (sub), GET_MODE (sub), mode)
1035 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
1036 && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
1037 return 0;
1038 #endif
1040 /* FLOAT_MODE subregs can't be paradoxical. Combine will occasionally
1041 create such rtl, and we must reject it. */
1042 if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
1043 && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
1044 return 0;
1046 op = sub;
1049 /* We don't consider registers whose class is NO_REGS
1050 to be a register operand. */
1051 return (REG_P (op)
1052 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1053 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1056 /* Return 1 for a register in Pmode; ignore the tested mode. */
1059 pmode_register_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1061 return register_operand (op, Pmode);
1064 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1065 or a hard register. */
1068 scratch_operand (rtx op, enum machine_mode mode)
1070 if (GET_MODE (op) != mode && mode != VOIDmode)
1071 return 0;
1073 return (GET_CODE (op) == SCRATCH
1074 || (REG_P (op)
1075 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1078 /* Return 1 if OP is a valid immediate operand for mode MODE.
1080 The main use of this function is as a predicate in match_operand
1081 expressions in the machine description. */
1084 immediate_operand (rtx op, enum machine_mode mode)
1086 /* Don't accept CONST_INT or anything similar
1087 if the caller wants something floating. */
1088 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1089 && GET_MODE_CLASS (mode) != MODE_INT
1090 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1091 return 0;
1093 if (GET_CODE (op) == CONST_INT
1094 && mode != VOIDmode
1095 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1096 return 0;
1098 return (CONSTANT_P (op)
1099 && (GET_MODE (op) == mode || mode == VOIDmode
1100 || GET_MODE (op) == VOIDmode)
1101 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1102 && LEGITIMATE_CONSTANT_P (op));
1105 /* Returns 1 if OP is an operand that is a CONST_INT. */
1108 const_int_operand (rtx op, enum machine_mode mode)
1110 if (GET_CODE (op) != CONST_INT)
1111 return 0;
1113 if (mode != VOIDmode
1114 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1115 return 0;
1117 return 1;
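
const_int_operand, like several predicates above, accepts a CONST_INT only if the value survives trunc_int_for_mode, i.e. sign-truncating it to the mode's width gives the same value back. A standalone sketch of that test with an assumed 8-bit mode:

#include <stdio.h>

/* Sign-truncate VAL to the low BITS bits, the shape of trunc_int_for_mode.  */
static long
trunc_for_width (long val, int bits)
{
  long m = 1L << (bits - 1);
  val &= (1L << bits) - 1;      /* Keep the low BITS bits.  */
  return (val ^ m) - m;         /* Sign-extend from bit BITS-1.  */
}

int
main (void)
{
  printf ("100 fits an 8-bit mode: %d\n", trunc_for_width (100, 8) == 100);
  printf ("300 fits an 8-bit mode: %d\n", trunc_for_width (300, 8) == 300);
  return 0;
}
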
1120 /* Returns 1 if OP is an operand that is a constant integer or constant
1121 floating-point number. */
1124 const_double_operand (rtx op, enum machine_mode mode)
1126 /* Don't accept CONST_INT or anything similar
1127 if the caller wants something floating. */
1128 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1129 && GET_MODE_CLASS (mode) != MODE_INT
1130 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1131 return 0;
1133 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1134 && (mode == VOIDmode || GET_MODE (op) == mode
1135 || GET_MODE (op) == VOIDmode));
1138 /* Return 1 if OP is a general operand that is not an immediate operand. */
1141 nonimmediate_operand (rtx op, enum machine_mode mode)
1143 return (general_operand (op, mode) && ! CONSTANT_P (op));
1146 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1149 nonmemory_operand (rtx op, enum machine_mode mode)
1151 if (CONSTANT_P (op))
1153 /* Don't accept CONST_INT or anything similar
1154 if the caller wants something floating. */
1155 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1156 && GET_MODE_CLASS (mode) != MODE_INT
1157 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1158 return 0;
1160 if (GET_CODE (op) == CONST_INT
1161 && mode != VOIDmode
1162 && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
1163 return 0;
1165 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
1166 || mode == VOIDmode)
1167 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1168 && LEGITIMATE_CONSTANT_P (op));
1171 if (GET_MODE (op) != mode && mode != VOIDmode)
1172 return 0;
1174 if (GET_CODE (op) == SUBREG)
1176 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1177 because it is guaranteed to be reloaded into one.
1178 Just make sure the MEM is valid in itself.
1179 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1180 but currently it does result from (SUBREG (REG)...) where the
1181 reg went on the stack.) */
1182 if (! reload_completed && MEM_P (SUBREG_REG (op)))
1183 return general_operand (op, mode);
1184 op = SUBREG_REG (op);
1187 /* We don't consider registers whose class is NO_REGS
1188 to be a register operand. */
1189 return (REG_P (op)
1190 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1191 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1194 /* Return 1 if OP is a valid operand that stands for pushing a
1195 value of mode MODE onto the stack.
1197 The main use of this function is as a predicate in match_operand
1198 expressions in the machine description. */
1201 push_operand (rtx op, enum machine_mode mode)
1203 unsigned int rounded_size = GET_MODE_SIZE (mode);
1205 #ifdef PUSH_ROUNDING
1206 rounded_size = PUSH_ROUNDING (rounded_size);
1207 #endif
1209 if (!MEM_P (op))
1210 return 0;
1212 if (mode != VOIDmode && GET_MODE (op) != mode)
1213 return 0;
1215 op = XEXP (op, 0);
1217 if (rounded_size == GET_MODE_SIZE (mode))
1219 if (GET_CODE (op) != STACK_PUSH_CODE)
1220 return 0;
1222 else
1224 if (GET_CODE (op) != PRE_MODIFY
1225 || GET_CODE (XEXP (op, 1)) != PLUS
1226 || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
1227 || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
1228 #ifdef STACK_GROWS_DOWNWARD
1229 || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
1230 #else
1231 || INTVAL (XEXP (XEXP (op, 1), 1)) != (int) rounded_size
1232 #endif
1234 return 0;
1237 return XEXP (op, 0) == stack_pointer_rtx;
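
push_operand accepts a plain auto-modify address only when the mode's size survives PUSH_ROUNDING unchanged; otherwise it requires a PRE_MODIFY that adjusts the stack pointer by the rounded size. A standalone sketch in which rounding to 4-byte slots stands in for the target's PUSH_ROUNDING:

#include <stdio.h>

static unsigned
push_rounding (unsigned size)
{
  return (size + 3U) & ~3U;     /* Assumed: round up to a 4-byte stack slot.  */
}

int
main (void)
{
  unsigned size = 2;            /* E.g. a 2-byte (HImode-like) push.  */
  unsigned rounded = push_rounding (size);

  if (rounded == size)
    printf ("plain PRE_DEC/PRE_INC push\n");
  else
    printf ("PRE_MODIFY push adjusting by %u bytes\n", rounded);
  return 0;
}
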
1240 /* Return 1 if OP is a valid operand that stands for popping a
1241 value of mode MODE off the stack.
1243 The main use of this function is as a predicate in match_operand
1244 expressions in the machine description. */
1247 pop_operand (rtx op, enum machine_mode mode)
1249 if (!MEM_P (op))
1250 return 0;
1252 if (mode != VOIDmode && GET_MODE (op) != mode)
1253 return 0;
1255 op = XEXP (op, 0);
1257 if (GET_CODE (op) != STACK_POP_CODE)
1258 return 0;
1260 return XEXP (op, 0) == stack_pointer_rtx;
1263 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1266 memory_address_p (enum machine_mode mode ATTRIBUTE_UNUSED, rtx addr)
1268 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1269 return 0;
1271 win:
1272 return 1;
1275 /* Return 1 if OP is a valid memory reference with mode MODE,
1276 including a valid address.
1278 The main use of this function is as a predicate in match_operand
1279 expressions in the machine description. */
1282 memory_operand (rtx op, enum machine_mode mode)
1284 rtx inner;
1286 if (! reload_completed)
1287 /* Note that no SUBREG is a memory operand before end of reload pass,
1288 because (SUBREG (MEM...)) forces reloading into a register. */
1289 return MEM_P (op) && general_operand (op, mode);
1291 if (mode != VOIDmode && GET_MODE (op) != mode)
1292 return 0;
1294 inner = op;
1295 if (GET_CODE (inner) == SUBREG)
1296 inner = SUBREG_REG (inner);
1298 return (MEM_P (inner) && general_operand (op, mode));
1301 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1302 that is, a memory reference whose address is a general_operand. */
1305 indirect_operand (rtx op, enum machine_mode mode)
1307 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1308 if (! reload_completed
1309 && GET_CODE (op) == SUBREG && MEM_P (SUBREG_REG (op)))
1311 int offset = SUBREG_BYTE (op);
1312 rtx inner = SUBREG_REG (op);
1314 if (mode != VOIDmode && GET_MODE (op) != mode)
1315 return 0;
1317 /* The only way that we can have a general_operand as the resulting
1318 address is if OFFSET is zero and the address already is an operand
1319 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1320 operand. */
1322 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1323 || (GET_CODE (XEXP (inner, 0)) == PLUS
1324 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1325 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1326 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1329 return (MEM_P (op)
1330 && memory_operand (op, mode)
1331 && general_operand (XEXP (op, 0), Pmode));
1334 /* Return 1 if this is a comparison operator. This allows the use of
1335 MATCH_OPERATOR to recognize all the branch insns. */
1338 comparison_operator (rtx op, enum machine_mode mode)
1340 return ((mode == VOIDmode || GET_MODE (op) == mode)
1341 && COMPARISON_P (op));
1344 /* If BODY is an insn body that uses ASM_OPERANDS,
1345 return the number of operands (both input and output) in the insn.
1346 Otherwise return -1. */
1349 asm_noperands (rtx body)
1351 switch (GET_CODE (body))
1353 case ASM_OPERANDS:
1354 /* No output operands: return number of input operands. */
1355 return ASM_OPERANDS_INPUT_LENGTH (body);
1356 case SET:
1357 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1358 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1359 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1360 else
1361 return -1;
1362 case PARALLEL:
1363 if (GET_CODE (XVECEXP (body, 0, 0)) == SET
1364 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1366 /* Multiple output operands, or 1 output plus some clobbers:
1367 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1368 int i;
1369 int n_sets;
1371 /* Count backwards through CLOBBERs to determine number of SETs. */
1372 for (i = XVECLEN (body, 0); i > 0; i--)
1374 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1375 break;
1376 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1377 return -1;
1380 /* N_SETS is now number of output operands. */
1381 n_sets = i;
1383 /* Verify that all the SETs we have
1384 came from a single original asm_operands insn
1385 (so that invalid combinations are blocked). */
1386 for (i = 0; i < n_sets; i++)
1388 rtx elt = XVECEXP (body, 0, i);
1389 if (GET_CODE (elt) != SET)
1390 return -1;
1391 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1392 return -1;
1393 /* If these ASM_OPERANDS rtx's came from different original insns
1394 then they aren't allowed together. */
1395 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1396 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1397 return -1;
1399 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1400 + n_sets);
1402 else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1404 /* 0 outputs, but some clobbers:
1405 body is [(asm_operands ...) (clobber (reg ...))...]. */
1406 int i;
1408 /* Make sure all the other parallel things really are clobbers. */
1409 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1410 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1411 return -1;
1413 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1415 else
1416 return -1;
1417 default:
1418 return -1;
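
In the PARALLEL case above, the number of outputs is the count of leading SETs, found by scanning backwards through the trailing CLOBBERs. The same scan on a plain array of element kinds; the enum is invented for illustration:

#include <stdio.h>

enum kind { SET_K, CLOBBER_K, OTHER_K };

/* Return the number of leading SETs in BODY, or -1 if anything other
   than CLOBBERs follows them, mirroring the asm_noperands scan.  */
static int
count_outputs (const enum kind *body, int len)
{
  int i;
  /* Count backwards through CLOBBERs to find the last SET.  */
  for (i = len; i > 0; i--)
    {
      if (body[i - 1] == SET_K)
        break;
      if (body[i - 1] != CLOBBER_K)
        return -1;              /* Malformed body.  */
    }
  return i;                     /* Leading SETs == output operands.  */
}

int
main (void)
{
  enum kind body[] = { SET_K, SET_K, CLOBBER_K, CLOBBER_K };
  printf ("outputs: %d\n", count_outputs (body, 4));   /* Prints 2.  */
  return 0;
}
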
1422 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1423 copy its operands (both input and output) into the vector OPERANDS,
1424 the locations of the operands within the insn into the vector OPERAND_LOCS,
1425 and the constraints for the operands into CONSTRAINTS.
1426 Write the modes of the operands into MODES.
1427 Return the assembler-template.
1429 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1430 we don't store that info. */
1432 const char *
1433 decode_asm_operands (rtx body, rtx *operands, rtx **operand_locs,
1434 const char **constraints, enum machine_mode *modes)
1436 int i;
1437 int noperands;
1438 const char *template = 0;
1440 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1442 rtx asmop = SET_SRC (body);
1443 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1445 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1447 for (i = 1; i < noperands; i++)
1449 if (operand_locs)
1450 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1451 if (operands)
1452 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1453 if (constraints)
1454 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1455 if (modes)
1456 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1459 /* The output is in the SET.
1460 Its constraint is in the ASM_OPERANDS itself. */
1461 if (operands)
1462 operands[0] = SET_DEST (body);
1463 if (operand_locs)
1464 operand_locs[0] = &SET_DEST (body);
1465 if (constraints)
1466 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1467 if (modes)
1468 modes[0] = GET_MODE (SET_DEST (body));
1469 template = ASM_OPERANDS_TEMPLATE (asmop);
1471 else if (GET_CODE (body) == ASM_OPERANDS)
1473 rtx asmop = body;
1474 /* No output operands: BODY is (asm_operands ....). */
1476 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1478 /* The input operands are found in the 1st element vector. */
1479 /* Constraints for inputs are in the 2nd element vector. */
1480 for (i = 0; i < noperands; i++)
1482 if (operand_locs)
1483 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1484 if (operands)
1485 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1486 if (constraints)
1487 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1488 if (modes)
1489 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1491 template = ASM_OPERANDS_TEMPLATE (asmop);
1493 else if (GET_CODE (body) == PARALLEL
1494 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1495 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1497 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1498 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1499 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1500 int nout = 0; /* Does not include CLOBBERs. */
1502 /* At least one output, plus some CLOBBERs. */
1504 /* The outputs are in the SETs.
1505 Their constraints are in the ASM_OPERANDS itself. */
1506 for (i = 0; i < nparallel; i++)
1508 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1509 break; /* Past last SET */
1511 if (operands)
1512 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1513 if (operand_locs)
1514 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1515 if (constraints)
1516 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1517 if (modes)
1518 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1519 nout++;
1522 for (i = 0; i < nin; i++)
1524 if (operand_locs)
1525 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1526 if (operands)
1527 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1528 if (constraints)
1529 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1530 if (modes)
1531 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1534 template = ASM_OPERANDS_TEMPLATE (asmop);
1536 else if (GET_CODE (body) == PARALLEL
1537 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1539 /* No outputs, but some CLOBBERs. */
1541 rtx asmop = XVECEXP (body, 0, 0);
1542 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1544 for (i = 0; i < nin; i++)
1546 if (operand_locs)
1547 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1548 if (operands)
1549 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1550 if (constraints)
1551 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1552 if (modes)
1553 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1556 template = ASM_OPERANDS_TEMPLATE (asmop);
1559 return template;
1562 /* Check if an asm_operand matches its constraints.
1563 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1566 asm_operand_ok (rtx op, const char *constraint)
1568 int result = 0;
1570 /* Use constrain_operands after reload. */
1571 gcc_assert (!reload_completed);
1573 while (*constraint)
1575 char c = *constraint;
1576 int len;
1577 switch (c)
1579 case ',':
1580 constraint++;
1581 continue;
1582 case '=':
1583 case '+':
1584 case '*':
1585 case '%':
1586 case '!':
1587 case '#':
1588 case '&':
1589 case '?':
1590 break;
1592 case '0': case '1': case '2': case '3': case '4':
1593 case '5': case '6': case '7': case '8': case '9':
1594 /* For best results, our caller should have given us the
1595 proper matching constraint, but we can't actually fail
1596 the check if they didn't. Indicate that results are
1597 inconclusive. */
1598 do
1599 constraint++;
1600 while (ISDIGIT (*constraint));
1601 if (! result)
1602 result = -1;
1603 continue;
1605 case 'p':
1606 if (address_operand (op, VOIDmode))
1607 result = 1;
1608 break;
1610 case 'm':
1611 case 'V': /* non-offsettable */
1612 if (memory_operand (op, VOIDmode))
1613 result = 1;
1614 break;
1616 case 'o': /* offsettable */
1617 if (offsettable_nonstrict_memref_p (op))
1618 result = 1;
1619 break;
1621 case '<':
1622 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1623 excepting those that expand_call created. Further, on some
1624 machines which do not have generalized auto inc/dec, an inc/dec
1625 is not a memory_operand.
1627 Match any memory and hope things are resolved after reload. */
1629 if (MEM_P (op)
1630 && (1
1631 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1632 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1633 result = 1;
1634 break;
1636 case '>':
1637 if (MEM_P (op)
1638 && (1
1639 || GET_CODE (XEXP (op, 0)) == PRE_INC
1640 || GET_CODE (XEXP (op, 0)) == POST_INC))
1641 result = 1;
1642 break;
1644 case 'E':
1645 case 'F':
1646 if (GET_CODE (op) == CONST_DOUBLE
1647 || (GET_CODE (op) == CONST_VECTOR
1648 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
1649 result = 1;
1650 break;
1652 case 'G':
1653 if (GET_CODE (op) == CONST_DOUBLE
1654 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'G', constraint))
1655 result = 1;
1656 break;
1657 case 'H':
1658 if (GET_CODE (op) == CONST_DOUBLE
1659 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, 'H', constraint))
1660 result = 1;
1661 break;
1663 case 's':
1664 if (GET_CODE (op) == CONST_INT
1665 || (GET_CODE (op) == CONST_DOUBLE
1666 && GET_MODE (op) == VOIDmode))
1667 break;
1668 /* Fall through. */
1670 case 'i':
1671 if (CONSTANT_P (op) && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op)))
1672 result = 1;
1673 break;
1675 case 'n':
1676 if (GET_CODE (op) == CONST_INT
1677 || (GET_CODE (op) == CONST_DOUBLE
1678 && GET_MODE (op) == VOIDmode))
1679 result = 1;
1680 break;
1682 case 'I':
1683 if (GET_CODE (op) == CONST_INT
1684 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'I', constraint))
1685 result = 1;
1686 break;
1687 case 'J':
1688 if (GET_CODE (op) == CONST_INT
1689 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'J', constraint))
1690 result = 1;
1691 break;
1692 case 'K':
1693 if (GET_CODE (op) == CONST_INT
1694 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'K', constraint))
1695 result = 1;
1696 break;
1697 case 'L':
1698 if (GET_CODE (op) == CONST_INT
1699 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'L', constraint))
1700 result = 1;
1701 break;
1702 case 'M':
1703 if (GET_CODE (op) == CONST_INT
1704 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'M', constraint))
1705 result = 1;
1706 break;
1707 case 'N':
1708 if (GET_CODE (op) == CONST_INT
1709 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'N', constraint))
1710 result = 1;
1711 break;
1712 case 'O':
1713 if (GET_CODE (op) == CONST_INT
1714 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'O', constraint))
1715 result = 1;
1716 break;
1717 case 'P':
1718 if (GET_CODE (op) == CONST_INT
1719 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), 'P', constraint))
1720 result = 1;
1721 break;
1723 case 'X':
1724 result = 1;
1725 break;
1727 case 'g':
1728 if (general_operand (op, VOIDmode))
1729 result = 1;
1730 break;
1732 default:
1733 /* For all other letters, we first check for a register class,
1734 otherwise it is an EXTRA_CONSTRAINT. */
1735 if (REG_CLASS_FROM_CONSTRAINT (c, constraint) != NO_REGS)
1737 case 'r':
1738 if (GET_MODE (op) == BLKmode)
1739 break;
1740 if (register_operand (op, VOIDmode))
1741 result = 1;
1743 #ifdef EXTRA_CONSTRAINT_STR
1744 else if (EXTRA_CONSTRAINT_STR (op, c, constraint))
1745 result = 1;
1746 else if (EXTRA_MEMORY_CONSTRAINT (c, constraint)
1747 /* Every memory operand can be reloaded to fit. */
1748 && memory_operand (op, VOIDmode))
1749 result = 1;
1750 else if (EXTRA_ADDRESS_CONSTRAINT (c, constraint)
1751 /* Every address operand can be reloaded to fit. */
1752 && address_operand (op, VOIDmode))
1753 result = 1;
1754 #endif
1755 break;
1757 len = CONSTRAINT_LEN (c, constraint);
1758 do
1759 constraint++;
1760 while (--len && *constraint);
1761 if (len)
1762 return 0;
1765 return result;
1768 /* Given an rtx *P, if it is a sum containing an integer constant term,
1769 return the location (type rtx *) of the pointer to that constant term.
1770 Otherwise, return a null pointer. */
1772 rtx *
1773 find_constant_term_loc (rtx *p)
1775 rtx *tem;
1776 enum rtx_code code = GET_CODE (*p);
1778 /* If *P IS such a constant term, P is its location. */
1780 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1781 || code == CONST)
1782 return p;
1784 /* Otherwise, if not a sum, it has no constant term. */
1786 if (GET_CODE (*p) != PLUS)
1787 return 0;
1789 /* If one of the summands is constant, return its location. */
1791 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1792 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1793 return p;
1795 /* Otherwise, check each summand for containing a constant term. */
1797 if (XEXP (*p, 0) != 0)
1799 tem = find_constant_term_loc (&XEXP (*p, 0));
1800 if (tem != 0)
1801 return tem;
1804 if (XEXP (*p, 1) != 0)
1806 tem = find_constant_term_loc (&XEXP (*p, 1));
1807 if (tem != 0)
1808 return tem;
1811 return 0;
1814 /* Return 1 if OP is a memory reference
1815 whose address contains no side effects
1816 and remains valid after the addition
1817 of a positive integer less than the
1818 size of the object being referenced.
1820 We assume that the original address is valid and do not check it.
1822 This uses strict_memory_address_p as a subroutine, so
1823 don't use it before reload. */
1826 offsettable_memref_p (rtx op)
1828 return ((MEM_P (op))
1829 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1832 /* Similar, but don't require a strictly valid mem ref:
1833 consider pseudo-regs valid as index or base regs. */
1836 offsettable_nonstrict_memref_p (rtx op)
1838 return ((MEM_P (op))
1839 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1842 /* Return 1 if Y is a memory address which contains no side effects
1843 and would remain valid after the addition of a positive integer
1844 less than the size of that mode.
1846 We assume that the original address is valid and do not check it.
1847 We do check that it is valid for narrower modes.
1849 If STRICTP is nonzero, we require a strictly valid address,
1850 for the sake of use in reload.c. */
1853 offsettable_address_p (int strictp, enum machine_mode mode, rtx y)
1855 enum rtx_code ycode = GET_CODE (y);
1856 rtx z;
1857 rtx y1 = y;
1858 rtx *y2;
1859 int (*addressp) (enum machine_mode, rtx) =
1860 (strictp ? strict_memory_address_p : memory_address_p);
1861 unsigned int mode_sz = GET_MODE_SIZE (mode);
1863 if (CONSTANT_ADDRESS_P (y))
1864 return 1;
1866 /* Adjusting an offsettable address involves changing to a narrower mode.
1867 Make sure that's OK. */
1869 if (mode_dependent_address_p (y))
1870 return 0;
1872 /* ??? How much offset does an offsettable BLKmode reference need?
1873 Clearly that depends on the situation in which it's being used.
1874 However, the current situation in which we test 0xffffffff is
1875 less than ideal. Caveat user. */
1876 if (mode_sz == 0)
1877 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1879 /* If the expression contains a constant term,
1880 see if it remains valid when max possible offset is added. */
1882 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1884 int good;
1886 y1 = *y2;
1887 *y2 = plus_constant (*y2, mode_sz - 1);
1888 /* Use QImode because an odd displacement may be automatically invalid
1889 for any wider mode. But it should be valid for a single byte. */
1890 good = (*addressp) (QImode, y);
1892 /* In any case, restore old contents of memory. */
1893 *y2 = y1;
1894 return good;
1897 if (GET_RTX_CLASS (ycode) == RTX_AUTOINC)
1898 return 0;
1900 /* The offset added here is chosen as the maximum offset that
1901 any instruction could need to add when operating on something
1902 of the specified mode. We assume that if Y and Y+c are
1903 valid addresses then so is Y+d for all 0<d<c. adjust_address will
1904 go inside a LO_SUM here, so we do so as well. */
1905 if (GET_CODE (y) == LO_SUM
1906 && mode != BLKmode
1907 && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
1908 z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
1909 plus_constant (XEXP (y, 1), mode_sz - 1));
1910 else
1911 z = plus_constant (y, mode_sz - 1);
1913 /* Use QImode because an odd displacement may be automatically invalid
1914 for any wider mode. But it should be valid for a single byte. */
1915 return (*addressp) (QImode, z);
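
The essence of offsettable_address_p: take the address's constant term, add the mode size minus one, and check that the result is still a valid address, so that every byte of the object can be reached. A standalone sketch where a small signed displacement range stands in for the target's GO_IF_LEGITIMATE_ADDRESS:

#include <stdio.h>

static int
valid_disp (long disp)
{
  return disp >= -128 && disp <= 127;   /* Assumed addressing range.  */
}

/* An access of SIZE bytes at DISP is offsettable if the highest byte
   it may touch is still addressable.  */
static int
offsettable (long disp, unsigned size)
{
  return valid_disp (disp) && valid_disp (disp + (long) (size - 1));
}

int
main (void)
{
  printf ("disp 120, size 4: %d\n", offsettable (120, 4));   /* 1 */
  printf ("disp 126, size 4: %d\n", offsettable (126, 4));   /* 0 */
  return 0;
}
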
1918 /* Return 1 if ADDR is an address-expression whose effect depends
1919 on the mode of the memory reference it is used in.
1921 Autoincrement addressing is a typical example of mode-dependence
1922 because the amount of the increment depends on the mode. */
1925 mode_dependent_address_p (rtx addr ATTRIBUTE_UNUSED /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */)
1927 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1928 return 0;
1929 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1930 win: ATTRIBUTE_UNUSED_LABEL
1931 return 1;
1934 /* Like extract_insn, but save the insn extracted and don't extract it again
1935 when called again for the same insn, expecting that recog_data still contains
1936 valid information. This is used primarily by the gen_attr infrastructure,
1937 which often extracts the same insn again and again. */
1938 void
1939 extract_insn_cached (rtx insn)
1941 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
1942 return;
1943 extract_insn (insn);
1944 recog_data.insn = insn;
1946 /* Do cached extract_insn and constrain_operands, and complain about failures.
1947 Used by insn_attrtab. */
1948 void
1949 extract_constrain_insn_cached (rtx insn)
1951 extract_insn_cached (insn);
1952 if (which_alternative == -1
1953 && !constrain_operands (reload_completed))
1954 fatal_insn_not_found (insn);
1956 /* Do cached constrain_operands and complain about failures. */
1958 constrain_operands_cached (int strict)
1960 if (which_alternative == -1)
1961 return constrain_operands (strict);
1962 else
1963 return 1;
1966 /* Analyze INSN and fill in recog_data. */
1968 void
1969 extract_insn (rtx insn)
1971 int i;
1972 int icode;
1973 int noperands;
1974 rtx body = PATTERN (insn);
1976 recog_data.insn = NULL;
1977 recog_data.n_operands = 0;
1978 recog_data.n_alternatives = 0;
1979 recog_data.n_dups = 0;
1980 which_alternative = -1;
1982 switch (GET_CODE (body))
1984 case USE:
1985 case CLOBBER:
1986 case ASM_INPUT:
1987 case ADDR_VEC:
1988 case ADDR_DIFF_VEC:
1989 return;
1991 case SET:
1992 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1993 goto asm_insn;
1994 else
1995 goto normal_insn;
1996 case PARALLEL:
1997 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
1998 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1999 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2000 goto asm_insn;
2001 else
2002 goto normal_insn;
2003 case ASM_OPERANDS:
2004 asm_insn:
2005 recog_data.n_operands = noperands = asm_noperands (body);
2006 if (noperands >= 0)
2008 /* This insn is an `asm' with operands. */
2010 /* expand_asm_operands makes sure there aren't too many operands. */
2011 gcc_assert (noperands <= MAX_RECOG_OPERANDS);
2013 /* Now get the operand values and constraints out of the insn. */
2014 decode_asm_operands (body, recog_data.operand,
2015 recog_data.operand_loc,
2016 recog_data.constraints,
2017 recog_data.operand_mode);
2018 if (noperands > 0)
2020 const char *p = recog_data.constraints[0];
2021 recog_data.n_alternatives = 1;
2022 while (*p)
2023 recog_data.n_alternatives += (*p++ == ',');
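/* For example (illustrative): the constraint string "=r,m" contains
   one comma, so the loop above yields two alternatives. */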
2025 break;
2027 fatal_insn_not_found (insn);
2029 default:
2030 normal_insn:
2031 /* Ordinary insn: recognize it, get the operands via insn_extract
2032 and get the constraints. */
2034 icode = recog_memoized (insn);
2035 if (icode < 0)
2036 fatal_insn_not_found (insn);
2038 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2039 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2040 recog_data.n_dups = insn_data[icode].n_dups;
2042 insn_extract (insn);
2044 for (i = 0; i < noperands; i++)
2046 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2047 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2048 /* VOIDmode match_operands get their mode from the real operand. */
2049 if (recog_data.operand_mode[i] == VOIDmode)
2050 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2053 for (i = 0; i < noperands; i++)
2054 recog_data.operand_type[i]
2055 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2056 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2057 : OP_IN);
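/* Thus (illustrative): a constraint string beginning "=..." marks its
   operand OP_OUT, "+..." marks it OP_INOUT, and anything else, e.g.
   a plain "r", leaves it OP_IN. */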
2059 gcc_assert (recog_data.n_alternatives <= MAX_RECOG_ALTERNATIVES);
2062 /* After calling extract_insn, you can use this function to extract some
2063 information from the constraint strings into a more usable form.
2064 The collected data is stored in recog_op_alt. */
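/* A small example of the result (illustrative, not from the original
   comment): if operand 1 of the extracted insn has the constraint
   "r,m", then after this call recog_op_alt[1][0].cl is GENERAL_REGS
   and recog_op_alt[1][1].memory_ok is 1 -- one operand_alternative
   record per alternative of each operand. */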
2065 void
2066 preprocess_constraints (void)
2068 int i;
2070 for (i = 0; i < recog_data.n_operands; i++)
2071 memset (recog_op_alt[i], 0, (recog_data.n_alternatives
2072 * sizeof (struct operand_alternative)));
2074 for (i = 0; i < recog_data.n_operands; i++)
2076 int j;
2077 struct operand_alternative *op_alt;
2078 const char *p = recog_data.constraints[i];
2080 op_alt = recog_op_alt[i];
2082 for (j = 0; j < recog_data.n_alternatives; j++)
2084 op_alt[j].cl = NO_REGS;
2085 op_alt[j].constraint = p;
2086 op_alt[j].matches = -1;
2087 op_alt[j].matched = -1;
2089 if (*p == '\0' || *p == ',')
2091 op_alt[j].anything_ok = 1;
2092 continue;
2095 for (;;)
2097 char c = *p;
2098 if (c == '#')
2099 do
2100 c = *++p;
2101 while (c != ',' && c != '\0');
2102 if (c == ',' || c == '\0')
2104 p++;
2105 break;
2108 switch (c)
2110 case '=': case '+': case '*': case '%':
2111 case 'E': case 'F': case 'G': case 'H':
2112 case 's': case 'i': case 'n':
2113 case 'I': case 'J': case 'K': case 'L':
2114 case 'M': case 'N': case 'O': case 'P':
2115 /* These don't say anything we care about. */
2116 break;
2118 case '?':
2119 op_alt[j].reject += 6;
2120 break;
2121 case '!':
2122 op_alt[j].reject += 600;
2123 break;
2124 case '&':
2125 op_alt[j].earlyclobber = 1;
2126 break;
2128 case '0': case '1': case '2': case '3': case '4':
2129 case '5': case '6': case '7': case '8': case '9':
2131 char *end;
2132 op_alt[j].matches = strtoul (p, &end, 10);
2133 recog_op_alt[op_alt[j].matches][j].matched = i;
2134 p = end;
2136 continue;
2138 case 'm':
2139 op_alt[j].memory_ok = 1;
2140 break;
2141 case '<':
2142 op_alt[j].decmem_ok = 1;
2143 break;
2144 case '>':
2145 op_alt[j].incmem_ok = 1;
2146 break;
2147 case 'V':
2148 op_alt[j].nonoffmem_ok = 1;
2149 break;
2150 case 'o':
2151 op_alt[j].offmem_ok = 1;
2152 break;
2153 case 'X':
2154 op_alt[j].anything_ok = 1;
2155 break;
2157 case 'p':
2158 op_alt[j].is_address = 1;
2159 op_alt[j].cl = reg_class_subunion[(int) op_alt[j].cl]
2160 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2161 break;
2163 case 'g':
2164 case 'r':
2165 op_alt[j].cl =
2166 reg_class_subunion[(int) op_alt[j].cl][(int) GENERAL_REGS];
2167 break;
2169 default:
2170 if (EXTRA_MEMORY_CONSTRAINT (c, p))
2172 op_alt[j].memory_ok = 1;
2173 break;
2175 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
2177 op_alt[j].is_address = 1;
2178 op_alt[j].cl
2179 = (reg_class_subunion
2180 [(int) op_alt[j].cl]
2181 [(int) MODE_BASE_REG_CLASS (VOIDmode)]);
2182 break;
2185 op_alt[j].cl
2186 = (reg_class_subunion
2187 [(int) op_alt[j].cl]
2188 [(int) REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
2189 break;
2191 p += CONSTRAINT_LEN (c, p);
2197 /* Check the operands of an insn against the insn's operand constraints
2198 and return 1 if they are valid.
2199 The information about the insn's operands, constraints, operand modes
2200 etc. is obtained from the global variables set up by extract_insn.
2202 WHICH_ALTERNATIVE is set to a number which indicates which
2203 alternative of constraints was matched: 0 for the first alternative,
2204 1 for the next, etc.
2206 In addition, when two operands are required to match
2207 and it happens that the output operand is (reg) while the
2208 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2209 make the output operand look like the input.
2210 This is because the output operand is the one the template will print.
2212 This is used in final, just before printing the assembler code and by
2213 the routines that determine an insn's attribute.
2215 If STRICT is positive, it means that we have been
2216 called after reload has been completed. In that case, we must
2217 do all checks strictly. If it is zero, it means that we have been called
2218 before reload has completed. In that case, we first try to see if we can
2219 find an alternative that matches strictly. If not, we try again, this
2220 time assuming that reload will fix up the insn. This provides a "best
2221 guess" for the alternative and is used to compute attributes of insns prior
2222 to reload. A negative value of STRICT is used for this internal call. */
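/* A sketch of the usual calling sequence (mirroring the cached
   variants above):

     extract_insn (insn);
     if (!constrain_operands (reload_completed))
       fatal_insn_not_found (insn);

   so checking is strict exactly when reload has completed. */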
2224 struct funny_match
2226 int this, other;
2229 int
2230 constrain_operands (int strict)
2232 const char *constraints[MAX_RECOG_OPERANDS];
2233 int matching_operands[MAX_RECOG_OPERANDS];
2234 int earlyclobber[MAX_RECOG_OPERANDS];
2235 int c;
2237 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2238 int funny_match_index;
2240 which_alternative = 0;
2241 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2242 return 1;
2244 for (c = 0; c < recog_data.n_operands; c++)
2246 constraints[c] = recog_data.constraints[c];
2247 matching_operands[c] = -1;
2250 do
2252 int opno;
2253 int lose = 0;
2254 funny_match_index = 0;
2256 for (opno = 0; opno < recog_data.n_operands; opno++)
2258 rtx op = recog_data.operand[opno];
2259 enum machine_mode mode = GET_MODE (op);
2260 const char *p = constraints[opno];
2261 int offset = 0;
2262 int win = 0;
2263 int val;
2264 int len;
2266 earlyclobber[opno] = 0;
2268 /* A unary operator may be accepted by the predicate, but it
2269 is irrelevant for matching constraints. */
2270 if (UNARY_P (op))
2271 op = XEXP (op, 0);
2273 if (GET_CODE (op) == SUBREG)
2275 if (REG_P (SUBREG_REG (op))
2276 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2277 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2278 GET_MODE (SUBREG_REG (op)),
2279 SUBREG_BYTE (op),
2280 GET_MODE (op));
2281 op = SUBREG_REG (op);
2284 /* An empty constraint or empty alternative
2285 allows anything which matched the pattern. */
2286 if (*p == 0 || *p == ',')
2287 win = 1;
2289 do
2290 switch (c = *p, len = CONSTRAINT_LEN (c, p), c)
2292 case '\0':
2293 len = 0;
2294 break;
2295 case ',':
2296 c = '\0';
2297 break;
2299 case '?': case '!': case '*': case '%':
2300 case '=': case '+':
2301 break;
2303 case '#':
2304 /* Ignore rest of this alternative as far as
2305 constraint checking is concerned. */
2306 do
2307 p++;
2308 while (*p && *p != ',');
2309 len = 0;
2310 break;
2312 case '&':
2313 earlyclobber[opno] = 1;
2314 break;
2316 case '0': case '1': case '2': case '3': case '4':
2317 case '5': case '6': case '7': case '8': case '9':
2319 /* This operand must be the same as a previous one.
2320 This kind of constraint is used for instructions such
2321 as add when they take only two operands.
2323 Note that the lower-numbered operand is passed first.
2325 If we are not testing strictly, assume that this
2326 constraint will be satisfied. */
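/* For instance (illustrative): a two-address add pattern may give its
   input operand the constraint "0", requiring it to be the same rtx as
   operand 0; operands_match_p performs that check below. */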
2328 char *end;
2329 int match;
2331 match = strtoul (p, &end, 10);
2332 p = end;
2334 if (strict < 0)
2335 val = 1;
2336 else
2338 rtx op1 = recog_data.operand[match];
2339 rtx op2 = recog_data.operand[opno];
2341 /* A unary operator may be accepted by the predicate,
2342 but it is irrelevant for matching constraints. */
2343 if (UNARY_P (op1))
2344 op1 = XEXP (op1, 0);
2345 if (UNARY_P (op2))
2346 op2 = XEXP (op2, 0);
2348 val = operands_match_p (op1, op2);
2351 matching_operands[opno] = match;
2352 matching_operands[match] = opno;
2354 if (val != 0)
2355 win = 1;
2357 /* If output is *x and input is *--x, arrange later
2358 to change the output to *--x as well, since the
2359 output op is the one that will be printed. */
2360 if (val == 2 && strict > 0)
2362 funny_match[funny_match_index].this = opno;
2363 funny_match[funny_match_index++].other = match;
2366 len = 0;
2367 break;
2369 case 'p':
2370 /* p is used for address_operands. When we are called by
2371 gen_reload, no one will have checked that the address is
2372 strictly valid, i.e., that all pseudos requiring hard regs
2373 have gotten them. */
2374 if (strict <= 0
2375 || (strict_memory_address_p (recog_data.operand_mode[opno],
2376 op)))
2377 win = 1;
2378 break;
2380 /* No need to check general_operand again;
2381 it was done in insn-recog.c. */
2382 case 'g':
2383 /* Anything goes unless it is a REG and really has a hard reg
2384 but the hard reg is not in the class GENERAL_REGS. */
2385 if (strict < 0
2386 || GENERAL_REGS == ALL_REGS
2387 || !REG_P (op)
2388 || (reload_in_progress
2389 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2390 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2391 win = 1;
2392 break;
2394 case 'X':
2395 /* This is used for a MATCH_SCRATCH in the cases when
2396 we don't actually need anything. So anything goes
2397 any time. */
2398 win = 1;
2399 break;
2401 case 'm':
2402 /* Memory operands must be valid, to the extent
2403 required by STRICT. */
2404 if (MEM_P (op))
2406 if (strict > 0
2407 && !strict_memory_address_p (GET_MODE (op),
2408 XEXP (op, 0)))
2409 break;
2410 if (strict == 0
2411 && !memory_address_p (GET_MODE (op), XEXP (op, 0)))
2412 break;
2413 win = 1;
2415 /* Before reload, accept what reload can turn into mem. */
2416 else if (strict < 0 && CONSTANT_P (op))
2417 win = 1;
2418 /* During reload, accept a pseudo. */
2419 else if (reload_in_progress && REG_P (op)
2420 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2421 win = 1;
2422 break;
2424 case '<':
2425 if (MEM_P (op)
2426 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2427 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2428 win = 1;
2429 break;
2431 case '>':
2432 if (MEM_P (op)
2433 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2434 || GET_CODE (XEXP (op, 0)) == POST_INC))
2435 win = 1;
2436 break;
2438 case 'E':
2439 case 'F':
2440 if (GET_CODE (op) == CONST_DOUBLE
2441 || (GET_CODE (op) == CONST_VECTOR
2442 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2443 win = 1;
2444 break;
2446 case 'G':
2447 case 'H':
2448 if (GET_CODE (op) == CONST_DOUBLE
2449 && CONST_DOUBLE_OK_FOR_CONSTRAINT_P (op, c, p))
2450 win = 1;
2451 break;
2453 case 's':
2454 if (GET_CODE (op) == CONST_INT
2455 || (GET_CODE (op) == CONST_DOUBLE
2456 && GET_MODE (op) == VOIDmode))
2457 break;
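/* Otherwise fall through: `s' accepts exactly the constants that `i'
   accepts, minus the explicit integers rejected above. */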
2458 case 'i':
2459 if (CONSTANT_P (op))
2460 win = 1;
2461 break;
2463 case 'n':
2464 if (GET_CODE (op) == CONST_INT
2465 || (GET_CODE (op) == CONST_DOUBLE
2466 && GET_MODE (op) == VOIDmode))
2467 win = 1;
2468 break;
2470 case 'I':
2471 case 'J':
2472 case 'K':
2473 case 'L':
2474 case 'M':
2475 case 'N':
2476 case 'O':
2477 case 'P':
2478 if (GET_CODE (op) == CONST_INT
2479 && CONST_OK_FOR_CONSTRAINT_P (INTVAL (op), c, p))
2480 win = 1;
2481 break;
2483 case 'V':
2484 if (MEM_P (op)
2485 && ((strict > 0 && ! offsettable_memref_p (op))
2486 || (strict < 0
2487 && !(CONSTANT_P (op) || MEM_P (op)))
2488 || (reload_in_progress
2489 && !(REG_P (op)
2490 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2491 win = 1;
2492 break;
2494 case 'o':
2495 if ((strict > 0 && offsettable_memref_p (op))
2496 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2497 /* Before reload, accept what reload can handle. */
2498 || (strict < 0
2499 && (CONSTANT_P (op) || MEM_P (op)))
2500 /* During reload, accept a pseudo. */
2501 || (reload_in_progress && REG_P (op)
2502 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2503 win = 1;
2504 break;
2506 default:
2508 enum reg_class cl;
2510 cl = (c == 'r'
2511 ? GENERAL_REGS : REG_CLASS_FROM_CONSTRAINT (c, p));
2512 if (cl != NO_REGS)
2514 if (strict < 0
2515 || (strict == 0
2516 && REG_P (op)
2517 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2518 || (strict == 0 && GET_CODE (op) == SCRATCH)
2519 || (REG_P (op)
2520 && reg_fits_class_p (op, cl, offset, mode)))
2521 win = 1;
2523 #ifdef EXTRA_CONSTRAINT_STR
2524 else if (EXTRA_CONSTRAINT_STR (op, c, p))
2525 win = 1;
2527 else if (EXTRA_MEMORY_CONSTRAINT (c, p)
2528 /* Every memory operand can be reloaded to fit. */
2529 && ((strict < 0 && MEM_P (op))
2530 /* Before reload, accept what reload can turn
2531 into mem. */
2532 || (strict < 0 && CONSTANT_P (op))
2533 /* During reload, accept a pseudo. */
2534 || (reload_in_progress && REG_P (op)
2535 && REGNO (op) >= FIRST_PSEUDO_REGISTER)))
2536 win = 1;
2537 else if (EXTRA_ADDRESS_CONSTRAINT (c, p)
2538 /* Every address operand can be reloaded to fit. */
2539 && strict < 0)
2540 win = 1;
2541 #endif
2542 break;
2545 while (p += len, c);
2547 constraints[opno] = p;
2548 /* If this operand did not win somehow,
2549 this alternative loses. */
2550 if (! win)
2551 lose = 1;
2553 /* This alternative won; the operands are ok.
2554 Change whichever operands this alternative says to change. */
2555 if (! lose)
2557 int opno, eopno;
2559 /* See if any earlyclobber operand conflicts with some other
2560 operand. */
2562 if (strict > 0)
2563 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2564 /* Ignore earlyclobber operands now in memory,
2565 because we would often report failure when we have
2566 two memory operands, one of which was formerly a REG. */
2567 if (earlyclobber[eopno]
2568 && REG_P (recog_data.operand[eopno]))
2569 for (opno = 0; opno < recog_data.n_operands; opno++)
2570 if ((MEM_P (recog_data.operand[opno])
2571 || recog_data.operand_type[opno] != OP_OUT)
2572 && opno != eopno
2573 /* Ignore things like match_operator operands. */
2574 && *recog_data.constraints[opno] != 0
2575 && ! (matching_operands[opno] == eopno
2576 && operands_match_p (recog_data.operand[opno],
2577 recog_data.operand[eopno]))
2578 && ! safe_from_earlyclobber (recog_data.operand[opno],
2579 recog_data.operand[eopno]))
2580 lose = 1;
2582 if (! lose)
2584 while (--funny_match_index >= 0)
2586 recog_data.operand[funny_match[funny_match_index].other]
2587 = recog_data.operand[funny_match[funny_match_index].this];
2590 return 1;
2594 which_alternative++;
2596 while (which_alternative < recog_data.n_alternatives);
2598 which_alternative = -1;
2599 /* If we are about to reject this, but we are not to test strictly,
2600 try a very loose test. Only return failure if it fails also. */
2601 if (strict == 0)
2602 return constrain_operands (-1);
2603 else
2604 return 0;
2607 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2608 is a hard reg in class CLASS when its regno is offset by OFFSET
2609 and changed to mode MODE.
2610 If REG occupies multiple hard regs, all of them must be in CLASS. */
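/* Example (illustrative): on a target where DImode occupies two hard
   registers, a DImode REG based at hard reg 3 (with OFFSET 0) fits
   class CL only if both hard regs 3 and 4 are in CL; the
   hard_regno_nregs loop below checks every such register. */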
2612 int
2613 reg_fits_class_p (rtx operand, enum reg_class cl, int offset,
2614 enum machine_mode mode)
2616 int regno = REGNO (operand);
2617 if (regno < FIRST_PSEUDO_REGISTER
2618 && TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2619 regno + offset))
2621 int sr;
2622 regno += offset;
2623 for (sr = hard_regno_nregs[regno][mode] - 1;
2624 sr > 0; sr--)
2625 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) cl],
2626 regno + sr))
2627 break;
2628 return sr == 0;
2631 return 0;
2634 /* Split a single instruction. Helper function for split_all_insns and
2635 split_all_insns_noflow. Return the last insn in the sequence if
2636 successful, or NULL if unsuccessful. */
2638 static rtx
2639 split_insn (rtx insn)
2641 /* Split insns here to get max fine-grain parallelism. */
2642 rtx first = PREV_INSN (insn);
2643 rtx last = try_split (PATTERN (insn), insn, 1);
2645 if (last == insn)
2646 return NULL_RTX;
2648 /* try_split returns the NOTE that INSN became. */
2649 SET_INSN_DELETED (insn);
2651 /* ??? Coddle to md files that generate subregs in post-reload
2652 splitters instead of computing the proper hard register. */
2653 if (reload_completed && first != last)
2655 first = NEXT_INSN (first);
2656 for (;;)
2658 if (INSN_P (first))
2659 cleanup_subreg_operands (first);
2660 if (first == last)
2661 break;
2662 first = NEXT_INSN (first);
2665 return last;
2668 /* Split all insns in the function. If UPD_LIFE, update life info afterwards. */
2670 void
2671 split_all_insns (int upd_life)
2673 sbitmap blocks;
2674 bool changed;
2675 basic_block bb;
2677 blocks = sbitmap_alloc (last_basic_block);
2678 sbitmap_zero (blocks);
2679 changed = false;
2681 FOR_EACH_BB_REVERSE (bb)
2683 rtx insn, next;
2684 bool finish = false;
2686 for (insn = BB_HEAD (bb); !finish ; insn = next)
2688 /* Can't use `next_real_insn' because that might go across
2689 CODE_LABELS and short-out basic blocks. */
2690 next = NEXT_INSN (insn);
2691 finish = (insn == BB_END (bb));
2692 if (INSN_P (insn))
2694 rtx set = single_set (insn);
2696 /* Don't split no-op move insns. These should silently
2697 disappear later in final. Splitting such insns would
2698 break the code that handles REG_NO_CONFLICT blocks. */
2699 if (set && set_noop_p (set))
2701 /* Nops get in the way while scheduling, so delete them
2702 now if register allocation has already been done. It
2703 is too risky to try to do this before register
2704 allocation, and there are unlikely to be very many
2705 nops then anyway. */
2706 if (reload_completed)
2708 /* If the no-op set has a REG_UNUSED note, we need
2709 to update liveness information. */
2710 if (find_reg_note (insn, REG_UNUSED, NULL_RTX))
2712 SET_BIT (blocks, bb->index);
2713 changed = true;
2715 /* ??? Is life info affected by deleting edges? */
2716 delete_insn_and_edges (insn);
2719 else
2721 rtx last = split_insn (insn);
2722 if (last)
2724 /* The split sequence may include barrier, but the
2725 BB boundary we are interested in will be set to
2726 previous one. */
2728 while (BARRIER_P (last))
2729 last = PREV_INSN (last);
2730 SET_BIT (blocks, bb->index);
2731 changed = true;
2738 if (changed)
2740 int old_last_basic_block = last_basic_block;
2742 find_many_sub_basic_blocks (blocks);
2744 if (old_last_basic_block != last_basic_block && upd_life)
2745 blocks = sbitmap_resize (blocks, last_basic_block, 1);
2748 if (changed && upd_life)
2749 update_life_info (blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
2750 PROP_DEATH_NOTES);
2752 #ifdef ENABLE_CHECKING
2753 verify_flow_info ();
2754 #endif
2756 sbitmap_free (blocks);
2759 /* Same as split_all_insns, but do not expect CFG to be available.
2760 Used by machine dependent reorg passes. */
2762 void
2763 split_all_insns_noflow (void)
2765 rtx next, insn;
2767 for (insn = get_insns (); insn; insn = next)
2769 next = NEXT_INSN (insn);
2770 if (INSN_P (insn))
2772 /* Don't split no-op move insns. These should silently
2773 disappear later in final. Splitting such insns would
2774 break the code that handles REG_NO_CONFLICT blocks. */
2775 rtx set = single_set (insn);
2776 if (set && set_noop_p (set))
2778 /* Nops get in the way while scheduling, so delete them
2779 now if register allocation has already been done. It
2780 is too risky to try to do this before register
2781 allocation, and there are unlikely to be very many
2782 nops then anyway.
2784 ??? Should we use delete_insn when the CFG isn't valid? */
2785 if (reload_completed)
2786 delete_insn_and_edges (insn);
2788 else
2789 split_insn (insn);
2794 #ifdef HAVE_peephole2
2795 struct peep2_insn_data
2797 rtx insn;
2798 regset live_before;
2801 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2802 static int peep2_current;
2804 /* A non-insn marker indicating the last insn of the block.
2805 The live_before regset for this element is correct, indicating
2806 global_live_at_end for the block. */
2807 #define PEEP2_EOB pc_rtx
2809 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2810 does not exist. Used by the recognizer to find the next insn to match
2811 in a multi-insn pattern. */
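/* Note (illustrative): peep2_insn_data acts as a ring buffer of
   MAX_INSNS_PER_PEEP2 + 1 slots, so the index arithmetic below wraps
   modulo the buffer size; e.g. if peep2_current is the last slot,
   peep2_next_insn (1) inspects slot 0. */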
2813 rtx
2814 peep2_next_insn (int n)
2816 gcc_assert (n < MAX_INSNS_PER_PEEP2 + 1);
2818 n += peep2_current;
2819 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2820 n -= MAX_INSNS_PER_PEEP2 + 1;
2822 if (peep2_insn_data[n].insn == PEEP2_EOB)
2823 return NULL_RTX;
2824 return peep2_insn_data[n].insn;
2827 /* Return true if REGNO is dead before the Nth non-note insn
2828 after `current'. */
2830 int
2831 peep2_regno_dead_p (int ofs, int regno)
2833 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2835 ofs += peep2_current;
2836 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2837 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2839 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2841 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2844 /* Similarly for a REG. */
2846 int
2847 peep2_reg_dead_p (int ofs, rtx reg)
2849 int regno, n;
2851 gcc_assert (ofs < MAX_INSNS_PER_PEEP2 + 1);
2853 ofs += peep2_current;
2854 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2855 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2857 gcc_assert (peep2_insn_data[ofs].insn != NULL_RTX);
2859 regno = REGNO (reg);
2860 n = hard_regno_nregs[regno][GET_MODE (reg)];
2861 while (--n >= 0)
2862 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2863 return 0;
2864 return 1;
2867 /* Try to find a hard register of mode MODE, matching the register class in
2868 CLASS_STR, which is available from the start of the insn at peephole
2869 buffer offset FROM and remains available until the end of the insn at
2870 buffer offset TO; both offsets are relative to peep2_current, as for
2871 peep2_next_insn.
2872 Registers that already have bits set in REG_SET will not be considered.
2874 If an appropriate register is available, it will be returned and the
2875 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2876 returned. */
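/* A sketch of a direct call (hypothetical offsets, class and mode; in
   practice this is most often reached via a define_peephole2
   match_scratch):

     HARD_REG_SET used;
     rtx scratch;

     CLEAR_HARD_REG_SET (used);
     scratch = peep2_find_free_register (0, 2, "r", SImode, &used);
     if (scratch != NULL_RTX)
       ... use scratch as a temporary across buffer offsets 0..2 ...  */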
2878 rtx
2879 peep2_find_free_register (int from, int to, const char *class_str,
2880 enum machine_mode mode, HARD_REG_SET *reg_set)
2882 static int search_ofs;
2883 enum reg_class cl;
2884 HARD_REG_SET live;
2885 int i;
2887 gcc_assert (from < MAX_INSNS_PER_PEEP2 + 1);
2888 gcc_assert (to < MAX_INSNS_PER_PEEP2 + 1);
2890 from += peep2_current;
2891 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2892 from -= MAX_INSNS_PER_PEEP2 + 1;
2893 to += peep2_current;
2894 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2895 to -= MAX_INSNS_PER_PEEP2 + 1;
2897 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2898 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2900 while (from != to)
2902 HARD_REG_SET this_live;
2904 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2905 from = 0;
2906 gcc_assert (peep2_insn_data[from].insn != NULL_RTX);
2907 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2908 IOR_HARD_REG_SET (live, this_live);
2911 cl = (class_str[0] == 'r' ? GENERAL_REGS
2912 : REG_CLASS_FROM_CONSTRAINT (class_str[0], class_str));
2914 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2916 int raw_regno, regno, success, j;
2918 /* Distribute the free registers as much as possible. */
2919 raw_regno = search_ofs + i;
2920 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2921 raw_regno -= FIRST_PSEUDO_REGISTER;
2922 #ifdef REG_ALLOC_ORDER
2923 regno = reg_alloc_order[raw_regno];
2924 #else
2925 regno = raw_regno;
2926 #endif
2928 /* Don't allocate fixed registers. */
2929 if (fixed_regs[regno])
2930 continue;
2931 /* Make sure the register is of the right class. */
2932 if (! TEST_HARD_REG_BIT (reg_class_contents[cl], regno))
2933 continue;
2934 /* And can support the mode we need. */
2935 if (! HARD_REGNO_MODE_OK (regno, mode))
2936 continue;
2937 /* And that we don't create an extra save/restore. */
2938 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2939 continue;
2940 /* And we don't clobber traceback for noreturn functions. */
2941 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2942 && (! reload_completed || frame_pointer_needed))
2943 continue;
2945 success = 1;
2946 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2948 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2949 || TEST_HARD_REG_BIT (live, regno + j))
2951 success = 0;
2952 break;
2955 if (success)
2957 for (j = hard_regno_nregs[regno][mode] - 1; j >= 0; j--)
2958 SET_HARD_REG_BIT (*reg_set, regno + j);
2960 /* Start the next search with the next register. */
2961 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2962 raw_regno = 0;
2963 search_ofs = raw_regno;
2965 return gen_rtx_REG (mode, regno);
2969 search_ofs = 0;
2970 return NULL_RTX;
2973 /* Perform the peephole2 optimization pass. */
2975 void
2976 peephole2_optimize (FILE *dump_file ATTRIBUTE_UNUSED)
2978 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
2979 rtx insn, prev;
2980 regset live;
2981 int i;
2982 basic_block bb;
2983 #ifdef HAVE_conditional_execution
2984 sbitmap blocks;
2985 bool changed;
2986 #endif
2987 bool do_cleanup_cfg = false;
2988 bool do_rebuild_jump_labels = false;
2990 /* Initialize the regsets we're going to use. */
2991 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
2992 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
2993 live = INITIALIZE_REG_SET (rs_heads[i]);
2995 #ifdef HAVE_conditional_execution
2996 blocks = sbitmap_alloc (last_basic_block);
2997 sbitmap_zero (blocks);
2998 changed = false;
2999 #else
3000 count_or_remove_death_notes (NULL, 1);
3001 #endif
3003 FOR_EACH_BB_REVERSE (bb)
3005 struct propagate_block_info *pbi;
3007 /* Indicate that all slots except the last hold invalid data. */
3008 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3009 peep2_insn_data[i].insn = NULL_RTX;
3011 /* Indicate that the last slot contains live_after data. */
3012 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3013 peep2_current = MAX_INSNS_PER_PEEP2;
3015 /* Start up propagation. */
3016 COPY_REG_SET (live, bb->global_live_at_end);
3017 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3019 #ifdef HAVE_conditional_execution
3020 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3021 #else
3022 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3023 #endif
3025 for (insn = BB_END (bb); ; insn = prev)
3027 prev = PREV_INSN (insn);
3028 if (INSN_P (insn))
3030 rtx try, before_try, x;
3031 int match_len;
3032 rtx note;
3033 bool was_call = false;
3035 /* Record this insn. */
3036 if (--peep2_current < 0)
3037 peep2_current = MAX_INSNS_PER_PEEP2;
3038 peep2_insn_data[peep2_current].insn = insn;
3039 propagate_one_insn (pbi, insn);
3040 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3042 /* Match the peephole. */
3043 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3044 if (try != NULL)
3046 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3047 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3048 cfg-related call notes. */
3049 for (i = 0; i <= match_len; ++i)
3051 int j;
3052 rtx old_insn, new_insn, note;
3054 j = i + peep2_current;
3055 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3056 j -= MAX_INSNS_PER_PEEP2 + 1;
3057 old_insn = peep2_insn_data[j].insn;
3058 if (!CALL_P (old_insn))
3059 continue;
3060 was_call = true;
3062 new_insn = try;
3063 while (new_insn != NULL_RTX)
3065 if (CALL_P (new_insn))
3066 break;
3067 new_insn = NEXT_INSN (new_insn);
3070 gcc_assert (new_insn != NULL_RTX);
3072 CALL_INSN_FUNCTION_USAGE (new_insn)
3073 = CALL_INSN_FUNCTION_USAGE (old_insn);
3075 for (note = REG_NOTES (old_insn);
3076 note;
3077 note = XEXP (note, 1))
3078 switch (REG_NOTE_KIND (note))
3080 case REG_NORETURN:
3081 case REG_SETJMP:
3082 case REG_ALWAYS_RETURN:
3083 REG_NOTES (new_insn)
3084 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3085 XEXP (note, 0),
3086 REG_NOTES (new_insn));
3087 default:
3088 /* Discard all other reg notes. */
3089 break;
3092 /* Croak if there is another call in the sequence. */
3093 while (++i <= match_len)
3095 j = i + peep2_current;
3096 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3097 j -= MAX_INSNS_PER_PEEP2 + 1;
3098 old_insn = peep2_insn_data[j].insn;
3099 gcc_assert (!CALL_P (old_insn));
3101 break;
3104 i = match_len + peep2_current;
3105 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3106 i -= MAX_INSNS_PER_PEEP2 + 1;
3108 note = find_reg_note (peep2_insn_data[i].insn,
3109 REG_EH_REGION, NULL_RTX);
3111 /* Replace the old sequence with the new. */
3112 try = emit_insn_after_setloc (try, peep2_insn_data[i].insn,
3113 INSN_LOCATOR (peep2_insn_data[i].insn));
3114 before_try = PREV_INSN (insn);
3115 delete_insn_chain (insn, peep2_insn_data[i].insn);
3117 /* Re-insert the EH_REGION notes. */
3118 if (note || (was_call && nonlocal_goto_handler_labels))
3120 edge eh_edge;
3121 edge_iterator ei;
3123 FOR_EACH_EDGE (eh_edge, ei, bb->succs)
3124 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3125 break;
3127 for (x = try ; x != before_try ; x = PREV_INSN (x))
3128 if (CALL_P (x)
3129 || (flag_non_call_exceptions
3130 && may_trap_p (PATTERN (x))
3131 && !find_reg_note (x, REG_EH_REGION, NULL)))
3133 if (note)
3134 REG_NOTES (x)
3135 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3136 XEXP (note, 0),
3137 REG_NOTES (x));
3139 if (x != BB_END (bb) && eh_edge)
3141 edge nfte, nehe;
3142 int flags;
3144 nfte = split_block (bb, x);
3145 flags = (eh_edge->flags
3146 & (EDGE_EH | EDGE_ABNORMAL));
3147 if (CALL_P (x))
3148 flags |= EDGE_ABNORMAL_CALL;
3149 nehe = make_edge (nfte->src, eh_edge->dest,
3150 flags);
3152 nehe->probability = eh_edge->probability;
3153 nfte->probability
3154 = REG_BR_PROB_BASE - nehe->probability;
3156 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3157 #ifdef HAVE_conditional_execution
3158 SET_BIT (blocks, nfte->dest->index);
3159 changed = true;
3160 #endif
3161 bb = nfte->src;
3162 eh_edge = nehe;
3166 /* The replacement may have turned a possibly trapping insn into a
3167 non-trapping one; zap any outgoing edges that are now dead. */
3168 do_cleanup_cfg |= purge_dead_edges (bb);
3171 #ifdef HAVE_conditional_execution
3172 /* With conditional execution, we cannot back up the
3173 live information so easily, since the conditional
3174 death data structures are not so self-contained.
3175 So record that we've made a modification to this
3176 block and update life information at the end. */
3177 SET_BIT (blocks, bb->index);
3178 changed = true;
3180 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3181 peep2_insn_data[i].insn = NULL_RTX;
3182 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3183 #else
3184 /* Back up lifetime information past the end of the
3185 newly created sequence. */
3186 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3187 i = 0;
3188 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3190 /* Update life information for the new sequence. */
3191 x = try;
3192 do
3194 if (INSN_P (x))
3196 if (--i < 0)
3197 i = MAX_INSNS_PER_PEEP2;
3198 peep2_insn_data[i].insn = x;
3199 propagate_one_insn (pbi, x);
3200 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3202 x = PREV_INSN (x);
3204 while (x != prev);
3206 /* ??? Should verify that LIVE now matches what we
3207 had before the new sequence. */
3209 peep2_current = i;
3210 #endif
3212 /* If we generated a jump instruction, it won't have
3213 JUMP_LABEL set. Recompute after we're done. */
3214 for (x = try; x != before_try; x = PREV_INSN (x))
3215 if (JUMP_P (x))
3217 do_rebuild_jump_labels = true;
3218 break;
3223 if (insn == BB_HEAD (bb))
3224 break;
3227 free_propagate_block_info (pbi);
3230 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3231 FREE_REG_SET (peep2_insn_data[i].live_before);
3232 FREE_REG_SET (live);
3234 if (do_rebuild_jump_labels)
3235 rebuild_jump_labels (get_insns ());
3237 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3238 we've changed global life since exception handlers are no longer
3239 reachable. */
3240 if (do_cleanup_cfg)
3242 cleanup_cfg (0);
3243 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3245 #ifdef HAVE_conditional_execution
3246 else
3248 count_or_remove_death_notes (blocks, 1);
3249 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3251 sbitmap_free (blocks);
3252 #endif
3254 #endif /* HAVE_peephole2 */
3256 /* Common predicates for use with define_bypass. */
3258 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3259 data, not the address operand(s) of the store. IN_INSN must be
3260 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3261 SETs inside. */
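/* An illustrative md-file use (the insn reservation names are
   hypothetical):

     (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   grants the shorter latency only when the ALU result feeds the data
   being stored, not the store's address. */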
3263 int
3264 store_data_bypass_p (rtx out_insn, rtx in_insn)
3266 rtx out_set, in_set;
3268 in_set = single_set (in_insn);
3269 gcc_assert (in_set);
3271 if (!MEM_P (SET_DEST (in_set)))
3272 return false;
3274 out_set = single_set (out_insn);
3275 if (out_set)
3277 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3278 return false;
3280 else
3282 rtx out_pat;
3283 int i;
3285 out_pat = PATTERN (out_insn);
3286 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3288 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3290 rtx exp = XVECEXP (out_pat, 0, i);
3292 if (GET_CODE (exp) == CLOBBER)
3293 continue;
3295 gcc_assert (GET_CODE (exp) == SET);
3297 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3298 return false;
3302 return true;
3305 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3306 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3307 set or a PARALLEL of multiple sets; IN_INSN should be single_set for the
3308 result to be meaningful, but for convenience of insn categorization it may be any JUMP or CALL insn. */
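/* Similarly (hypothetical names): a target might write

     (define_bypass 1 "my_compare" "my_cmove" "if_test_bypass_p")

   to shorten the latency only into the IF_THEN_ELSE condition of a
   conditional move. */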
3310 int
3311 if_test_bypass_p (rtx out_insn, rtx in_insn)
3313 rtx out_set, in_set;
3315 in_set = single_set (in_insn);
3316 if (! in_set)
3318 gcc_assert (JUMP_P (in_insn) || CALL_P (in_insn));
3319 return false;
3322 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3323 return false;
3324 in_set = SET_SRC (in_set);
3326 out_set = single_set (out_insn);
3327 if (out_set)
3329 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3330 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3331 return false;
3333 else
3335 rtx out_pat;
3336 int i;
3338 out_pat = PATTERN (out_insn);
3339 gcc_assert (GET_CODE (out_pat) == PARALLEL);
3341 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3343 rtx exp = XVECEXP (out_pat, 0, i);
3345 if (GET_CODE (exp) == CLOBBER)
3346 continue;
3348 gcc_assert (GET_CODE (exp) == SET);
3350 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3351 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3352 return false;
3356 return true;