/* gcc/recog.c — from the official-gcc.git repository
   (blob 56f86345b21e327cb480f9d70054c691dbc38d8e).
   Associated ChangeLog entry: rtl.h (rtunion_def): Constify member `rtstr'.  */
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "insn-config.h"
28 #include "insn-attr.h"
29 #include "insn-flags.h"
30 #include "insn-codes.h"
31 #include "recog.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "function.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "toplev.h"
38 #include "basic-block.h"
39 #include "output.h"
40 #include "resource.h"
/* Address-update code used for a push: pre-decrement when the stack
   grows downward, pre-increment otherwise.  A target may predefine it.  */
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Address-update code used for a pop: the mirror image of
   STACK_PUSH_CODE.  A target may predefine it.  */
#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
58 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
59 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
60 static rtx *find_constant_term_loc PARAMS ((rtx *));
61 static int insn_invalid_p PARAMS ((rtx));
63 /* Nonzero means allow operands to be volatile.
64 This should be 0 if you are generating rtl, such as if you are calling
65 the functions in optabs.c and expmed.c (most of the time).
66 This should be 1 if all valid insns need to be recognized,
67 such as in regclass.c and final.c and reload.c.
69 init_recog and init_recog_no_volatile are responsible for setting this. */
71 int volatile_ok;
73 struct recog_data recog_data;
75 /* Contains a vector of operand_alternative structures for every operand.
76 Set up by preprocess_constraints. */
77 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
79 /* On return from `constrain_operands', indicate which alternative
80 was satisfied. */
82 int which_alternative;
84 /* Nonzero after end of reload pass.
85 Set to 1 or 0 by toplev.c.
86 Controls the significance of (SUBREG (MEM)). */
88 int reload_completed;
90 /* Initialize data used by the function `recog'.
91 This must be called once in the compilation of a function
92 before any insn recognition may be done in the function. */
94 void
95 init_recog_no_volatile ()
97 volatile_ok = 0;
100 void
101 init_recog ()
103 volatile_ok = 1;
106 /* Try recognizing the instruction INSN,
107 and return the code number that results.
108 Remember the code so that repeated calls do not
109 need to spend the time for actual rerecognition.
111 This function is the normal interface to instruction recognition.
112 The automatically-generated function `recog' is normally called
113 through this one. (The only exception is in combine.c.) */
116 recog_memoized (insn)
117 rtx insn;
119 if (INSN_CODE (insn) < 0)
120 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
121 return INSN_CODE (insn);
124 /* Check that X is an insn-body for an `asm' with operands
125 and that the operands mentioned in it are legitimate. */
128 check_asm_operands (x)
129 rtx x;
131 int noperands;
132 rtx *operands;
133 const char **constraints;
134 int i;
136 /* Post-reload, be more strict with things. */
137 if (reload_completed)
139 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
140 extract_insn (make_insn_raw (x));
141 constrain_operands (1);
142 return which_alternative >= 0;
145 noperands = asm_noperands (x);
146 if (noperands < 0)
147 return 0;
148 if (noperands == 0)
149 return 1;
151 operands = (rtx *) alloca (noperands * sizeof (rtx));
152 constraints = (const char **) alloca (noperands * sizeof (char *));
154 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
156 for (i = 0; i < noperands; i++)
158 const char *c = constraints[i];
159 if (c[0] == '%')
160 c++;
161 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
162 c = constraints[c[0] - '0'];
164 if (! asm_operand_ok (operands[i], c))
165 return 0;
168 return 1;
171 /* Static data for the next two routines. */
173 typedef struct change_t
175 rtx object;
176 int old_code;
177 rtx *loc;
178 rtx old;
179 } change_t;
181 static change_t *changes;
182 static int changes_allocated;
184 static int num_changes = 0;
186 /* Validate a proposed change to OBJECT. LOC is the location in the rtl for
187 at which NEW will be placed. If OBJECT is zero, no validation is done,
188 the change is simply made.
190 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
191 will be called with the address and mode as parameters. If OBJECT is
192 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
193 the change in place.
195 IN_GROUP is non-zero if this is part of a group of changes that must be
196 performed as a group. In that case, the changes will be stored. The
197 function `apply_change_group' will validate and apply the changes.
199 If IN_GROUP is zero, this is a single change. Try to recognize the insn
200 or validate the memory reference with the change applied. If the result
201 is not valid for the machine, suppress the change and return zero.
202 Otherwise, perform the change and return 1. */
205 validate_change (object, loc, new, in_group)
206 rtx object;
207 rtx *loc;
208 rtx new;
209 int in_group;
211 rtx old = *loc;
213 if (old == new || rtx_equal_p (old, new))
214 return 1;
216 if (in_group == 0 && num_changes != 0)
217 abort ();
219 *loc = new;
221 /* Save the information describing this change. */
222 if (num_changes >= changes_allocated)
224 if (changes_allocated == 0)
225 /* This value allows for repeated substitutions inside complex
226 indexed addresses, or changes in up to 5 insns. */
227 changes_allocated = MAX_RECOG_OPERANDS * 5;
228 else
229 changes_allocated *= 2;
231 changes =
232 (change_t*) xrealloc (changes,
233 sizeof (change_t) * changes_allocated);
236 changes[num_changes].object = object;
237 changes[num_changes].loc = loc;
238 changes[num_changes].old = old;
240 if (object && GET_CODE (object) != MEM)
242 /* Set INSN_CODE to force rerecognition of insn. Save old code in
243 case invalid. */
244 changes[num_changes].old_code = INSN_CODE (object);
245 INSN_CODE (object) = -1;
248 num_changes++;
250 /* If we are making a group of changes, return 1. Otherwise, validate the
251 change group we made. */
253 if (in_group)
254 return 1;
255 else
256 return apply_change_group ();
259 /* This subroutine of apply_change_group verifies whether the changes to INSN
260 were valid; i.e. whether INSN can still be recognized. */
262 static int
263 insn_invalid_p (insn)
264 rtx insn;
266 int icode = recog_memoized (insn);
267 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
269 if (is_asm && ! check_asm_operands (PATTERN (insn)))
270 return 1;
271 if (! is_asm && icode < 0)
272 return 1;
274 /* After reload, verify that all constraints are satisfied. */
275 if (reload_completed)
277 extract_insn (insn);
279 if (! constrain_operands (1))
280 return 1;
283 return 0;
286 /* Apply a group of changes previously issued with `validate_change'.
287 Return 1 if all changes are valid, zero otherwise. */
290 apply_change_group ()
292 int i;
294 /* The changes have been applied and all INSN_CODEs have been reset to force
295 rerecognition.
297 The changes are valid if we aren't given an object, or if we are
298 given a MEM and it still is a valid address, or if this is in insn
299 and it is recognized. In the latter case, if reload has completed,
300 we also require that the operands meet the constraints for
301 the insn. */
303 for (i = 0; i < num_changes; i++)
305 rtx object = changes[i].object;
307 if (object == 0)
308 continue;
310 if (GET_CODE (object) == MEM)
312 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
313 break;
315 else if (insn_invalid_p (object))
317 rtx pat = PATTERN (object);
319 /* Perhaps we couldn't recognize the insn because there were
320 extra CLOBBERs at the end. If so, try to re-recognize
321 without the last CLOBBER (later iterations will cause each of
322 them to be eliminated, in turn). But don't do this if we
323 have an ASM_OPERAND. */
324 if (GET_CODE (pat) == PARALLEL
325 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
326 && asm_noperands (PATTERN (object)) < 0)
328 rtx newpat;
330 if (XVECLEN (pat, 0) == 2)
331 newpat = XVECEXP (pat, 0, 0);
332 else
334 int j;
336 newpat
337 = gen_rtx_PARALLEL (VOIDmode,
338 gen_rtvec (XVECLEN (pat, 0) - 1));
339 for (j = 0; j < XVECLEN (newpat, 0); j++)
340 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
343 /* Add a new change to this group to replace the pattern
344 with this new pattern. Then consider this change
345 as having succeeded. The change we added will
346 cause the entire call to fail if things remain invalid.
348 Note that this can lose if a later change than the one
349 we are processing specified &XVECEXP (PATTERN (object), 0, X)
350 but this shouldn't occur. */
352 validate_change (object, &PATTERN (object), newpat, 1);
354 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
355 /* If this insn is a CLOBBER or USE, it is always valid, but is
356 never recognized. */
357 continue;
358 else
359 break;
363 if (i == num_changes)
365 num_changes = 0;
366 return 1;
368 else
370 cancel_changes (0);
371 return 0;
375 /* Return the number of changes so far in the current group. */
378 num_validated_changes ()
380 return num_changes;
383 /* Retract the changes numbered NUM and up. */
385 void
386 cancel_changes (num)
387 int num;
389 int i;
391 /* Back out all the changes. Do this in the opposite order in which
392 they were made. */
393 for (i = num_changes - 1; i >= num; i--)
395 *changes[i].loc = changes[i].old;
396 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
397 INSN_CODE (changes[i].object) = changes[i].old_code;
399 num_changes = num;
402 /* Replace every occurrence of FROM in X with TO. Mark each change with
403 validate_change passing OBJECT. */
405 static void
406 validate_replace_rtx_1 (loc, from, to, object)
407 rtx *loc;
408 rtx from, to, object;
410 register int i, j;
411 register const char *fmt;
412 register rtx x = *loc;
413 enum rtx_code code = GET_CODE (x);
415 /* X matches FROM if it is the same rtx or they are both referring to the
416 same register in the same mode. Avoid calling rtx_equal_p unless the
417 operands look similar. */
419 if (x == from
420 || (GET_CODE (x) == REG && GET_CODE (from) == REG
421 && GET_MODE (x) == GET_MODE (from)
422 && REGNO (x) == REGNO (from))
423 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
424 && rtx_equal_p (x, from)))
426 validate_change (object, loc, to, 1);
427 return;
430 /* For commutative or comparison operations, try replacing each argument
431 separately and seeing if we made any changes. If so, put a constant
432 argument last.*/
433 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
435 int prev_changes = num_changes;
437 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
438 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
439 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
441 validate_change (object, loc,
442 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
443 : swap_condition (code),
444 GET_MODE (x), XEXP (x, 1),
445 XEXP (x, 0)),
447 x = *loc;
448 code = GET_CODE (x);
452 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
453 done the substitution, otherwise we won't. */
455 switch (code)
457 case PLUS:
458 /* If we have a PLUS whose second operand is now a CONST_INT, use
459 plus_constant to try to simplify it. */
460 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
461 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
463 return;
465 case MINUS:
466 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
468 validate_change (object, loc,
469 plus_constant (XEXP (x, 0), - INTVAL (to)),
471 return;
473 break;
475 case ZERO_EXTEND:
476 case SIGN_EXTEND:
477 /* In these cases, the operation to be performed depends on the mode
478 of the operand. If we are replacing the operand with a VOIDmode
479 constant, we lose the information. So try to simplify the operation
480 in that case. If it fails, substitute in something that we know
481 won't be recognized. */
482 if (GET_MODE (to) == VOIDmode
483 && (XEXP (x, 0) == from
484 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
485 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
486 && REGNO (XEXP (x, 0)) == REGNO (from))))
488 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
489 GET_MODE (from));
490 if (new == 0)
491 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
493 validate_change (object, loc, new, 1);
494 return;
496 break;
498 case SUBREG:
499 /* If we have a SUBREG of a register that we are replacing and we are
500 replacing it with a MEM, make a new MEM and try replacing the
501 SUBREG with it. Don't do this if the MEM has a mode-dependent address
502 or if we would be widening it. */
504 if (SUBREG_REG (x) == from
505 && GET_CODE (from) == REG
506 && GET_CODE (to) == MEM
507 && ! mode_dependent_address_p (XEXP (to, 0))
508 && ! MEM_VOLATILE_P (to)
509 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
511 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
512 enum machine_mode mode = GET_MODE (x);
513 rtx new;
515 if (BYTES_BIG_ENDIAN)
516 offset += (MIN (UNITS_PER_WORD,
517 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
518 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
520 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
521 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
522 MEM_COPY_ATTRIBUTES (new, to);
523 validate_change (object, loc, new, 1);
524 return;
526 break;
528 case ZERO_EXTRACT:
529 case SIGN_EXTRACT:
530 /* If we are replacing a register with memory, try to change the memory
531 to be the mode required for memory in extract operations (this isn't
532 likely to be an insertion operation; if it was, nothing bad will
533 happen, we might just fail in some cases). */
535 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
536 && GET_CODE (XEXP (x, 1)) == CONST_INT
537 && GET_CODE (XEXP (x, 2)) == CONST_INT
538 && ! mode_dependent_address_p (XEXP (to, 0))
539 && ! MEM_VOLATILE_P (to))
541 enum machine_mode wanted_mode = VOIDmode;
542 enum machine_mode is_mode = GET_MODE (to);
543 int pos = INTVAL (XEXP (x, 2));
545 #ifdef HAVE_extzv
546 if (code == ZERO_EXTRACT)
548 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
549 if (wanted_mode == VOIDmode)
550 wanted_mode = word_mode;
552 #endif
553 #ifdef HAVE_extv
554 if (code == SIGN_EXTRACT)
556 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
557 if (wanted_mode == VOIDmode)
558 wanted_mode = word_mode;
560 #endif
562 /* If we have a narrower mode, we can do something. */
563 if (wanted_mode != VOIDmode
564 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
566 int offset = pos / BITS_PER_UNIT;
567 rtx newmem;
569 /* If the bytes and bits are counted differently, we
570 must adjust the offset. */
571 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
572 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
573 - offset);
575 pos %= GET_MODE_BITSIZE (wanted_mode);
577 newmem = gen_rtx_MEM (wanted_mode,
578 plus_constant (XEXP (to, 0), offset));
579 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
580 MEM_COPY_ATTRIBUTES (newmem, to);
582 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
583 validate_change (object, &XEXP (x, 0), newmem, 1);
587 break;
589 default:
590 break;
593 /* For commutative or comparison operations we've already performed
594 replacements. Don't try to perform them again. */
595 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
597 fmt = GET_RTX_FORMAT (code);
598 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
600 if (fmt[i] == 'e')
601 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
602 else if (fmt[i] == 'E')
603 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
604 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
609 /* Try replacing every occurrence of FROM in INSN with TO. After all
610 changes have been made, validate by seeing if INSN is still valid. */
613 validate_replace_rtx (from, to, insn)
614 rtx from, to, insn;
616 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
617 return apply_change_group ();
620 /* Try replacing every occurrence of FROM in INSN with TO. After all
621 changes have been made, validate by seeing if INSN is still valid. */
623 void
624 validate_replace_rtx_group (from, to, insn)
625 rtx from, to, insn;
627 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
630 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
631 SET_DESTs. After all changes have been made, validate by seeing if
632 INSN is still valid. */
635 validate_replace_src (from, to, insn)
636 rtx from, to, insn;
638 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
639 || GET_CODE (PATTERN (insn)) != SET)
640 abort ();
642 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
643 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
644 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
645 from, to, insn);
646 return apply_change_group ();
649 #ifdef HAVE_cc0
650 /* Return 1 if the insn using CC0 set by INSN does not contain
651 any ordered tests applied to the condition codes.
652 EQ and NE tests do not count. */
655 next_insn_tests_no_inequality (insn)
656 rtx insn;
658 register rtx next = next_cc0_user (insn);
660 /* If there is no next insn, we have to take the conservative choice. */
661 if (next == 0)
662 return 0;
664 return ((GET_CODE (next) == JUMP_INSN
665 || GET_CODE (next) == INSN
666 || GET_CODE (next) == CALL_INSN)
667 && ! inequality_comparisons_p (PATTERN (next)));
670 #if 0 /* This is useless since the insn that sets the cc's
671 must be followed immediately by the use of them. */
672 /* Return 1 if the CC value set up by INSN is not used. */
675 next_insns_test_no_inequality (insn)
676 rtx insn;
678 register rtx next = NEXT_INSN (insn);
680 for (; next != 0; next = NEXT_INSN (next))
682 if (GET_CODE (next) == CODE_LABEL
683 || GET_CODE (next) == BARRIER)
684 return 1;
685 if (GET_CODE (next) == NOTE)
686 continue;
687 if (inequality_comparisons_p (PATTERN (next)))
688 return 0;
689 if (sets_cc0_p (PATTERN (next)) == 1)
690 return 1;
691 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
692 return 1;
694 return 1;
696 #endif
697 #endif
699 /* This is used by find_single_use to locate an rtx that contains exactly one
700 use of DEST, which is typically either a REG or CC0. It returns a
701 pointer to the innermost rtx expression containing DEST. Appearances of
702 DEST that are being used to totally replace it are not counted. */
704 static rtx *
705 find_single_use_1 (dest, loc)
706 rtx dest;
707 rtx *loc;
709 rtx x = *loc;
710 enum rtx_code code = GET_CODE (x);
711 rtx *result = 0;
712 rtx *this_result;
713 int i;
714 const char *fmt;
716 switch (code)
718 case CONST_INT:
719 case CONST:
720 case LABEL_REF:
721 case SYMBOL_REF:
722 case CONST_DOUBLE:
723 case CLOBBER:
724 return 0;
726 case SET:
727 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
728 of a REG that occupies all of the REG, the insn uses DEST if
729 it is mentioned in the destination or the source. Otherwise, we
730 need just check the source. */
731 if (GET_CODE (SET_DEST (x)) != CC0
732 && GET_CODE (SET_DEST (x)) != PC
733 && GET_CODE (SET_DEST (x)) != REG
734 && ! (GET_CODE (SET_DEST (x)) == SUBREG
735 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
736 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
737 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
738 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
739 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
740 break;
742 return find_single_use_1 (dest, &SET_SRC (x));
744 case MEM:
745 case SUBREG:
746 return find_single_use_1 (dest, &XEXP (x, 0));
748 default:
749 break;
752 /* If it wasn't one of the common cases above, check each expression and
753 vector of this code. Look for a unique usage of DEST. */
755 fmt = GET_RTX_FORMAT (code);
756 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
758 if (fmt[i] == 'e')
760 if (dest == XEXP (x, i)
761 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
762 && REGNO (dest) == REGNO (XEXP (x, i))))
763 this_result = loc;
764 else
765 this_result = find_single_use_1 (dest, &XEXP (x, i));
767 if (result == 0)
768 result = this_result;
769 else if (this_result)
770 /* Duplicate usage. */
771 return 0;
773 else if (fmt[i] == 'E')
775 int j;
777 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
779 if (XVECEXP (x, i, j) == dest
780 || (GET_CODE (dest) == REG
781 && GET_CODE (XVECEXP (x, i, j)) == REG
782 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
783 this_result = loc;
784 else
785 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
787 if (result == 0)
788 result = this_result;
789 else if (this_result)
790 return 0;
795 return result;
798 /* See if DEST, produced in INSN, is used only a single time in the
799 sequel. If so, return a pointer to the innermost rtx expression in which
800 it is used.
802 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
804 This routine will return usually zero either before flow is called (because
805 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
806 note can't be trusted).
808 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
809 care about REG_DEAD notes or LOG_LINKS.
811 Otherwise, we find the single use by finding an insn that has a
812 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
813 only referenced once in that insn, we know that it must be the first
814 and last insn referencing DEST. */
816 rtx *
817 find_single_use (dest, insn, ploc)
818 rtx dest;
819 rtx insn;
820 rtx *ploc;
822 rtx next;
823 rtx *result;
824 rtx link;
826 #ifdef HAVE_cc0
827 if (dest == cc0_rtx)
829 next = NEXT_INSN (insn);
830 if (next == 0
831 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
832 return 0;
834 result = find_single_use_1 (dest, &PATTERN (next));
835 if (result && ploc)
836 *ploc = next;
837 return result;
839 #endif
841 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
842 return 0;
844 for (next = next_nonnote_insn (insn);
845 next != 0 && GET_CODE (next) != CODE_LABEL;
846 next = next_nonnote_insn (next))
847 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
849 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
850 if (XEXP (link, 0) == insn)
851 break;
853 if (link)
855 result = find_single_use_1 (dest, &PATTERN (next));
856 if (ploc)
857 *ploc = next;
858 return result;
862 return 0;
865 /* Return 1 if OP is a valid general operand for machine mode MODE.
866 This is either a register reference, a memory reference,
867 or a constant. In the case of a memory reference, the address
868 is checked for general validity for the target machine.
870 Register and memory references must have mode MODE in order to be valid,
871 but some constants have no machine mode and are valid for any mode.
873 If MODE is VOIDmode, OP is checked for validity for whatever mode
874 it has.
876 The main use of this function is as a predicate in match_operand
877 expressions in the machine description.
879 For an explanation of this function's behavior for registers of
880 class NO_REGS, see the comment for `register_operand'. */
883 general_operand (op, mode)
884 register rtx op;
885 enum machine_mode mode;
887 register enum rtx_code code = GET_CODE (op);
888 int mode_altering_drug = 0;
890 if (mode == VOIDmode)
891 mode = GET_MODE (op);
893 /* Don't accept CONST_INT or anything similar
894 if the caller wants something floating. */
895 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
896 && GET_MODE_CLASS (mode) != MODE_INT
897 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
898 return 0;
900 if (CONSTANT_P (op))
901 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
902 #ifdef LEGITIMATE_PIC_OPERAND_P
903 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
904 #endif
905 && LEGITIMATE_CONSTANT_P (op));
907 /* Except for certain constants with VOIDmode, already checked for,
908 OP's mode must match MODE if MODE specifies a mode. */
910 if (GET_MODE (op) != mode)
911 return 0;
913 if (code == SUBREG)
915 #ifdef INSN_SCHEDULING
916 /* On machines that have insn scheduling, we want all memory
917 reference to be explicit, so outlaw paradoxical SUBREGs. */
918 if (GET_CODE (SUBREG_REG (op)) == MEM
919 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
920 return 0;
921 #endif
923 op = SUBREG_REG (op);
924 code = GET_CODE (op);
925 #if 0
926 /* No longer needed, since (SUBREG (MEM...))
927 will load the MEM into a reload reg in the MEM's own mode. */
928 mode_altering_drug = 1;
929 #endif
932 if (code == REG)
933 /* A register whose class is NO_REGS is not a general operand. */
934 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
935 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
937 if (code == MEM)
939 register rtx y = XEXP (op, 0);
940 if (! volatile_ok && MEM_VOLATILE_P (op))
941 return 0;
942 if (GET_CODE (y) == ADDRESSOF)
943 return 1;
944 /* Use the mem's mode, since it will be reloaded thus. */
945 mode = GET_MODE (op);
946 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
949 /* Pretend this is an operand for now; we'll run force_operand
950 on its replacement in fixup_var_refs_1. */
951 if (code == ADDRESSOF)
952 return 1;
954 return 0;
956 win:
957 if (mode_altering_drug)
958 return ! mode_dependent_address_p (XEXP (op, 0));
959 return 1;
962 /* Return 1 if OP is a valid memory address for a memory reference
963 of mode MODE.
965 The main use of this function is as a predicate in match_operand
966 expressions in the machine description. */
969 address_operand (op, mode)
970 register rtx op;
971 enum machine_mode mode;
973 return memory_address_p (mode, op);
976 /* Return 1 if OP is a register reference of mode MODE.
977 If MODE is VOIDmode, accept a register in any mode.
979 The main use of this function is as a predicate in match_operand
980 expressions in the machine description.
982 As a special exception, registers whose class is NO_REGS are
983 not accepted by `register_operand'. The reason for this change
984 is to allow the representation of special architecture artifacts
985 (such as a condition code register) without extending the rtl
986 definitions. Since registers of class NO_REGS cannot be used
987 as registers in any case where register classes are examined,
988 it is most consistent to keep this function from accepting them. */
991 register_operand (op, mode)
992 register rtx op;
993 enum machine_mode mode;
995 if (GET_MODE (op) != mode && mode != VOIDmode)
996 return 0;
998 if (GET_CODE (op) == SUBREG)
1000 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1001 because it is guaranteed to be reloaded into one.
1002 Just make sure the MEM is valid in itself.
1003 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1004 but currently it does result from (SUBREG (REG)...) where the
1005 reg went on the stack.) */
1006 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1007 return general_operand (op, mode);
1009 #ifdef CLASS_CANNOT_CHANGE_SIZE
1010 if (GET_CODE (SUBREG_REG (op)) == REG
1011 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1012 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
1013 REGNO (SUBREG_REG (op)))
1014 && (GET_MODE_SIZE (mode)
1015 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1016 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1017 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1018 return 0;
1019 #endif
1021 op = SUBREG_REG (op);
1024 /* If we have an ADDRESSOF, consider it valid since it will be
1025 converted into something that will not be a MEM. */
1026 if (GET_CODE (op) == ADDRESSOF)
1027 return 1;
1029 /* We don't consider registers whose class is NO_REGS
1030 to be a register operand. */
1031 return (GET_CODE (op) == REG
1032 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1033 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1036 /* Return 1 for a register in Pmode; ignore the tested mode. */
1039 pmode_register_operand (op, mode)
1040 rtx op;
1041 enum machine_mode mode ATTRIBUTE_UNUSED;
1043 return register_operand (op, Pmode);
1046 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1047 or a hard register. */
1050 scratch_operand (op, mode)
1051 register rtx op;
1052 enum machine_mode mode;
1054 if (GET_MODE (op) != mode && mode != VOIDmode)
1055 return 0;
1057 return (GET_CODE (op) == SCRATCH
1058 || (GET_CODE (op) == REG
1059 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1062 /* Return 1 if OP is a valid immediate operand for mode MODE.
1064 The main use of this function is as a predicate in match_operand
1065 expressions in the machine description. */
1068 immediate_operand (op, mode)
1069 register rtx op;
1070 enum machine_mode mode;
1072 /* Don't accept CONST_INT or anything similar
1073 if the caller wants something floating. */
1074 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1075 && GET_MODE_CLASS (mode) != MODE_INT
1076 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1077 return 0;
1079 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1080 result in 0/1. It seems a safe assumption that this is
1081 in range for everyone. */
1082 if (GET_CODE (op) == CONSTANT_P_RTX)
1083 return 1;
1085 return (CONSTANT_P (op)
1086 && (GET_MODE (op) == mode || mode == VOIDmode
1087 || GET_MODE (op) == VOIDmode)
1088 #ifdef LEGITIMATE_PIC_OPERAND_P
1089 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1090 #endif
1091 && LEGITIMATE_CONSTANT_P (op));
1094 /* Returns 1 if OP is an operand that is a CONST_INT. */
1097 const_int_operand (op, mode)
1098 register rtx op;
1099 enum machine_mode mode ATTRIBUTE_UNUSED;
1101 return GET_CODE (op) == CONST_INT;
1104 /* Returns 1 if OP is an operand that is a constant integer or constant
1105 floating-point number. */
1108 const_double_operand (op, mode)
1109 register rtx op;
1110 enum machine_mode mode;
1112 /* Don't accept CONST_INT or anything similar
1113 if the caller wants something floating. */
1114 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1115 && GET_MODE_CLASS (mode) != MODE_INT
1116 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1117 return 0;
1119 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1120 && (mode == VOIDmode || GET_MODE (op) == mode
1121 || GET_MODE (op) == VOIDmode));
1124 /* Return 1 if OP is a general operand that is not an immediate operand. */
1127 nonimmediate_operand (op, mode)
1128 register rtx op;
1129 enum machine_mode mode;
1131 return (general_operand (op, mode) && ! CONSTANT_P (op));
1134 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1137 nonmemory_operand (op, mode)
1138 register rtx op;
1139 enum machine_mode mode;
1141 if (CONSTANT_P (op))
1143 /* Don't accept CONST_INT or anything similar
1144 if the caller wants something floating. */
1145 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1146 && GET_MODE_CLASS (mode) != MODE_INT
1147 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1148 return 0;
1150 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1151 #ifdef LEGITIMATE_PIC_OPERAND_P
1152 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1153 #endif
1154 && LEGITIMATE_CONSTANT_P (op));
1157 if (GET_MODE (op) != mode && mode != VOIDmode)
1158 return 0;
1160 if (GET_CODE (op) == SUBREG)
1162 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1163 because it is guaranteed to be reloaded into one.
1164 Just make sure the MEM is valid in itself.
1165 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1166 but currently it does result from (SUBREG (REG)...) where the
1167 reg went on the stack.) */
1168 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1169 return general_operand (op, mode);
1170 op = SUBREG_REG (op);
1173 /* We don't consider registers whose class is NO_REGS
1174 to be a register operand. */
1175 return (GET_CODE (op) == REG
1176 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1177 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1180 /* Return 1 if OP is a valid operand that stands for pushing a
1181 value of mode MODE onto the stack.
1183 The main use of this function is as a predicate in match_operand
1184 expressions in the machine description. */
1187 push_operand (op, mode)
1188 rtx op;
1189 enum machine_mode mode;
1191 if (GET_CODE (op) != MEM)
1192 return 0;
1194 if (mode != VOIDmode && GET_MODE (op) != mode)
1195 return 0;
1197 op = XEXP (op, 0);
1199 if (GET_CODE (op) != STACK_PUSH_CODE)
1200 return 0;
1202 return XEXP (op, 0) == stack_pointer_rtx;
1205 /* Return 1 if OP is a valid operand that stands for popping a
1206 value of mode MODE off the stack.
1208 The main use of this function is as a predicate in match_operand
1209 expressions in the machine description. */
1212 pop_operand (op, mode)
1213 rtx op;
1214 enum machine_mode mode;
1216 if (GET_CODE (op) != MEM)
1217 return 0;
1219 if (mode != VOIDmode && GET_MODE (op) != mode)
1220 return 0;
1222 op = XEXP (op, 0);
1224 if (GET_CODE (op) != STACK_POP_CODE)
1225 return 0;
1227 return XEXP (op, 0) == stack_pointer_rtx;
1230 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1233 memory_address_p (mode, addr)
1234 enum machine_mode mode ATTRIBUTE_UNUSED;
1235 register rtx addr;
1237 if (GET_CODE (addr) == ADDRESSOF)
1238 return 1;
1240 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1241 return 0;
1243 win:
1244 return 1;
1247 /* Return 1 if OP is a valid memory reference with mode MODE,
1248 including a valid address.
1250 The main use of this function is as a predicate in match_operand
1251 expressions in the machine description. */
1254 memory_operand (op, mode)
1255 register rtx op;
1256 enum machine_mode mode;
1258 rtx inner;
1260 if (! reload_completed)
1261 /* Note that no SUBREG is a memory operand before end of reload pass,
1262 because (SUBREG (MEM...)) forces reloading into a register. */
1263 return GET_CODE (op) == MEM && general_operand (op, mode);
1265 if (mode != VOIDmode && GET_MODE (op) != mode)
1266 return 0;
1268 inner = op;
1269 if (GET_CODE (inner) == SUBREG)
1270 inner = SUBREG_REG (inner);
1272 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1275 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1276 that is, a memory reference whose address is a general_operand. */
1279 indirect_operand (op, mode)
1280 register rtx op;
1281 enum machine_mode mode;
1283 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1284 if (! reload_completed
1285 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1287 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1288 rtx inner = SUBREG_REG (op);
1290 if (BYTES_BIG_ENDIAN)
1291 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1292 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1294 if (mode != VOIDmode && GET_MODE (op) != mode)
1295 return 0;
1297 /* The only way that we can have a general_operand as the resulting
1298 address is if OFFSET is zero and the address already is an operand
1299 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1300 operand. */
1302 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1303 || (GET_CODE (XEXP (inner, 0)) == PLUS
1304 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1305 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1306 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1309 return (GET_CODE (op) == MEM
1310 && memory_operand (op, mode)
1311 && general_operand (XEXP (op, 0), Pmode));
1314 /* Return 1 if this is a comparison operator. This allows the use of
1315 MATCH_OPERATOR to recognize all the branch insns. */
1318 comparison_operator (op, mode)
1319 register rtx op;
1320 enum machine_mode mode;
1322 return ((mode == VOIDmode || GET_MODE (op) == mode)
1323 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1326 /* If BODY is an insn body that uses ASM_OPERANDS,
1327 return the number of operands (both input and output) in the insn.
1328 Otherwise return -1. */
1331 asm_noperands (body)
1332 rtx body;
1334 if (GET_CODE (body) == ASM_OPERANDS)
1335 /* No output operands: return number of input operands. */
1336 return ASM_OPERANDS_INPUT_LENGTH (body);
1337 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1338 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1339 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1340 else if (GET_CODE (body) == PARALLEL
1341 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1342 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1344 /* Multiple output operands, or 1 output plus some clobbers:
1345 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1346 int i;
1347 int n_sets;
1349 /* Count backwards through CLOBBERs to determine number of SETs. */
1350 for (i = XVECLEN (body, 0); i > 0; i--)
1352 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1353 break;
1354 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1355 return -1;
1358 /* N_SETS is now number of output operands. */
1359 n_sets = i;
1361 /* Verify that all the SETs we have
1362 came from a single original asm_operands insn
1363 (so that invalid combinations are blocked). */
1364 for (i = 0; i < n_sets; i++)
1366 rtx elt = XVECEXP (body, 0, i);
1367 if (GET_CODE (elt) != SET)
1368 return -1;
1369 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1370 return -1;
1371 /* If these ASM_OPERANDS rtx's came from different original insns
1372 then they aren't allowed together. */
1373 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1374 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1375 return -1;
1377 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1378 + n_sets);
1380 else if (GET_CODE (body) == PARALLEL
1381 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1383 /* 0 outputs, but some clobbers:
1384 body is [(asm_operands ...) (clobber (reg ...))...]. */
1385 int i;
1387 /* Make sure all the other parallel things really are clobbers. */
1388 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1389 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1390 return -1;
1392 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1394 else
1395 return -1;
1398 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1399 copy its operands (both input and output) into the vector OPERANDS,
1400 the locations of the operands within the insn into the vector OPERAND_LOCS,
1401 and the constraints for the operands into CONSTRAINTS.
1402 Write the modes of the operands into MODES.
1403 Return the assembler-template.
1405 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1406 we don't store that info. */
1408 const char *
1409 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1410 rtx body;
1411 rtx *operands;
1412 rtx **operand_locs;
1413 const char **constraints;
1414 enum machine_mode *modes;
1416 register int i;
1417 int noperands;
1418 const char *template = 0;
1420 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1422 rtx asmop = SET_SRC (body);
1423 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1425 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1427 for (i = 1; i < noperands; i++)
1429 if (operand_locs)
1430 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1431 if (operands)
1432 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1433 if (constraints)
1434 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1435 if (modes)
1436 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1439 /* The output is in the SET.
1440 Its constraint is in the ASM_OPERANDS itself. */
1441 if (operands)
1442 operands[0] = SET_DEST (body);
1443 if (operand_locs)
1444 operand_locs[0] = &SET_DEST (body);
1445 if (constraints)
1446 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1447 if (modes)
1448 modes[0] = GET_MODE (SET_DEST (body));
1449 template = ASM_OPERANDS_TEMPLATE (asmop);
1451 else if (GET_CODE (body) == ASM_OPERANDS)
1453 rtx asmop = body;
1454 /* No output operands: BODY is (asm_operands ....). */
1456 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1458 /* The input operands are found in the 1st element vector. */
1459 /* Constraints for inputs are in the 2nd element vector. */
1460 for (i = 0; i < noperands; i++)
1462 if (operand_locs)
1463 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1464 if (operands)
1465 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1466 if (constraints)
1467 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1468 if (modes)
1469 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1471 template = ASM_OPERANDS_TEMPLATE (asmop);
1473 else if (GET_CODE (body) == PARALLEL
1474 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1476 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1477 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1478 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1479 int nout = 0; /* Does not include CLOBBERs. */
1481 /* At least one output, plus some CLOBBERs. */
1483 /* The outputs are in the SETs.
1484 Their constraints are in the ASM_OPERANDS itself. */
1485 for (i = 0; i < nparallel; i++)
1487 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1488 break; /* Past last SET */
1490 if (operands)
1491 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1492 if (operand_locs)
1493 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1494 if (constraints)
1495 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1496 if (modes)
1497 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1498 nout++;
1501 for (i = 0; i < nin; i++)
1503 if (operand_locs)
1504 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1505 if (operands)
1506 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1507 if (constraints)
1508 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1509 if (modes)
1510 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1513 template = ASM_OPERANDS_TEMPLATE (asmop);
1515 else if (GET_CODE (body) == PARALLEL
1516 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1518 /* No outputs, but some CLOBBERs. */
1520 rtx asmop = XVECEXP (body, 0, 0);
1521 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1523 for (i = 0; i < nin; i++)
1525 if (operand_locs)
1526 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1527 if (operands)
1528 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1529 if (constraints)
1530 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1531 if (modes)
1532 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1535 template = ASM_OPERANDS_TEMPLATE (asmop);
1538 return template;
1541 /* Check if an asm_operand matches it's constraints.
1542 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1545 asm_operand_ok (op, constraint)
1546 rtx op;
1547 const char *constraint;
1549 int result = 0;
1551 /* Use constrain_operands after reload. */
1552 if (reload_completed)
1553 abort ();
1555 while (*constraint)
1557 switch (*constraint++)
1559 case '=':
1560 case '+':
1561 case '*':
1562 case '%':
1563 case '?':
1564 case '!':
1565 case '#':
1566 case '&':
1567 case ',':
1568 break;
1570 case '0': case '1': case '2': case '3': case '4':
1571 case '5': case '6': case '7': case '8': case '9':
1572 /* For best results, our caller should have given us the
1573 proper matching constraint, but we can't actually fail
1574 the check if they didn't. Indicate that results are
1575 inconclusive. */
1576 result = -1;
1577 break;
1579 case 'p':
1580 if (address_operand (op, VOIDmode))
1581 return 1;
1582 break;
1584 case 'm':
1585 case 'V': /* non-offsettable */
1586 if (memory_operand (op, VOIDmode))
1587 return 1;
1588 break;
1590 case 'o': /* offsettable */
1591 if (offsettable_nonstrict_memref_p (op))
1592 return 1;
1593 break;
1595 case '<':
1596 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1597 excepting those that expand_call created. Further, on some
1598 machines which do not have generalized auto inc/dec, an inc/dec
1599 is not a memory_operand.
1601 Match any memory and hope things are resolved after reload. */
1603 if (GET_CODE (op) == MEM
1604 && (1
1605 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1606 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1607 return 1;
1608 break;
1610 case '>':
1611 if (GET_CODE (op) == MEM
1612 && (1
1613 || GET_CODE (XEXP (op, 0)) == PRE_INC
1614 || GET_CODE (XEXP (op, 0)) == POST_INC))
1615 return 1;
1616 break;
1618 case 'E':
1619 #ifndef REAL_ARITHMETIC
1620 /* Match any floating double constant, but only if
1621 we can examine the bits of it reliably. */
1622 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1623 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1624 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1625 break;
1626 #endif
1627 /* FALLTHRU */
1629 case 'F':
1630 if (GET_CODE (op) == CONST_DOUBLE)
1631 return 1;
1632 break;
1634 case 'G':
1635 if (GET_CODE (op) == CONST_DOUBLE
1636 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1637 return 1;
1638 break;
1639 case 'H':
1640 if (GET_CODE (op) == CONST_DOUBLE
1641 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1642 return 1;
1643 break;
1645 case 's':
1646 if (GET_CODE (op) == CONST_INT
1647 || (GET_CODE (op) == CONST_DOUBLE
1648 && GET_MODE (op) == VOIDmode))
1649 break;
1650 /* FALLTHRU */
1652 case 'i':
1653 if (CONSTANT_P (op)
1654 #ifdef LEGITIMATE_PIC_OPERAND_P
1655 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1656 #endif
1658 return 1;
1659 break;
1661 case 'n':
1662 if (GET_CODE (op) == CONST_INT
1663 || (GET_CODE (op) == CONST_DOUBLE
1664 && GET_MODE (op) == VOIDmode))
1665 return 1;
1666 break;
1668 case 'I':
1669 if (GET_CODE (op) == CONST_INT
1670 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1671 return 1;
1672 break;
1673 case 'J':
1674 if (GET_CODE (op) == CONST_INT
1675 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1676 return 1;
1677 break;
1678 case 'K':
1679 if (GET_CODE (op) == CONST_INT
1680 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1681 return 1;
1682 break;
1683 case 'L':
1684 if (GET_CODE (op) == CONST_INT
1685 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1686 return 1;
1687 break;
1688 case 'M':
1689 if (GET_CODE (op) == CONST_INT
1690 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1691 return 1;
1692 break;
1693 case 'N':
1694 if (GET_CODE (op) == CONST_INT
1695 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1696 return 1;
1697 break;
1698 case 'O':
1699 if (GET_CODE (op) == CONST_INT
1700 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1701 return 1;
1702 break;
1703 case 'P':
1704 if (GET_CODE (op) == CONST_INT
1705 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1706 return 1;
1707 break;
1709 case 'X':
1710 return 1;
1712 case 'g':
1713 if (general_operand (op, VOIDmode))
1714 return 1;
1715 break;
1717 #ifdef EXTRA_CONSTRAINT
1718 case 'Q':
1719 if (EXTRA_CONSTRAINT (op, 'Q'))
1720 return 1;
1721 break;
1722 case 'R':
1723 if (EXTRA_CONSTRAINT (op, 'R'))
1724 return 1;
1725 break;
1726 case 'S':
1727 if (EXTRA_CONSTRAINT (op, 'S'))
1728 return 1;
1729 break;
1730 case 'T':
1731 if (EXTRA_CONSTRAINT (op, 'T'))
1732 return 1;
1733 break;
1734 case 'U':
1735 if (EXTRA_CONSTRAINT (op, 'U'))
1736 return 1;
1737 break;
1738 #endif
1740 case 'r':
1741 default:
1742 if (GET_MODE (op) == BLKmode)
1743 break;
1744 if (register_operand (op, VOIDmode))
1745 return 1;
1746 break;
1750 return result;
1753 /* Given an rtx *P, if it is a sum containing an integer constant term,
1754 return the location (type rtx *) of the pointer to that constant term.
1755 Otherwise, return a null pointer. */
1757 static rtx *
1758 find_constant_term_loc (p)
1759 rtx *p;
1761 register rtx *tem;
1762 register enum rtx_code code = GET_CODE (*p);
1764 /* If *P IS such a constant term, P is its location. */
1766 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1767 || code == CONST)
1768 return p;
1770 /* Otherwise, if not a sum, it has no constant term. */
1772 if (GET_CODE (*p) != PLUS)
1773 return 0;
1775 /* If one of the summands is constant, return its location. */
1777 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1778 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1779 return p;
1781 /* Otherwise, check each summand for containing a constant term. */
1783 if (XEXP (*p, 0) != 0)
1785 tem = find_constant_term_loc (&XEXP (*p, 0));
1786 if (tem != 0)
1787 return tem;
1790 if (XEXP (*p, 1) != 0)
1792 tem = find_constant_term_loc (&XEXP (*p, 1));
1793 if (tem != 0)
1794 return tem;
1797 return 0;
1800 /* Return 1 if OP is a memory reference
1801 whose address contains no side effects
1802 and remains valid after the addition
1803 of a positive integer less than the
1804 size of the object being referenced.
1806 We assume that the original address is valid and do not check it.
1808 This uses strict_memory_address_p as a subroutine, so
1809 don't use it before reload. */
1812 offsettable_memref_p (op)
1813 rtx op;
1815 return ((GET_CODE (op) == MEM)
1816 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1819 /* Similar, but don't require a strictly valid mem ref:
1820 consider pseudo-regs valid as index or base regs. */
1823 offsettable_nonstrict_memref_p (op)
1824 rtx op;
1826 return ((GET_CODE (op) == MEM)
1827 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1830 /* Return 1 if Y is a memory address which contains no side effects
1831 and would remain valid after the addition of a positive integer
1832 less than the size of that mode.
1834 We assume that the original address is valid and do not check it.
1835 We do check that it is valid for narrower modes.
1837 If STRICTP is nonzero, we require a strictly valid address,
1838 for the sake of use in reload.c. */
1841 offsettable_address_p (strictp, mode, y)
1842 int strictp;
1843 enum machine_mode mode;
1844 register rtx y;
1846 register enum rtx_code ycode = GET_CODE (y);
1847 register rtx z;
1848 rtx y1 = y;
1849 rtx *y2;
1850 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1851 (strictp ? strict_memory_address_p : memory_address_p);
1853 if (CONSTANT_ADDRESS_P (y))
1854 return 1;
1856 /* Adjusting an offsettable address involves changing to a narrower mode.
1857 Make sure that's OK. */
1859 if (mode_dependent_address_p (y))
1860 return 0;
1862 /* If the expression contains a constant term,
1863 see if it remains valid when max possible offset is added. */
1865 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1867 int good;
1869 y1 = *y2;
1870 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1871 /* Use QImode because an odd displacement may be automatically invalid
1872 for any wider mode. But it should be valid for a single byte. */
1873 good = (*addressp) (QImode, y);
1875 /* In any case, restore old contents of memory. */
1876 *y2 = y1;
1877 return good;
1880 if (ycode == PRE_DEC || ycode == PRE_INC
1881 || ycode == POST_DEC || ycode == POST_INC)
1882 return 0;
1884 /* The offset added here is chosen as the maximum offset that
1885 any instruction could need to add when operating on something
1886 of the specified mode. We assume that if Y and Y+c are
1887 valid addresses then so is Y+d for all 0<d<c. */
1889 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1891 /* Use QImode because an odd displacement may be automatically invalid
1892 for any wider mode. But it should be valid for a single byte. */
1893 return (*addressp) (QImode, z);
1896 /* Return 1 if ADDR is an address-expression whose effect depends
1897 on the mode of the memory reference it is used in.
1899 Autoincrement addressing is a typical example of mode-dependence
1900 because the amount of the increment depends on the mode. */
1903 mode_dependent_address_p (addr)
1904 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1906 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1907 return 0;
1908 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1909 win: ATTRIBUTE_UNUSED_LABEL
1910 return 1;
1913 /* Return 1 if OP is a general operand
1914 other than a memory ref with a mode dependent address. */
1917 mode_independent_operand (op, mode)
1918 enum machine_mode mode;
1919 rtx op;
1921 rtx addr;
1923 if (! general_operand (op, mode))
1924 return 0;
1926 if (GET_CODE (op) != MEM)
1927 return 1;
1929 addr = XEXP (op, 0);
1930 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1931 return 1;
1932 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1933 lose: ATTRIBUTE_UNUSED_LABEL
1934 return 0;
1937 /* Given an operand OP that is a valid memory reference
1938 which satisfies offsettable_memref_p,
1939 return a new memory reference whose address has been adjusted by OFFSET.
1940 OFFSET should be positive and less than the size of the object referenced.
1944 adj_offsettable_operand (op, offset)
1945 rtx op;
1946 int offset;
1948 register enum rtx_code code = GET_CODE (op);
1950 if (code == MEM)
1952 register rtx y = XEXP (op, 0);
1953 register rtx new;
1955 if (CONSTANT_ADDRESS_P (y))
1957 new = gen_rtx_MEM (GET_MODE (op),
1958 plus_constant_for_output (y, offset));
1959 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1960 return new;
1963 if (GET_CODE (y) == PLUS)
1965 rtx z = y;
1966 register rtx *const_loc;
1968 op = copy_rtx (op);
1969 z = XEXP (op, 0);
1970 const_loc = find_constant_term_loc (&z);
1971 if (const_loc)
1973 *const_loc = plus_constant_for_output (*const_loc, offset);
1974 return op;
1978 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1979 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1980 return new;
1982 abort ();
1985 /* Analyze INSN and fill in recog_data. */
1987 void
1988 extract_insn (insn)
1989 rtx insn;
1991 int i;
1992 int icode;
1993 int noperands;
1994 rtx body = PATTERN (insn);
1996 recog_data.n_operands = 0;
1997 recog_data.n_alternatives = 0;
1998 recog_data.n_dups = 0;
2000 switch (GET_CODE (body))
2002 case USE:
2003 case CLOBBER:
2004 case ASM_INPUT:
2005 case ADDR_VEC:
2006 case ADDR_DIFF_VEC:
2007 return;
2009 case SET:
2010 case PARALLEL:
2011 case ASM_OPERANDS:
2012 recog_data.n_operands = noperands = asm_noperands (body);
2013 if (noperands >= 0)
2015 /* This insn is an `asm' with operands. */
2017 /* expand_asm_operands makes sure there aren't too many operands. */
2018 if (noperands > MAX_RECOG_OPERANDS)
2019 abort ();
2021 /* Now get the operand values and constraints out of the insn. */
2022 decode_asm_operands (body, recog_data.operand,
2023 recog_data.operand_loc,
2024 recog_data.constraints,
2025 recog_data.operand_mode);
2026 if (noperands > 0)
2028 const char *p = recog_data.constraints[0];
2029 recog_data.n_alternatives = 1;
2030 while (*p)
2031 recog_data.n_alternatives += (*p++ == ',');
2033 break;
2036 /* FALLTHROUGH */
2038 default:
2039 /* Ordinary insn: recognize it, get the operands via insn_extract
2040 and get the constraints. */
2042 icode = recog_memoized (insn);
2043 if (icode < 0)
2044 fatal_insn_not_found (insn);
2046 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2047 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2048 recog_data.n_dups = insn_data[icode].n_dups;
2050 insn_extract (insn);
2052 for (i = 0; i < noperands; i++)
2054 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2055 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2058 for (i = 0; i < noperands; i++)
2059 recog_data.operand_type[i]
2060 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2061 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2062 : OP_IN);
2064 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2065 abort ();
2068 /* After calling extract_insn, you can use this function to extract some
2069 information from the constraint strings into a more usable form.
2070 The collected data is stored in recog_op_alt. */
2071 void
2072 preprocess_constraints ()
2074 int i;
2076 memset (recog_op_alt, 0, sizeof recog_op_alt);
2077 for (i = 0; i < recog_data.n_operands; i++)
2079 int j;
2080 struct operand_alternative *op_alt;
2081 const char *p = recog_data.constraints[i];
2083 op_alt = recog_op_alt[i];
2085 for (j = 0; j < recog_data.n_alternatives; j++)
2087 op_alt[j].class = NO_REGS;
2088 op_alt[j].constraint = p;
2089 op_alt[j].matches = -1;
2090 op_alt[j].matched = -1;
2092 if (*p == '\0' || *p == ',')
2094 op_alt[j].anything_ok = 1;
2095 continue;
2098 for (;;)
2100 char c = *p++;
2101 if (c == '#')
2103 c = *p++;
2104 while (c != ',' && c != '\0');
2105 if (c == ',' || c == '\0')
2106 break;
2108 switch (c)
2110 case '=': case '+': case '*': case '%':
2111 case 'E': case 'F': case 'G': case 'H':
2112 case 's': case 'i': case 'n':
2113 case 'I': case 'J': case 'K': case 'L':
2114 case 'M': case 'N': case 'O': case 'P':
2115 #ifdef EXTRA_CONSTRAINT
2116 case 'Q': case 'R': case 'S': case 'T': case 'U':
2117 #endif
2118 /* These don't say anything we care about. */
2119 break;
2121 case '?':
2122 op_alt[j].reject += 6;
2123 break;
2124 case '!':
2125 op_alt[j].reject += 600;
2126 break;
2127 case '&':
2128 op_alt[j].earlyclobber = 1;
2129 break;
2131 case '0': case '1': case '2': case '3': case '4':
2132 case '5': case '6': case '7': case '8': case '9':
2133 op_alt[j].matches = c - '0';
2134 op_alt[op_alt[j].matches].matched = i;
2135 break;
2137 case 'm':
2138 op_alt[j].memory_ok = 1;
2139 break;
2140 case '<':
2141 op_alt[j].decmem_ok = 1;
2142 break;
2143 case '>':
2144 op_alt[j].incmem_ok = 1;
2145 break;
2146 case 'V':
2147 op_alt[j].nonoffmem_ok = 1;
2148 break;
2149 case 'o':
2150 op_alt[j].offmem_ok = 1;
2151 break;
2152 case 'X':
2153 op_alt[j].anything_ok = 1;
2154 break;
2156 case 'p':
2157 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2158 break;
2160 case 'g': case 'r':
2161 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2162 break;
2164 default:
2165 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2166 break;
2173 /* Check the operands of an insn against the insn's operand constraints
2174 and return 1 if they are valid.
2175 The information about the insn's operands, constraints, operand modes
2176 etc. is obtained from the global variables set up by extract_insn.
2178 WHICH_ALTERNATIVE is set to a number which indicates which
2179 alternative of constraints was matched: 0 for the first alternative,
2180 1 for the next, etc.
2182 In addition, when two operands are match
2183 and it happens that the output operand is (reg) while the
2184 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2185 make the output operand look like the input.
2186 This is because the output operand is the one the template will print.
2188 This is used in final, just before printing the assembler code and by
2189 the routines that determine an insn's attribute.
2191 If STRICT is a positive non-zero value, it means that we have been
2192 called after reload has been completed. In that case, we must
2193 do all checks strictly. If it is zero, it means that we have been called
2194 before reload has completed. In that case, we first try to see if we can
2195 find an alternative that matches strictly. If not, we try again, this
2196 time assuming that reload will fix up the insn. This provides a "best
2197 guess" for the alternative and is used to compute attributes of insns prior
2198 to reload. A negative value of STRICT is used for this internal call. */
/* Records a pair of operand numbers whose texts must later be swapped
   (output rewritten as *--x to match a pre-inc/dec input); see
   constrain_operands below.  */
struct funny_match
{
  int this, other;
};
2206 constrain_operands (strict)
2207 int strict;
2209 const char *constraints[MAX_RECOG_OPERANDS];
2210 int matching_operands[MAX_RECOG_OPERANDS];
2211 int earlyclobber[MAX_RECOG_OPERANDS];
2212 register int c;
2214 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2215 int funny_match_index;
2217 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2218 return 1;
2220 for (c = 0; c < recog_data.n_operands; c++)
2222 constraints[c] = recog_data.constraints[c];
2223 matching_operands[c] = -1;
2226 which_alternative = 0;
2228 while (which_alternative < recog_data.n_alternatives)
2230 register int opno;
2231 int lose = 0;
2232 funny_match_index = 0;
2234 for (opno = 0; opno < recog_data.n_operands; opno++)
2236 register rtx op = recog_data.operand[opno];
2237 enum machine_mode mode = GET_MODE (op);
2238 register const char *p = constraints[opno];
2239 int offset = 0;
2240 int win = 0;
2241 int val;
2243 earlyclobber[opno] = 0;
2245 /* A unary operator may be accepted by the predicate, but it
2246 is irrelevant for matching constraints. */
2247 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2248 op = XEXP (op, 0);
2250 if (GET_CODE (op) == SUBREG)
2252 if (GET_CODE (SUBREG_REG (op)) == REG
2253 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2254 offset = SUBREG_WORD (op);
2255 op = SUBREG_REG (op);
2258 /* An empty constraint or empty alternative
2259 allows anything which matched the pattern. */
2260 if (*p == 0 || *p == ',')
2261 win = 1;
2263 while (*p && (c = *p++) != ',')
2264 switch (c)
2266 case '?': case '!': case '*': case '%':
2267 case '=': case '+':
2268 break;
2270 case '#':
2271 /* Ignore rest of this alternative as far as
2272 constraint checking is concerned. */
2273 while (*p && *p != ',')
2274 p++;
2275 break;
2277 case '&':
2278 earlyclobber[opno] = 1;
2279 break;
2281 case '0': case '1': case '2': case '3': case '4':
2282 case '5': case '6': case '7': case '8': case '9':
2284 /* This operand must be the same as a previous one.
2285 This kind of constraint is used for instructions such
2286 as add when they take only two operands.
2288 Note that the lower-numbered operand is passed first.
2290 If we are not testing strictly, assume that this constraint
2291 will be satisfied. */
2292 if (strict < 0)
2293 val = 1;
2294 else
2296 rtx op1 = recog_data.operand[c - '0'];
2297 rtx op2 = recog_data.operand[opno];
2299 /* A unary operator may be accepted by the predicate,
2300 but it is irrelevant for matching constraints. */
2301 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2302 op1 = XEXP (op1, 0);
2303 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2304 op2 = XEXP (op2, 0);
2306 val = operands_match_p (op1, op2);
2309 matching_operands[opno] = c - '0';
2310 matching_operands[c - '0'] = opno;
2312 if (val != 0)
2313 win = 1;
2314 /* If output is *x and input is *--x,
2315 arrange later to change the output to *--x as well,
2316 since the output op is the one that will be printed. */
2317 if (val == 2 && strict > 0)
2319 funny_match[funny_match_index].this = opno;
2320 funny_match[funny_match_index++].other = c - '0';
2322 break;
2324 case 'p':
2325 /* p is used for address_operands. When we are called by
2326 gen_reload, no one will have checked that the address is
2327 strictly valid, i.e., that all pseudos requiring hard regs
2328 have gotten them. */
2329 if (strict <= 0
2330 || (strict_memory_address_p (recog_data.operand_mode[opno],
2331 op)))
2332 win = 1;
2333 break;
2335 /* No need to check general_operand again;
2336 it was done in insn-recog.c. */
2337 case 'g':
2338 /* Anything goes unless it is a REG and really has a hard reg
2339 but the hard reg is not in the class GENERAL_REGS. */
2340 if (strict < 0
2341 || GENERAL_REGS == ALL_REGS
2342 || GET_CODE (op) != REG
2343 || (reload_in_progress
2344 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2345 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2346 win = 1;
2347 break;
2349 case 'r':
2350 if (strict < 0
2351 || (strict == 0
2352 && GET_CODE (op) == REG
2353 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2354 || (strict == 0 && GET_CODE (op) == SCRATCH)
2355 || (GET_CODE (op) == REG
2356 && ((GENERAL_REGS == ALL_REGS
2357 && REGNO (op) < FIRST_PSEUDO_REGISTER)
2358 || reg_fits_class_p (op, GENERAL_REGS,
2359 offset, mode))))
2360 win = 1;
2361 break;
2363 case 'X':
2364 /* This is used for a MATCH_SCRATCH in the cases when
2365 we don't actually need anything. So anything goes
2366 any time. */
2367 win = 1;
2368 break;
2370 case 'm':
2371 if (GET_CODE (op) == MEM
2372 /* Before reload, accept what reload can turn into mem. */
2373 || (strict < 0 && CONSTANT_P (op))
2374 /* During reload, accept a pseudo */
2375 || (reload_in_progress && GET_CODE (op) == REG
2376 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2377 win = 1;
2378 break;
2380 case '<':
2381 if (GET_CODE (op) == MEM
2382 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2383 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2384 win = 1;
2385 break;
2387 case '>':
2388 if (GET_CODE (op) == MEM
2389 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2390 || GET_CODE (XEXP (op, 0)) == POST_INC))
2391 win = 1;
2392 break;
2394 case 'E':
2395 #ifndef REAL_ARITHMETIC
2396 /* Match any CONST_DOUBLE, but only if
2397 we can examine the bits of it reliably. */
2398 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2399 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2400 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2401 break;
2402 #endif
2403 if (GET_CODE (op) == CONST_DOUBLE)
2404 win = 1;
2405 break;
2407 case 'F':
2408 if (GET_CODE (op) == CONST_DOUBLE)
2409 win = 1;
2410 break;
2412 case 'G':
2413 case 'H':
2414 if (GET_CODE (op) == CONST_DOUBLE
2415 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2416 win = 1;
2417 break;
2419 case 's':
2420 if (GET_CODE (op) == CONST_INT
2421 || (GET_CODE (op) == CONST_DOUBLE
2422 && GET_MODE (op) == VOIDmode))
2423 break;
2424 case 'i':
2425 if (CONSTANT_P (op))
2426 win = 1;
2427 break;
2429 case 'n':
2430 if (GET_CODE (op) == CONST_INT
2431 || (GET_CODE (op) == CONST_DOUBLE
2432 && GET_MODE (op) == VOIDmode))
2433 win = 1;
2434 break;
2436 case 'I':
2437 case 'J':
2438 case 'K':
2439 case 'L':
2440 case 'M':
2441 case 'N':
2442 case 'O':
2443 case 'P':
2444 if (GET_CODE (op) == CONST_INT
2445 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2446 win = 1;
2447 break;
2449 #ifdef EXTRA_CONSTRAINT
2450 case 'Q':
2451 case 'R':
2452 case 'S':
2453 case 'T':
2454 case 'U':
2455 if (EXTRA_CONSTRAINT (op, c))
2456 win = 1;
2457 break;
2458 #endif
2460 case 'V':
2461 if (GET_CODE (op) == MEM
2462 && ((strict > 0 && ! offsettable_memref_p (op))
2463 || (strict < 0
2464 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2465 || (reload_in_progress
2466 && !(GET_CODE (op) == REG
2467 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2468 win = 1;
2469 break;
2471 case 'o':
2472 if ((strict > 0 && offsettable_memref_p (op))
2473 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2474 /* Before reload, accept what reload can handle. */
2475 || (strict < 0
2476 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2477 /* During reload, accept a pseudo */
2478 || (reload_in_progress && GET_CODE (op) == REG
2479 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2480 win = 1;
2481 break;
2483 default:
2484 if (strict < 0
2485 || (strict == 0
2486 && GET_CODE (op) == REG
2487 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2488 || (strict == 0 && GET_CODE (op) == SCRATCH)
2489 || (GET_CODE (op) == REG
2490 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
2491 offset, mode)))
2492 win = 1;
2495 constraints[opno] = p;
2496 /* If this operand did not win somehow,
2497 this alternative loses. */
2498 if (! win)
2499 lose = 1;
2501 /* This alternative won; the operands are ok.
2502 Change whichever operands this alternative says to change. */
2503 if (! lose)
2505 int opno, eopno;
2507 /* See if any earlyclobber operand conflicts with some other
2508 operand. */
2510 if (strict > 0)
2511 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2512 /* Ignore earlyclobber operands now in memory,
2513 because we would often report failure when we have
2514 two memory operands, one of which was formerly a REG. */
2515 if (earlyclobber[eopno]
2516 && GET_CODE (recog_data.operand[eopno]) == REG)
2517 for (opno = 0; opno < recog_data.n_operands; opno++)
2518 if ((GET_CODE (recog_data.operand[opno]) == MEM
2519 || recog_data.operand_type[opno] != OP_OUT)
2520 && opno != eopno
2521 /* Ignore things like match_operator operands. */
2522 && *recog_data.constraints[opno] != 0
2523 && ! (matching_operands[opno] == eopno
2524 && operands_match_p (recog_data.operand[opno],
2525 recog_data.operand[eopno]))
2526 && ! safe_from_earlyclobber (recog_data.operand[opno],
2527 recog_data.operand[eopno]))
2528 lose = 1;
2530 if (! lose)
2532 while (--funny_match_index >= 0)
2534 recog_data.operand[funny_match[funny_match_index].other]
2535 = recog_data.operand[funny_match[funny_match_index].this];
2538 return 1;
2542 which_alternative++;
2545 /* If we are about to reject this, but we are not to test strictly,
2546 try a very loose test. Only return failure if it fails also. */
2547 if (strict == 0)
2548 return constrain_operands (-1);
2549 else
2550 return 0;
2553 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2554 is a hard reg in class CLASS when its regno is offset by OFFSET
2555 and changed to mode MODE.
2556 If REG occupies multiple hard regs, all of them must be in CLASS. */
2559 reg_fits_class_p (operand, class, offset, mode)
2560 rtx operand;
2561 register enum reg_class class;
2562 int offset;
2563 enum machine_mode mode;
2565 register int regno = REGNO (operand);
2566 if (regno < FIRST_PSEUDO_REGISTER
2567 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2568 regno + offset))
2570 register int sr;
2571 regno += offset;
2572 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2573 sr > 0; sr--)
2574 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2575 regno + sr))
2576 break;
2577 return sr == 0;
2580 return 0;
/* Split all insns in the function.  If UPD_LIFE, update life info after.
   Records which basic blocks contained a split insn in a bitmap and,
   when UPD_LIFE is set, recomputes local life information for exactly
   those blocks.  */

void
split_all_insns (upd_life)
     int upd_life;
{
  sbitmap blocks;
  int changed;
  int i;

  /* One bit per basic block, set when an insn in that block is split.  */
  blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);
  changed = 0;

  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, next;

      for (insn = bb->head; insn ; insn = next)
	{
	  rtx set;

	  /* Can't use `next_real_insn' because that might go across
	     CODE_LABELS and short-out basic blocks.  */
	  next = NEXT_INSN (insn);
	  if (GET_CODE (insn) != INSN)
	    ;

	  /* Don't split no-op move insns.  These should silently
	     disappear later in final.  Splitting such insns would
	     break the code that handles REG_NO_CONFLICT blocks.  */

	  else if ((set = single_set (insn)) != NULL
		   && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
	    {
	      /* Nops get in the way while scheduling, so delete them
		 now if register allocation has already been done.  It
		 is too risky to try to do this before register
		 allocation, and there are unlikely to be very many
		 nops then anyways.  */
	      if (reload_completed)
		{
		  /* Turn the insn into a deleted note in place.  */
		  PUT_CODE (insn, NOTE);
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		  NOTE_SOURCE_FILE (insn) = 0;
		}
	    }
	  else
	    {
	      /* Split insns here to get max fine-grain parallelism.  */
	      rtx first = PREV_INSN (insn);
	      rtx last = try_split (PATTERN (insn), insn, 1);

	      if (last != insn)
		{
		  SET_BIT (blocks, i);
		  changed = 1;

		  /* try_split returns the NOTE that INSN became.  */
		  first = NEXT_INSN (first);
		  /* NOTE(review): FIRST is not read after this point;
		     the assignment above looks like a leftover — confirm
		     before removing.  */
		  PUT_CODE (insn, NOTE);
		  NOTE_SOURCE_FILE (insn) = 0;
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;

		  /* If the original insn ended the block, the last insn
		     of the split sequence does now.  */
		  if (insn == bb->end)
		    {
		      bb->end = last;
		      break;
		    }
		}
	    }

	  if (insn == bb->end)
	    break;
	}

      /* ??? When we're called from just after reload, the CFG is in bad
	 shape, and we may have fallen off the end.  This could be fixed
	 by having reload not try to delete unreachable code.  Otherwise
	 assert we found the end insn.  */
      if (insn == NULL && upd_life)
	abort ();
    }

  if (changed && upd_life)
    {
      /* Rebuild the insn->block map and refresh death notes, but only
	 for the blocks that actually changed.  */
      compute_bb_for_insn (get_max_uid ());
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }

  sbitmap_free (blocks);
}
2678 #ifdef HAVE_peephole2
2679 /* This is the last insn we'll allow recog_next_insn to consider. */
2680 static rtx recog_last_allowed_insn;
2682 /* Return the Nth non-note insn after INSN, or return NULL_RTX if it does
2683 not exist. Used by the recognizer to find the next insn to match in a
2684 multi-insn pattern. */
2686 recog_next_insn (insn, n)
2687 rtx insn;
2688 int n;
2690 if (insn != NULL_RTX)
2692 while (n > 0)
2694 if (insn == recog_last_allowed_insn)
2695 return NULL_RTX;
2697 insn = NEXT_INSN (insn);
2698 if (insn == NULL_RTX)
2699 break;
2701 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2702 n -= 1;
2706 return insn;
2709 /* Perform the peephole2 optimization pass. */
2710 void
2711 peephole2_optimize (dump_file)
2712 FILE *dump_file ATTRIBUTE_UNUSED;
2714 rtx insn, prev;
2715 int i, changed;
2716 sbitmap blocks;
2718 /* ??? TODO: Arrange with resource.c to start at bb->global_live_at_end
2719 and backtrack insn by insn as we proceed through the block. In this
2720 way we'll not need to keep searching forward from the beginning of
2721 basic blocks to find register life info. */
2723 init_resource_info (NULL);
2725 blocks = sbitmap_alloc (n_basic_blocks);
2726 sbitmap_zero (blocks);
2727 changed = 0;
2729 for (i = n_basic_blocks - 1; i >= 0; --i)
2731 basic_block bb = BASIC_BLOCK (i);
2733 /* Since we don't update life info until the very end, we can't
2734 allow matching instructions that we've replaced before. Walk
2735 backward through the basic block so that we don't have to
2736 care about subsequent life info; recog_last_allowed_insn to
2737 restrict how far forward we will allow the match to proceed. */
2739 recog_last_allowed_insn = NEXT_INSN (bb->end);
2740 for (insn = bb->end; ; insn = prev)
2742 prev = PREV_INSN (insn);
2743 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2745 rtx try, last_insn;
2747 try = peephole2_insns (PATTERN (insn), insn, &last_insn);
2748 if (try != NULL)
2750 flow_delete_insn_chain (insn, last_insn);
2751 try = emit_insn_after (try, prev);
2753 if (last_insn == bb->end)
2754 bb->end = try;
2755 if (insn == bb->head)
2756 bb->head = NEXT_INSN (prev);
2758 recog_last_allowed_insn = NEXT_INSN (prev);
2759 SET_BIT (blocks, i);
2760 changed = 1;
2764 if (insn == bb->head)
2765 break;
2769 free_resource_info ();
2771 compute_bb_for_insn (get_max_uid ());
2772 count_or_remove_death_notes (blocks, 1);
2773 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2775 #endif