1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "insn-config.h"
26 #include "insn-attr.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "recog.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "toplev.h"
35 #include "basic-block.h"
37 #ifndef STACK_PUSH_CODE
38 #ifdef STACK_GROWS_DOWNWARD
39 #define STACK_PUSH_CODE PRE_DEC
40 #else
41 #define STACK_PUSH_CODE PRE_INC
42 #endif
43 #endif
45 #ifndef STACK_POP_CODE
46 #ifdef STACK_GROWS_DOWNWARD
47 #define STACK_POP_CODE POST_INC
48 #else
49 #define STACK_POP_CODE POST_DEC
50 #endif
51 #endif
53 static void validate_replace_rtx_1 PROTO((rtx *, rtx, rtx, rtx));
54 static rtx *find_single_use_1 PROTO((rtx, rtx *));
55 static rtx *find_constant_term_loc PROTO((rtx *));
56 static int insn_invalid_p PROTO((rtx));
58 /* Nonzero means allow operands to be volatile.
59 This should be 0 if you are generating rtl, such as if you are calling
60 the functions in optabs.c and expmed.c (most of the time).
61 This should be 1 if all valid insns need to be recognized,
62 such as in regclass.c and final.c and reload.c.
64 init_recog and init_recog_no_volatile are responsible for setting this. */
66 int volatile_ok;
68 /* The next variables are set up by extract_insn. The first four of them
69 are also set up during insn_extract. */
71 /* Indexed by N, gives value of operand N. */
72 rtx recog_operand[MAX_RECOG_OPERANDS];
74 /* Indexed by N, gives location where operand N was found. */
75 rtx *recog_operand_loc[MAX_RECOG_OPERANDS];
77 /* Indexed by N, gives location where the Nth duplicate-appearance of
78 an operand was found. This is something that matched MATCH_DUP. */
79 rtx *recog_dup_loc[MAX_RECOG_OPERANDS];
81 /* Indexed by N, gives the operand number that was duplicated in the
82 Nth duplicate-appearance of an operand. */
83 char recog_dup_num[MAX_RECOG_OPERANDS];
85 /* The number of operands of the insn. */
86 int recog_n_operands;
88 /* The number of MATCH_DUPs in the insn. */
89 int recog_n_dups;
91 /* The number of alternatives in the constraints for the insn. */
92 int recog_n_alternatives;
94 /* Indexed by N, gives the mode of operand N. */
95 enum machine_mode recog_operand_mode[MAX_RECOG_OPERANDS];
97 /* Indexed by N, gives the constraint string for operand N. */
98 const char *recog_constraints[MAX_RECOG_OPERANDS];
100 /* Indexed by N, gives the type (in, out, inout) for operand N. */
101 enum op_type recog_op_type[MAX_RECOG_OPERANDS];
103 #ifndef REGISTER_CONSTRAINTS
104 /* Indexed by N, nonzero if operand N should be an address. */
105 char recog_operand_address_p[MAX_RECOG_OPERANDS];
106 #endif
108 /* Contains a vector of operand_alternative structures for every operand.
109 Set up by preprocess_constraints. */
110 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
112 /* On return from `constrain_operands', indicate which alternative
113 was satisfied. */
115 int which_alternative;
117 /* Nonzero after end of reload pass.
118 Set to 1 or 0 by toplev.c.
119 Controls the significance of (SUBREG (MEM)). */
121 int reload_completed;
123 /* Initialize data used by the function `recog'.
124 This must be called once in the compilation of a function
125 before any insn recognition may be done in the function. */
127 void
128 init_recog_no_volatile ()
130 volatile_ok = 0;
133 void
134 init_recog ()
136 volatile_ok = 1;
139 /* Try recognizing the instruction INSN,
140 and return the code number that results.
141 Remember the code so that repeated calls do not
142 need to spend the time for actual rerecognition.
144 This function is the normal interface to instruction recognition.
145 The automatically-generated function `recog' is normally called
146 through this one. (The only exception is in combine.c.) */
149 recog_memoized (insn)
150 rtx insn;
152 if (INSN_CODE (insn) < 0)
153 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
154 return INSN_CODE (insn);
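/* As an illustrative sketch only (INSN here is a hypothetical local
   variable), a pass that needs to know whether an insn still matches
   some pattern might write

	if (recog_memoized (insn) < 0)
	  ... INSN matches no pattern (e.g. an asm or an invalid insn) ...

   Because the result is cached in INSN_CODE (insn), repeated queries
   on the same insn are cheap.  */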
157 /* Check that X is an insn-body for an `asm' with operands
158 and that the operands mentioned in it are legitimate. */
161 check_asm_operands (x)
162 rtx x;
164 int noperands;
165 rtx *operands;
166 const char **constraints;
167 int i;
169 /* Post-reload, be more strict with things. */
170 if (reload_completed)
172 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
173 extract_insn (make_insn_raw (x));
174 constrain_operands (1);
175 return which_alternative >= 0;
178 noperands = asm_noperands (x);
179 if (noperands < 0)
180 return 0;
181 if (noperands == 0)
182 return 1;
184 operands = (rtx *) alloca (noperands * sizeof (rtx));
185 constraints = (const char **) alloca (noperands * sizeof (char *));
187 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
189 for (i = 0; i < noperands; i++)
191 const char *c = constraints[i];
192 if (c[0] == '%')
193 c++;
194 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
195 c = constraints[c[0] - '0'];
197 if (! asm_operand_ok (operands[i], c))
198 return 0;
201 return 1;
204 /* Static data for the next two routines. */
206 typedef struct change_t
208 rtx object;
209 int old_code;
210 rtx *loc;
211 rtx old;
212 } change_t;
214 static change_t *changes;
215 static int changes_allocated;
217 static int num_changes = 0;
219 /* Validate a proposed change to OBJECT.  LOC is the location in the rtl
220 at which NEW will be placed. If OBJECT is zero, no validation is done,
221 the change is simply made.
223 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
224 will be called with the address and mode as parameters. If OBJECT is
225 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
226 the change in place.
228 IN_GROUP is non-zero if this is part of a group of changes that must be
229 performed as a group. In that case, the changes will be stored. The
230 function `apply_change_group' will validate and apply the changes.
232 If IN_GROUP is zero, this is a single change. Try to recognize the insn
233 or validate the memory reference with the change applied. If the result
234 is not valid for the machine, suppress the change and return zero.
235 Otherwise, perform the change and return 1. */
238 validate_change (object, loc, new, in_group)
239 rtx object;
240 rtx *loc;
241 rtx new;
242 int in_group;
244 rtx old = *loc;
246 if (old == new || rtx_equal_p (old, new))
247 return 1;
249 if (in_group == 0 && num_changes != 0)
250 abort ();
252 *loc = new;
254 /* Save the information describing this change. */
255 if (num_changes >= changes_allocated)
257 if (changes_allocated == 0)
258 /* This value allows for repeated substitutions inside complex
259 indexed addresses, or changes in up to 5 insns. */
260 changes_allocated = MAX_RECOG_OPERANDS * 5;
261 else
262 changes_allocated *= 2;
264 changes =
265 (change_t*) xrealloc (changes,
266 sizeof (change_t) * changes_allocated);
269 changes[num_changes].object = object;
270 changes[num_changes].loc = loc;
271 changes[num_changes].old = old;
273 if (object && GET_CODE (object) != MEM)
275 /* Set INSN_CODE to force rerecognition of insn. Save old code in
276 case invalid. */
277 changes[num_changes].old_code = INSN_CODE (object);
278 INSN_CODE (object) = -1;
281 num_changes++;
283 /* If we are making a group of changes, return 1. Otherwise, validate the
284 change group we made. */
286 if (in_group)
287 return 1;
288 else
289 return apply_change_group ();
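/* An illustrative sketch, with INSN and NEW_SRC hypothetical: to replace
   the source of a SET and keep the change only if the insn remains
   recognizable, a caller might write

	if (validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0))
	  ... the change was applied and INSN still matches a pattern ...
	else
	  ... the change was rejected and the old rtl was put back ...

   Here IN_GROUP is zero, so the change is validated immediately.  */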
292 /* This subroutine of apply_change_group verifies whether the changes to INSN
293 were valid; i.e. whether INSN can still be recognized. */
295 static int
296 insn_invalid_p (insn)
297 rtx insn;
299 int icode = recog_memoized (insn);
300 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
302 if (is_asm && ! check_asm_operands (PATTERN (insn)))
303 return 1;
304 if (! is_asm && icode < 0)
305 return 1;
307 /* After reload, verify that all constraints are satisfied. */
308 if (reload_completed)
310 extract_insn (insn);
312 if (! constrain_operands (1))
313 return 1;
316 return 0;
319 /* Apply a group of changes previously issued with `validate_change'.
320 Return 1 if all changes are valid, zero otherwise. */
323 apply_change_group ()
325 int i;
327 /* The changes have been applied and all INSN_CODEs have been reset to force
328 rerecognition.
330 The changes are valid if we aren't given an object, or if we are
331 given a MEM and it still is a valid address, or if this is an insn
332 and it is recognized. In the latter case, if reload has completed,
333 we also require that the operands meet the constraints for
334 the insn. */
336 for (i = 0; i < num_changes; i++)
338 rtx object = changes[i].object;
340 if (object == 0)
341 continue;
343 if (GET_CODE (object) == MEM)
345 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
346 break;
348 else if (insn_invalid_p (object))
350 rtx pat = PATTERN (object);
352 /* Perhaps we couldn't recognize the insn because there were
353 extra CLOBBERs at the end. If so, try to re-recognize
354 without the last CLOBBER (later iterations will cause each of
355 them to be eliminated, in turn). But don't do this if we
356 have an ASM_OPERAND. */
357 if (GET_CODE (pat) == PARALLEL
358 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
359 && asm_noperands (PATTERN (object)) < 0)
361 rtx newpat;
363 if (XVECLEN (pat, 0) == 2)
364 newpat = XVECEXP (pat, 0, 0);
365 else
367 int j;
369 newpat = gen_rtx_PARALLEL (VOIDmode,
370 gen_rtvec (XVECLEN (pat, 0) - 1));
371 for (j = 0; j < XVECLEN (newpat, 0); j++)
372 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
375 /* Add a new change to this group to replace the pattern
376 with this new pattern. Then consider this change
377 as having succeeded. The change we added will
378 cause the entire call to fail if things remain invalid.
380 Note that this can lose if a later change than the one
381 we are processing specified &XVECEXP (PATTERN (object), 0, X)
382 but this shouldn't occur. */
384 validate_change (object, &PATTERN (object), newpat, 1);
386 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
387 /* If this insn is a CLOBBER or USE, it is always valid, but is
388 never recognized. */
389 continue;
390 else
391 break;
395 if (i == num_changes)
397 num_changes = 0;
398 return 1;
400 else
402 cancel_changes (0);
403 return 0;
407 /* Return the number of changes so far in the current group. */
410 num_validated_changes ()
412 return num_changes;
415 /* Retract the changes numbered NUM and up. */
417 void
418 cancel_changes (num)
419 int num;
421 int i;
423 /* Back out all the changes. Do this in the opposite order in which
424 they were made. */
425 for (i = num_changes - 1; i >= num; i--)
427 *changes[i].loc = changes[i].old;
428 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
429 INSN_CODE (changes[i].object) = changes[i].old_code;
431 num_changes = num;
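/* A sketch of the grouped protocol, with INSN, NEW_DEST and NEW_SRC
   hypothetical: queue several tentative changes with IN_GROUP nonzero,
   then validate them all at once.

	validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
	validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
	if (! apply_change_group ())
	  ... every queued change has been undone ...

   A caller can also remember num_validated_changes () at some point and
   later pass that value to cancel_changes to roll back only the changes
   queued after that point.  */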
434 /* Replace every occurrence of FROM in X with TO. Mark each change with
435 validate_change passing OBJECT. */
437 static void
438 validate_replace_rtx_1 (loc, from, to, object)
439 rtx *loc;
440 rtx from, to, object;
442 register int i, j;
443 register char *fmt;
444 register rtx x = *loc;
445 enum rtx_code code = GET_CODE (x);
447 /* X matches FROM if it is the same rtx or they are both referring to the
448 same register in the same mode. Avoid calling rtx_equal_p unless the
449 operands look similar. */
451 if (x == from
452 || (GET_CODE (x) == REG && GET_CODE (from) == REG
453 && GET_MODE (x) == GET_MODE (from)
454 && REGNO (x) == REGNO (from))
455 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
456 && rtx_equal_p (x, from)))
458 validate_change (object, loc, to, 1);
459 return;
462 /* For commutative or comparison operations, try replacing each argument
463 separately and seeing if we made any changes. If so, put a constant
464 argument last. */
465 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
467 int prev_changes = num_changes;
469 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
470 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
471 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
473 validate_change (object, loc,
474 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
475 : swap_condition (code),
476 GET_MODE (x), XEXP (x, 1),
477 XEXP (x, 0)),
478 1);
479 x = *loc;
480 code = GET_CODE (x);
484 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
485 done the substitution, otherwise we won't. */
487 switch (code)
489 case PLUS:
490 /* If we have a PLUS whose second operand is now a CONST_INT, use
491 plus_constant to try to simplify it. */
492 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
493 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
494 1);
495 return;
497 case MINUS:
498 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
500 validate_change (object, loc,
501 plus_constant (XEXP (x, 0), - INTVAL (to)),
502 1);
503 return;
505 break;
507 case ZERO_EXTEND:
508 case SIGN_EXTEND:
509 /* In these cases, the operation to be performed depends on the mode
510 of the operand. If we are replacing the operand with a VOIDmode
511 constant, we lose the information. So try to simplify the operation
512 in that case. If it fails, substitute in something that we know
513 won't be recognized. */
514 if (GET_MODE (to) == VOIDmode
515 && (XEXP (x, 0) == from
516 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
517 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
518 && REGNO (XEXP (x, 0)) == REGNO (from))))
520 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
521 GET_MODE (from));
522 if (new == 0)
523 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
525 validate_change (object, loc, new, 1);
526 return;
528 break;
530 case SUBREG:
531 /* If we have a SUBREG of a register that we are replacing and we are
532 replacing it with a MEM, make a new MEM and try replacing the
533 SUBREG with it. Don't do this if the MEM has a mode-dependent address
534 or if we would be widening it. */
536 if (SUBREG_REG (x) == from
537 && GET_CODE (from) == REG
538 && GET_CODE (to) == MEM
539 && ! mode_dependent_address_p (XEXP (to, 0))
540 && ! MEM_VOLATILE_P (to)
541 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
543 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
544 enum machine_mode mode = GET_MODE (x);
545 rtx new;
547 if (BYTES_BIG_ENDIAN)
548 offset += (MIN (UNITS_PER_WORD,
549 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
550 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
552 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
553 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
554 MEM_COPY_ATTRIBUTES (new, to);
555 validate_change (object, loc, new, 1);
556 return;
558 break;
560 case ZERO_EXTRACT:
561 case SIGN_EXTRACT:
562 /* If we are replacing a register with memory, try to change the memory
563 to be the mode required for memory in extract operations (this isn't
564 likely to be an insertion operation; if it was, nothing bad will
565 happen, we might just fail in some cases). */
567 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
568 && GET_CODE (XEXP (x, 1)) == CONST_INT
569 && GET_CODE (XEXP (x, 2)) == CONST_INT
570 && ! mode_dependent_address_p (XEXP (to, 0))
571 && ! MEM_VOLATILE_P (to))
573 enum machine_mode wanted_mode = VOIDmode;
574 enum machine_mode is_mode = GET_MODE (to);
575 int pos = INTVAL (XEXP (x, 2));
577 #ifdef HAVE_extzv
578 if (code == ZERO_EXTRACT)
580 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
581 if (wanted_mode == VOIDmode)
582 wanted_mode = word_mode;
584 #endif
585 #ifdef HAVE_extv
586 if (code == SIGN_EXTRACT)
588 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
589 if (wanted_mode == VOIDmode)
590 wanted_mode = word_mode;
592 #endif
594 /* If we have a narrower mode, we can do something. */
595 if (wanted_mode != VOIDmode
596 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
598 int offset = pos / BITS_PER_UNIT;
599 rtx newmem;
601 /* If the bytes and bits are counted differently, we
602 must adjust the offset. */
603 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
604 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
605 - offset);
607 pos %= GET_MODE_BITSIZE (wanted_mode);
609 newmem = gen_rtx_MEM (wanted_mode,
610 plus_constant (XEXP (to, 0), offset));
611 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
612 MEM_COPY_ATTRIBUTES (newmem, to);
614 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
615 validate_change (object, &XEXP (x, 0), newmem, 1);
619 break;
621 default:
622 break;
625 /* For commutative or comparison operations we've already performed
626 replacements. Don't try to perform them again. */
627 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
629 fmt = GET_RTX_FORMAT (code);
630 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
632 if (fmt[i] == 'e')
633 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
634 else if (fmt[i] == 'E')
635 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
636 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
641 /* Try replacing every occurrence of FROM in INSN with TO. After all
642 changes have been made, validate by seeing if INSN is still valid. */
645 validate_replace_rtx (from, to, insn)
646 rtx from, to, insn;
648 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
649 return apply_change_group ();
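/* For example (FROM_REG, TO_REG and INSN hypothetical), a pass that wants
   to substitute one register for another throughout an insn and keep the
   result only if the insn is still valid might use

	if (validate_replace_rtx (from_reg, to_reg, insn))
	  ... every occurrence was replaced and INSN still matches ...

   validate_replace_rtx_group, below, queues the same replacements without
   validating them, so several related changes can be checked together
   with apply_change_group.  */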
652 /* Try replacing every occurrence of FROM in INSN with TO. After all
653 changes have been made, validate by seeing if INSN is still valid. */
655 void
656 validate_replace_rtx_group (from, to, insn)
657 rtx from, to, insn;
659 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
662 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
663 SET_DESTs. After all changes have been made, validate by seeing if
664 INSN is still valid. */
667 validate_replace_src (from, to, insn)
668 rtx from, to, insn;
670 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
671 || GET_CODE (PATTERN (insn)) != SET)
672 abort ();
674 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
675 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
676 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
677 from, to, insn);
678 return apply_change_group ();
681 #ifdef HAVE_cc0
682 /* Return 1 if the insn using CC0 set by INSN does not contain
683 any ordered tests applied to the condition codes.
684 EQ and NE tests do not count. */
687 next_insn_tests_no_inequality (insn)
688 rtx insn;
690 register rtx next = next_cc0_user (insn);
692 /* If there is no next insn, we have to take the conservative choice. */
693 if (next == 0)
694 return 0;
696 return ((GET_CODE (next) == JUMP_INSN
697 || GET_CODE (next) == INSN
698 || GET_CODE (next) == CALL_INSN)
699 && ! inequality_comparisons_p (PATTERN (next)));
702 #if 0 /* This is useless since the insn that sets the cc's
703 must be followed immediately by the use of them. */
704 /* Return 1 if the CC value set up by INSN is not used. */
707 next_insns_test_no_inequality (insn)
708 rtx insn;
710 register rtx next = NEXT_INSN (insn);
712 for (; next != 0; next = NEXT_INSN (next))
714 if (GET_CODE (next) == CODE_LABEL
715 || GET_CODE (next) == BARRIER)
716 return 1;
717 if (GET_CODE (next) == NOTE)
718 continue;
719 if (inequality_comparisons_p (PATTERN (next)))
720 return 0;
721 if (sets_cc0_p (PATTERN (next)) == 1)
722 return 1;
723 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
724 return 1;
726 return 1;
728 #endif
729 #endif
731 /* This is used by find_single_use to locate an rtx that contains exactly one
732 use of DEST, which is typically either a REG or CC0. It returns a
733 pointer to the innermost rtx expression containing DEST. Appearances of
734 DEST that are being used to totally replace it are not counted. */
736 static rtx *
737 find_single_use_1 (dest, loc)
738 rtx dest;
739 rtx *loc;
741 rtx x = *loc;
742 enum rtx_code code = GET_CODE (x);
743 rtx *result = 0;
744 rtx *this_result;
745 int i;
746 char *fmt;
748 switch (code)
750 case CONST_INT:
751 case CONST:
752 case LABEL_REF:
753 case SYMBOL_REF:
754 case CONST_DOUBLE:
755 case CLOBBER:
756 return 0;
758 case SET:
759 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
760 of a REG that occupies all of the REG, the insn uses DEST if
761 it is mentioned in the destination or the source. Otherwise, we
762 need only check the source.
763 if (GET_CODE (SET_DEST (x)) != CC0
764 && GET_CODE (SET_DEST (x)) != PC
765 && GET_CODE (SET_DEST (x)) != REG
766 && ! (GET_CODE (SET_DEST (x)) == SUBREG
767 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
768 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
769 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
770 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
771 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
772 break;
774 return find_single_use_1 (dest, &SET_SRC (x));
776 case MEM:
777 case SUBREG:
778 return find_single_use_1 (dest, &XEXP (x, 0));
780 default:
781 break;
784 /* If it wasn't one of the common cases above, check each expression and
785 vector of this code. Look for a unique usage of DEST. */
787 fmt = GET_RTX_FORMAT (code);
788 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
790 if (fmt[i] == 'e')
792 if (dest == XEXP (x, i)
793 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
794 && REGNO (dest) == REGNO (XEXP (x, i))))
795 this_result = loc;
796 else
797 this_result = find_single_use_1 (dest, &XEXP (x, i));
799 if (result == 0)
800 result = this_result;
801 else if (this_result)
802 /* Duplicate usage. */
803 return 0;
805 else if (fmt[i] == 'E')
807 int j;
809 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
811 if (XVECEXP (x, i, j) == dest
812 || (GET_CODE (dest) == REG
813 && GET_CODE (XVECEXP (x, i, j)) == REG
814 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
815 this_result = loc;
816 else
817 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
819 if (result == 0)
820 result = this_result;
821 else if (this_result)
822 return 0;
827 return result;
830 /* See if DEST, produced in INSN, is used only a single time in the
831 sequel. If so, return a pointer to the innermost rtx expression in which
832 it is used.
834 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
836 This routine will usually return zero either before flow is called (because
837 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
838 note can't be trusted).
840 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
841 care about REG_DEAD notes or LOG_LINKS.
843 Otherwise, we find the single use by finding an insn that has a
844 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
845 only referenced once in that insn, we know that it must be the first
846 and last insn referencing DEST. */
848 rtx *
849 find_single_use (dest, insn, ploc)
850 rtx dest;
851 rtx insn;
852 rtx *ploc;
854 rtx next;
855 rtx *result;
856 rtx link;
858 #ifdef HAVE_cc0
859 if (dest == cc0_rtx)
861 next = NEXT_INSN (insn);
862 if (next == 0
863 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
864 return 0;
866 result = find_single_use_1 (dest, &PATTERN (next));
867 if (result && ploc)
868 *ploc = next;
869 return result;
871 #endif
873 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
874 return 0;
876 for (next = next_nonnote_insn (insn);
877 next != 0 && GET_CODE (next) != CODE_LABEL;
878 next = next_nonnote_insn (next))
879 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
881 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
882 if (XEXP (link, 0) == insn)
883 break;
885 if (link)
887 result = find_single_use_1 (dest, &PATTERN (next));
888 if (ploc)
889 *ploc = next;
890 return result;
894 return 0;
897 /* Return 1 if OP is a valid general operand for machine mode MODE.
898 This is either a register reference, a memory reference,
899 or a constant. In the case of a memory reference, the address
900 is checked for general validity for the target machine.
902 Register and memory references must have mode MODE in order to be valid,
903 but some constants have no machine mode and are valid for any mode.
905 If MODE is VOIDmode, OP is checked for validity for whatever mode
906 it has.
908 The main use of this function is as a predicate in match_operand
909 expressions in the machine description.
911 For an explanation of this function's behavior for registers of
912 class NO_REGS, see the comment for `register_operand'. */
915 general_operand (op, mode)
916 register rtx op;
917 enum machine_mode mode;
919 register enum rtx_code code = GET_CODE (op);
920 int mode_altering_drug = 0;
922 if (mode == VOIDmode)
923 mode = GET_MODE (op);
925 /* Don't accept CONST_INT or anything similar
926 if the caller wants something floating. */
927 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
928 && GET_MODE_CLASS (mode) != MODE_INT
929 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
930 return 0;
932 if (CONSTANT_P (op))
933 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
934 #ifdef LEGITIMATE_PIC_OPERAND_P
935 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
936 #endif
937 && LEGITIMATE_CONSTANT_P (op));
939 /* Except for certain constants with VOIDmode, already checked for,
940 OP's mode must match MODE if MODE specifies a mode. */
942 if (GET_MODE (op) != mode)
943 return 0;
945 if (code == SUBREG)
947 #ifdef INSN_SCHEDULING
948 /* On machines that have insn scheduling, we want all memory
949 references to be explicit, so outlaw paradoxical SUBREGs.
950 if (GET_CODE (SUBREG_REG (op)) == MEM
951 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
952 return 0;
953 #endif
955 op = SUBREG_REG (op);
956 code = GET_CODE (op);
957 #if 0
958 /* No longer needed, since (SUBREG (MEM...))
959 will load the MEM into a reload reg in the MEM's own mode. */
960 mode_altering_drug = 1;
961 #endif
964 if (code == REG)
965 /* A register whose class is NO_REGS is not a general operand. */
966 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
967 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
969 if (code == MEM)
971 register rtx y = XEXP (op, 0);
972 if (! volatile_ok && MEM_VOLATILE_P (op))
973 return 0;
974 if (GET_CODE (y) == ADDRESSOF)
975 return 1;
976 /* Use the mem's mode, since it will be reloaded thus. */
977 mode = GET_MODE (op);
978 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
981 /* Pretend this is an operand for now; we'll run force_operand
982 on its replacement in fixup_var_refs_1. */
983 if (code == ADDRESSOF)
984 return 1;
986 return 0;
988 win:
989 if (mode_altering_drug)
990 return ! mode_dependent_address_p (XEXP (op, 0));
991 return 1;
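/* As an illustration of the intended use, a machine description pattern
   such as this fragment (purely illustrative, not taken from any
   particular port)

	(define_insn "..."
	  [(set (match_operand:SI 0 "general_operand" "=g")
		(match_operand:SI 1 "general_operand" "g"))]
	  ...)

   makes the generated recognizer call general_operand on each candidate
   operand with SImode as the expected mode.  */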
994 /* Return 1 if OP is a valid memory address for a memory reference
995 of mode MODE.
997 The main use of this function is as a predicate in match_operand
998 expressions in the machine description. */
1001 address_operand (op, mode)
1002 register rtx op;
1003 enum machine_mode mode;
1005 return memory_address_p (mode, op);
1008 /* Return 1 if OP is a register reference of mode MODE.
1009 If MODE is VOIDmode, accept a register in any mode.
1011 The main use of this function is as a predicate in match_operand
1012 expressions in the machine description.
1014 As a special exception, registers whose class is NO_REGS are
1015 not accepted by `register_operand'. The reason for this change
1016 is to allow the representation of special architecture artifacts
1017 (such as a condition code register) without extending the rtl
1018 definitions. Since registers of class NO_REGS cannot be used
1019 as registers in any case where register classes are examined,
1020 it is most consistent to keep this function from accepting them. */
1023 register_operand (op, mode)
1024 register rtx op;
1025 enum machine_mode mode;
1027 if (GET_MODE (op) != mode && mode != VOIDmode)
1028 return 0;
1030 if (GET_CODE (op) == SUBREG)
1032 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1033 because it is guaranteed to be reloaded into one.
1034 Just make sure the MEM is valid in itself.
1035 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1036 but currently it does result from (SUBREG (REG)...) where the
1037 reg went on the stack.) */
1038 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1039 return general_operand (op, mode);
1041 #ifdef CLASS_CANNOT_CHANGE_SIZE
1042 if (GET_CODE (SUBREG_REG (op)) == REG
1043 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1044 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
1045 REGNO (SUBREG_REG (op)))
1046 && (GET_MODE_SIZE (mode)
1047 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1048 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1049 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1050 return 0;
1051 #endif
1053 op = SUBREG_REG (op);
1056 /* We don't consider registers whose class is NO_REGS
1057 to be a register operand. */
1058 return (GET_CODE (op) == REG
1059 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1060 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1063 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1064 or a hard register. */
1067 scratch_operand (op, mode)
1068 register rtx op;
1069 enum machine_mode mode;
1071 return (GET_MODE (op) == mode
1072 && (GET_CODE (op) == SCRATCH
1073 || (GET_CODE (op) == REG
1074 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
1077 /* Return 1 if OP is a valid immediate operand for mode MODE.
1079 The main use of this function is as a predicate in match_operand
1080 expressions in the machine description. */
1083 immediate_operand (op, mode)
1084 register rtx op;
1085 enum machine_mode mode;
1087 /* Don't accept CONST_INT or anything similar
1088 if the caller wants something floating. */
1089 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1090 && GET_MODE_CLASS (mode) != MODE_INT
1091 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1092 return 0;
1094 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1095 result in 0/1. It seems a safe assumption that this is
1096 in range for everyone. */
1097 if (GET_CODE (op) == CONSTANT_P_RTX)
1098 return 1;
1100 return (CONSTANT_P (op)
1101 && (GET_MODE (op) == mode || mode == VOIDmode
1102 || GET_MODE (op) == VOIDmode)
1103 #ifdef LEGITIMATE_PIC_OPERAND_P
1104 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1105 #endif
1106 && LEGITIMATE_CONSTANT_P (op));
1109 /* Returns 1 if OP is an operand that is a CONST_INT. */
1112 const_int_operand (op, mode)
1113 register rtx op;
1114 enum machine_mode mode ATTRIBUTE_UNUSED;
1116 return GET_CODE (op) == CONST_INT;
1119 /* Returns 1 if OP is an operand that is a constant integer or constant
1120 floating-point number. */
1123 const_double_operand (op, mode)
1124 register rtx op;
1125 enum machine_mode mode;
1127 /* Don't accept CONST_INT or anything similar
1128 if the caller wants something floating. */
1129 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1130 && GET_MODE_CLASS (mode) != MODE_INT
1131 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1132 return 0;
1134 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1135 && (mode == VOIDmode || GET_MODE (op) == mode
1136 || GET_MODE (op) == VOIDmode));
1139 /* Return 1 if OP is a general operand that is not an immediate operand. */
1142 nonimmediate_operand (op, mode)
1143 register rtx op;
1144 enum machine_mode mode;
1146 return (general_operand (op, mode) && ! CONSTANT_P (op));
1149 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1152 nonmemory_operand (op, mode)
1153 register rtx op;
1154 enum machine_mode mode;
1156 if (CONSTANT_P (op))
1158 /* Don't accept CONST_INT or anything similar
1159 if the caller wants something floating. */
1160 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1161 && GET_MODE_CLASS (mode) != MODE_INT
1162 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1163 return 0;
1165 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1166 #ifdef LEGITIMATE_PIC_OPERAND_P
1167 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1168 #endif
1169 && LEGITIMATE_CONSTANT_P (op));
1172 if (GET_MODE (op) != mode && mode != VOIDmode)
1173 return 0;
1175 if (GET_CODE (op) == SUBREG)
1177 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1178 because it is guaranteed to be reloaded into one.
1179 Just make sure the MEM is valid in itself.
1180 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1181 but currently it does result from (SUBREG (REG)...) where the
1182 reg went on the stack.) */
1183 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1184 return general_operand (op, mode);
1185 op = SUBREG_REG (op);
1188 /* We don't consider registers whose class is NO_REGS
1189 to be a register operand. */
1190 return (GET_CODE (op) == REG
1191 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1192 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1195 /* Return 1 if OP is a valid operand that stands for pushing a
1196 value of mode MODE onto the stack.
1198 The main use of this function is as a predicate in match_operand
1199 expressions in the machine description. */
1202 push_operand (op, mode)
1203 rtx op;
1204 enum machine_mode mode;
1206 if (GET_CODE (op) != MEM)
1207 return 0;
1209 if (mode != VOIDmode && GET_MODE (op) != mode)
1210 return 0;
1212 op = XEXP (op, 0);
1214 if (GET_CODE (op) != STACK_PUSH_CODE)
1215 return 0;
1217 return XEXP (op, 0) == stack_pointer_rtx;
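/* For instance, on a 32-bit target where the stack grows downward, so
   that STACK_PUSH_CODE defaults to PRE_DEC above, a valid SImode push
   operand has the form

	(mem:SI (pre_dec:SI (reg:SI sp)))

   where sp stands for the stack pointer register; push_operand checks
   exactly that shape.  */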
1220 /* Return 1 if OP is a valid operand that stands for popping a
1221 value of mode MODE off the stack.
1223 The main use of this function is as a predicate in match_operand
1224 expressions in the machine description. */
1227 pop_operand (op, mode)
1228 rtx op;
1229 enum machine_mode mode;
1231 if (GET_CODE (op) != MEM)
1232 return 0;
1234 if (mode != VOIDmode && GET_MODE (op) != mode)
1235 return 0;
1237 op = XEXP (op, 0);
1239 if (GET_CODE (op) != STACK_POP_CODE)
1240 return 0;
1242 return XEXP (op, 0) == stack_pointer_rtx;
1245 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1248 memory_address_p (mode, addr)
1249 enum machine_mode mode;
1250 register rtx addr;
1252 if (GET_CODE (addr) == ADDRESSOF)
1253 return 1;
1255 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1256 return 0;
1258 win:
1259 return 1;
1262 /* Return 1 if OP is a valid memory reference with mode MODE,
1263 including a valid address.
1265 The main use of this function is as a predicate in match_operand
1266 expressions in the machine description. */
1269 memory_operand (op, mode)
1270 register rtx op;
1271 enum machine_mode mode;
1273 rtx inner;
1275 if (! reload_completed)
1276 /* Note that no SUBREG is a memory operand before end of reload pass,
1277 because (SUBREG (MEM...)) forces reloading into a register. */
1278 return GET_CODE (op) == MEM && general_operand (op, mode);
1280 if (mode != VOIDmode && GET_MODE (op) != mode)
1281 return 0;
1283 inner = op;
1284 if (GET_CODE (inner) == SUBREG)
1285 inner = SUBREG_REG (inner);
1287 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1290 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1291 that is, a memory reference whose address is a general_operand. */
1294 indirect_operand (op, mode)
1295 register rtx op;
1296 enum machine_mode mode;
1298 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1299 if (! reload_completed
1300 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1302 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1303 rtx inner = SUBREG_REG (op);
1305 if (BYTES_BIG_ENDIAN)
1306 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1307 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1309 if (mode != VOIDmode && GET_MODE (op) != mode)
1310 return 0;
1312 /* The only way that we can have a general_operand as the resulting
1313 address is if OFFSET is zero and the address already is an operand
1314 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1315 operand. */
1317 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1318 || (GET_CODE (XEXP (inner, 0)) == PLUS
1319 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1320 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1321 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1324 return (GET_CODE (op) == MEM
1325 && memory_operand (op, mode)
1326 && general_operand (XEXP (op, 0), Pmode));
1329 /* Return 1 if this is a comparison operator. This allows the use of
1330 MATCH_OPERATOR to recognize all the branch insns. */
1333 comparison_operator (op, mode)
1334 register rtx op;
1335 enum machine_mode mode;
1337 return ((mode == VOIDmode || GET_MODE (op) == mode)
1338 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1341 /* If BODY is an insn body that uses ASM_OPERANDS,
1342 return the number of operands (both input and output) in the insn.
1343 Otherwise return -1. */
1346 asm_noperands (body)
1347 rtx body;
1349 if (GET_CODE (body) == ASM_OPERANDS)
1350 /* No output operands: return number of input operands. */
1351 return ASM_OPERANDS_INPUT_LENGTH (body);
1352 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1353 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1354 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1355 else if (GET_CODE (body) == PARALLEL
1356 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1357 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1359 /* Multiple output operands, or 1 output plus some clobbers:
1360 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1361 int i;
1362 int n_sets;
1364 /* Count backwards through CLOBBERs to determine number of SETs. */
1365 for (i = XVECLEN (body, 0); i > 0; i--)
1367 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1368 break;
1369 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1370 return -1;
1373 /* N_SETS is now number of output operands. */
1374 n_sets = i;
1376 /* Verify that all the SETs we have
1377 came from a single original asm_operands insn
1378 (so that invalid combinations are blocked). */
1379 for (i = 0; i < n_sets; i++)
1381 rtx elt = XVECEXP (body, 0, i);
1382 if (GET_CODE (elt) != SET)
1383 return -1;
1384 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1385 return -1;
1386 /* If these ASM_OPERANDS rtx's came from different original insns
1387 then they aren't allowed together. */
1388 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1389 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1390 return -1;
1392 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1393 + n_sets);
1395 else if (GET_CODE (body) == PARALLEL
1396 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1398 /* 0 outputs, but some clobbers:
1399 body is [(asm_operands ...) (clobber (reg ...))...]. */
1400 int i;
1402 /* Make sure all the other parallel things really are clobbers. */
1403 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1404 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1405 return -1;
1407 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1409 else
1410 return -1;
1413 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1414 copy its operands (both input and output) into the vector OPERANDS,
1415 the locations of the operands within the insn into the vector OPERAND_LOCS,
1416 and the constraints for the operands into CONSTRAINTS.
1417 Write the modes of the operands into MODES.
1418 Return the assembler-template.
1420 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1421 we don't store that info. */
1423 char *
1424 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1425 rtx body;
1426 rtx *operands;
1427 rtx **operand_locs;
1428 const char **constraints;
1429 enum machine_mode *modes;
1431 register int i;
1432 int noperands;
1433 char *template = 0;
1435 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1437 rtx asmop = SET_SRC (body);
1438 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1440 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1442 for (i = 1; i < noperands; i++)
1444 if (operand_locs)
1445 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1446 if (operands)
1447 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1448 if (constraints)
1449 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1450 if (modes)
1451 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1454 /* The output is in the SET.
1455 Its constraint is in the ASM_OPERANDS itself. */
1456 if (operands)
1457 operands[0] = SET_DEST (body);
1458 if (operand_locs)
1459 operand_locs[0] = &SET_DEST (body);
1460 if (constraints)
1461 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1462 if (modes)
1463 modes[0] = GET_MODE (SET_DEST (body));
1464 template = ASM_OPERANDS_TEMPLATE (asmop);
1466 else if (GET_CODE (body) == ASM_OPERANDS)
1468 rtx asmop = body;
1469 /* No output operands: BODY is (asm_operands ....). */
1471 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1473 /* The input operands are found in the 1st element vector. */
1474 /* Constraints for inputs are in the 2nd element vector. */
1475 for (i = 0; i < noperands; i++)
1477 if (operand_locs)
1478 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1479 if (operands)
1480 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1481 if (constraints)
1482 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1483 if (modes)
1484 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1486 template = ASM_OPERANDS_TEMPLATE (asmop);
1488 else if (GET_CODE (body) == PARALLEL
1489 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1491 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1492 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1493 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1494 int nout = 0; /* Does not include CLOBBERs. */
1496 /* At least one output, plus some CLOBBERs. */
1498 /* The outputs are in the SETs.
1499 Their constraints are in the ASM_OPERANDS itself. */
1500 for (i = 0; i < nparallel; i++)
1502 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1503 break; /* Past last SET */
1505 if (operands)
1506 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1507 if (operand_locs)
1508 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1509 if (constraints)
1510 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1511 if (modes)
1512 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1513 nout++;
1516 for (i = 0; i < nin; i++)
1518 if (operand_locs)
1519 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1520 if (operands)
1521 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1522 if (constraints)
1523 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1524 if (modes)
1525 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1528 template = ASM_OPERANDS_TEMPLATE (asmop);
1530 else if (GET_CODE (body) == PARALLEL
1531 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1533 /* No outputs, but some CLOBBERs. */
1535 rtx asmop = XVECEXP (body, 0, 0);
1536 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1538 for (i = 0; i < nin; i++)
1540 if (operand_locs)
1541 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1542 if (operands)
1543 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1544 if (constraints)
1545 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1546 if (modes)
1547 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1550 template = ASM_OPERANDS_TEMPLATE (asmop);
1553 return template;
1556 /* Check if an asm_operand matches its constraints.
1557 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1560 asm_operand_ok (op, constraint)
1561 rtx op;
1562 const char *constraint;
1564 int result = 0;
1566 /* Use constrain_operands after reload. */
1567 if (reload_completed)
1568 abort ();
1570 while (*constraint)
1572 switch (*constraint++)
1574 case '=':
1575 case '+':
1576 case '*':
1577 case '%':
1578 case '?':
1579 case '!':
1580 case '#':
1581 case '&':
1582 case ',':
1583 break;
1585 case '0': case '1': case '2': case '3': case '4':
1586 case '5': case '6': case '7': case '8': case '9':
1587 /* For best results, our caller should have given us the
1588 proper matching constraint, but we can't actually fail
1589 the check if they didn't. Indicate that results are
1590 inconclusive. */
1591 result = -1;
1592 break;
1594 case 'p':
1595 if (address_operand (op, VOIDmode))
1596 return 1;
1597 break;
1599 case 'm':
1600 case 'V': /* non-offsettable */
1601 if (memory_operand (op, VOIDmode))
1602 return 1;
1603 break;
1605 case 'o': /* offsettable */
1606 if (offsettable_nonstrict_memref_p (op))
1607 return 1;
1608 break;
1610 case '<':
1611 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1612 excepting those that expand_call created. Further, on some
1613 machines which do not have generalized auto inc/dec, an inc/dec
1614 is not a memory_operand.
1616 Match any memory and hope things are resolved after reload. */
1618 if (GET_CODE (op) == MEM
1619 && (1
1620 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1621 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1622 return 1;
1623 break;
1625 case '>':
1626 if (GET_CODE (op) == MEM
1627 && (1
1628 || GET_CODE (XEXP (op, 0)) == PRE_INC
1629 || GET_CODE (XEXP (op, 0)) == POST_INC))
1630 return 1;
1631 break;
1633 case 'E':
1634 #ifndef REAL_ARITHMETIC
1635 /* Match any floating double constant, but only if
1636 we can examine the bits of it reliably. */
1637 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1638 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1639 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1640 break;
1641 #endif
1642 /* FALLTHRU */
1644 case 'F':
1645 if (GET_CODE (op) == CONST_DOUBLE)
1646 return 1;
1647 break;
1649 case 'G':
1650 if (GET_CODE (op) == CONST_DOUBLE
1651 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1652 return 1;
1653 break;
1654 case 'H':
1655 if (GET_CODE (op) == CONST_DOUBLE
1656 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1657 return 1;
1658 break;
1660 case 's':
1661 if (GET_CODE (op) == CONST_INT
1662 || (GET_CODE (op) == CONST_DOUBLE
1663 && GET_MODE (op) == VOIDmode))
1664 break;
1665 /* FALLTHRU */
1667 case 'i':
1668 if (CONSTANT_P (op)
1669 #ifdef LEGITIMATE_PIC_OPERAND_P
1670 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1671 #endif
1672 )
1673 return 1;
1674 break;
1676 case 'n':
1677 if (GET_CODE (op) == CONST_INT
1678 || (GET_CODE (op) == CONST_DOUBLE
1679 && GET_MODE (op) == VOIDmode))
1680 return 1;
1681 break;
1683 case 'I':
1684 if (GET_CODE (op) == CONST_INT
1685 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1686 return 1;
1687 break;
1688 case 'J':
1689 if (GET_CODE (op) == CONST_INT
1690 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1691 return 1;
1692 break;
1693 case 'K':
1694 if (GET_CODE (op) == CONST_INT
1695 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1696 return 1;
1697 break;
1698 case 'L':
1699 if (GET_CODE (op) == CONST_INT
1700 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1701 return 1;
1702 break;
1703 case 'M':
1704 if (GET_CODE (op) == CONST_INT
1705 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1706 return 1;
1707 break;
1708 case 'N':
1709 if (GET_CODE (op) == CONST_INT
1710 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1711 return 1;
1712 break;
1713 case 'O':
1714 if (GET_CODE (op) == CONST_INT
1715 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1716 return 1;
1717 break;
1718 case 'P':
1719 if (GET_CODE (op) == CONST_INT
1720 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1721 return 1;
1722 break;
1724 case 'X':
1725 return 1;
1727 case 'g':
1728 if (general_operand (op, VOIDmode))
1729 return 1;
1730 break;
1732 #ifdef EXTRA_CONSTRAINT
1733 case 'Q':
1734 if (EXTRA_CONSTRAINT (op, 'Q'))
1735 return 1;
1736 break;
1737 case 'R':
1738 if (EXTRA_CONSTRAINT (op, 'R'))
1739 return 1;
1740 break;
1741 case 'S':
1742 if (EXTRA_CONSTRAINT (op, 'S'))
1743 return 1;
1744 break;
1745 case 'T':
1746 if (EXTRA_CONSTRAINT (op, 'T'))
1747 return 1;
1748 break;
1749 case 'U':
1750 if (EXTRA_CONSTRAINT (op, 'U'))
1751 return 1;
1752 break;
1753 #endif
1755 case 'r':
1756 default:
1757 if (GET_MODE (op) == BLKmode)
1758 break;
1759 if (register_operand (op, VOIDmode))
1760 return 1;
1761 break;
1765 return result;
1768 /* Given an rtx *P, if it is a sum containing an integer constant term,
1769 return the location (type rtx *) of the pointer to that constant term.
1770 Otherwise, return a null pointer. */
1772 static rtx *
1773 find_constant_term_loc (p)
1774 rtx *p;
1776 register rtx *tem;
1777 register enum rtx_code code = GET_CODE (*p);
1779 /* If *P IS such a constant term, P is its location. */
1781 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1782 || code == CONST)
1783 return p;
1785 /* Otherwise, if not a sum, it has no constant term. */
1787 if (GET_CODE (*p) != PLUS)
1788 return 0;
1790 /* If one of the summands is constant, return its location. */
1792 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1793 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1794 return p;
1796 /* Otherwise, check each summand for containing a constant term. */
1798 if (XEXP (*p, 0) != 0)
1800 tem = find_constant_term_loc (&XEXP (*p, 0));
1801 if (tem != 0)
1802 return tem;
1805 if (XEXP (*p, 1) != 0)
1807 tem = find_constant_term_loc (&XEXP (*p, 1));
1808 if (tem != 0)
1809 return tem;
1812 return 0;
1815 /* Return 1 if OP is a memory reference
1816 whose address contains no side effects
1817 and remains valid after the addition
1818 of a positive integer less than the
1819 size of the object being referenced.
1821 We assume that the original address is valid and do not check it.
1823 This uses strict_memory_address_p as a subroutine, so
1824 don't use it before reload. */
1827 offsettable_memref_p (op)
1828 rtx op;
1830 return ((GET_CODE (op) == MEM)
1831 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1834 /* Similar, but don't require a strictly valid mem ref:
1835 consider pseudo-regs valid as index or base regs. */
1838 offsettable_nonstrict_memref_p (op)
1839 rtx op;
1841 return ((GET_CODE (op) == MEM)
1842 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1845 /* Return 1 if Y is a memory address which contains no side effects
1846 and would remain valid after the addition of a positive integer
1847 less than the size of that mode.
1849 We assume that the original address is valid and do not check it.
1850 We do check that it is valid for narrower modes.
1852 If STRICTP is nonzero, we require a strictly valid address,
1853 for the sake of use in reload.c. */
1856 offsettable_address_p (strictp, mode, y)
1857 int strictp;
1858 enum machine_mode mode;
1859 register rtx y;
1861 register enum rtx_code ycode = GET_CODE (y);
1862 register rtx z;
1863 rtx y1 = y;
1864 rtx *y2;
1865 int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);
1867 if (CONSTANT_ADDRESS_P (y))
1868 return 1;
1870 /* Adjusting an offsettable address involves changing to a narrower mode.
1871 Make sure that's OK. */
1873 if (mode_dependent_address_p (y))
1874 return 0;
1876 /* If the expression contains a constant term,
1877 see if it remains valid when max possible offset is added. */
1879 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1881 int good;
1883 y1 = *y2;
1884 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1885 /* Use QImode because an odd displacement may be automatically invalid
1886 for any wider mode. But it should be valid for a single byte. */
1887 good = (*addressp) (QImode, y);
1889 /* In any case, restore old contents of memory. */
1890 *y2 = y1;
1891 return good;
1894 if (ycode == PRE_DEC || ycode == PRE_INC
1895 || ycode == POST_DEC || ycode == POST_INC)
1896 return 0;
1898 /* The offset added here is chosen as the maximum offset that
1899 any instruction could need to add when operating on something
1900 of the specified mode. We assume that if Y and Y+c are
1901 valid addresses then so is Y+d for all 0<d<c. */
1903 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1905 /* Use QImode because an odd displacement may be automatically invalid
1906 for any wider mode. But it should be valid for a single byte. */
1907 return (*addressp) (QImode, z);
1910 /* Return 1 if ADDR is an address-expression whose effect depends
1911 on the mode of the memory reference it is used in.
1913 Autoincrement addressing is a typical example of mode-dependence
1914 because the amount of the increment depends on the mode. */
1917 mode_dependent_address_p (addr)
1918 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1920 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1921 return 0;
1922 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1923 win: ATTRIBUTE_UNUSED_LABEL
1924 return 1;
1927 /* Return 1 if OP is a general operand
1928 other than a memory ref with a mode dependent address. */
1931 mode_independent_operand (op, mode)
1932 enum machine_mode mode;
1933 rtx op;
1935 rtx addr;
1937 if (! general_operand (op, mode))
1938 return 0;
1940 if (GET_CODE (op) != MEM)
1941 return 1;
1943 addr = XEXP (op, 0);
1944 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1945 return 1;
1946 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1947 lose: ATTRIBUTE_UNUSED_LABEL
1948 return 0;
1951 /* Given an operand OP that is a valid memory reference
1952 which satisfies offsettable_memref_p,
1953 return a new memory reference whose address has been adjusted by OFFSET.
1954 OFFSET should be positive and less than the size of the object referenced.  */
1958 adj_offsettable_operand (op, offset)
1959 rtx op;
1960 int offset;
1962 register enum rtx_code code = GET_CODE (op);
1964 if (code == MEM)
1966 register rtx y = XEXP (op, 0);
1967 register rtx new;
1969 if (CONSTANT_ADDRESS_P (y))
1971 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1972 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1973 return new;
1976 if (GET_CODE (y) == PLUS)
1978 rtx z = y;
1979 register rtx *const_loc;
1981 op = copy_rtx (op);
1982 z = XEXP (op, 0);
1983 const_loc = find_constant_term_loc (&z);
1984 if (const_loc)
1986 *const_loc = plus_constant_for_output (*const_loc, offset);
1987 return op;
1991 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1992 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1993 return new;
1995 abort ();
1998 /* Analyze INSN and compute the variables recog_n_operands, recog_n_dups,
1999 recog_n_alternatives, recog_operand, recog_operand_loc, recog_constraints,
2000 recog_operand_mode, recog_dup_loc and recog_dup_num.
2001 If REGISTER_CONSTRAINTS is not defined, also compute
2002 recog_operand_address_p. */
2003 void
2004 extract_insn (insn)
2005 rtx insn;
2007 int i;
2008 int icode;
2009 int noperands;
2010 rtx body = PATTERN (insn);
2012 recog_n_operands = 0;
2013 recog_n_alternatives = 0;
2014 recog_n_dups = 0;
2016 switch (GET_CODE (body))
2018 case USE:
2019 case CLOBBER:
2020 case ASM_INPUT:
2021 case ADDR_VEC:
2022 case ADDR_DIFF_VEC:
2023 return;
2025 case SET:
2026 case PARALLEL:
2027 case ASM_OPERANDS:
2028 recog_n_operands = noperands = asm_noperands (body);
2029 if (noperands >= 0)
2031 /* This insn is an `asm' with operands. */
2033 /* expand_asm_operands makes sure there aren't too many operands. */
2034 if (noperands > MAX_RECOG_OPERANDS)
2035 abort ();
2037 /* Now get the operand values and constraints out of the insn. */
2038 decode_asm_operands (body, recog_operand, recog_operand_loc,
2039 recog_constraints, recog_operand_mode);
2040 if (noperands > 0)
2042 const char *p = recog_constraints[0];
2043 recog_n_alternatives = 1;
2044 while (*p)
2045 recog_n_alternatives += (*p++ == ',');
2046 }
2047 #ifndef REGISTER_CONSTRAINTS
2048 bzero (recog_operand_address_p, sizeof recog_operand_address_p);
2049 #endif
2050 break;
2051 }
2052
2053 /* FALLTHROUGH */
2055 default:
2056 /* Ordinary insn: recognize it, get the operands via insn_extract
2057 and get the constraints. */
2059 icode = recog_memoized (insn);
2060 if (icode < 0)
2061 fatal_insn_not_found (insn);
2063 recog_n_operands = noperands = insn_n_operands[icode];
2064 recog_n_alternatives = insn_n_alternatives[icode];
2065 recog_n_dups = insn_n_dups[icode];
2067 insn_extract (insn);
2069 for (i = 0; i < noperands; i++)
2070 {
2071 #ifdef REGISTER_CONSTRAINTS
2072 recog_constraints[i] = insn_operand_constraint[icode][i];
2073 #else
2074 recog_operand_address_p[i] = insn_operand_address_p[icode][i];
2075 #endif
2076 recog_operand_mode[i] = insn_operand_mode[icode][i];
2077 }
2078 }
2079 for (i = 0; i < noperands; i++)
2080 recog_op_type[i] = (recog_constraints[i][0] == '=' ? OP_OUT
2081 : recog_constraints[i][0] == '+' ? OP_INOUT
2082 : OP_IN);
2084 if (recog_n_alternatives > MAX_RECOG_ALTERNATIVES)
2085 abort ();
2086 }
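/* A typical caller (a sketch only; real callers vary) does something like:

     extract_insn (insn);
     if (! constrain_operands (reload_completed))
       fatal_insn_not_found (insn);

   after which recog_operand[], recog_operand_mode[] and which_alternative
   describe the operands of INSN and the constraint alternative that
   matched.  */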
2088 /* After calling extract_insn, you can use this function to extract some
2089 information from the constraint strings into a more usable form.
2090 The collected data is stored in recog_op_alt. */
2091 void
2092 preprocess_constraints ()
2093 {
2094 int i;
2095
2096 for (i = 0; i < recog_n_operands; i++)
2097 {
2098 int j;
2099 struct operand_alternative *op_alt;
2100 const char *p = recog_constraints[i];
2101
2102 op_alt = recog_op_alt[i];
2103
2104 for (j = 0; j < recog_n_alternatives; j++)
2105 {
2106 op_alt[j].class = NO_REGS;
2107 op_alt[j].constraint = p;
2108 op_alt[j].matches = -1;
2109 op_alt[j].matched = -1;
2110
2111 if (*p == '\0' || *p == ',')
2112 {
2113 op_alt[j].anything_ok = 1;
2114 continue;
2115 }
2116
2117 for (;;)
2118 {
2119 char c = *p++;
2120 if (c == '#')
2121 do
2122 c = *p++;
2123 while (c != ',' && c != '\0');
2124 if (c == ',' || c == '\0')
2125 break;
2126
2127 switch (c)
2128 {
2129 case '=': case '+': case '*': case '%':
2130 case 'E': case 'F': case 'G': case 'H':
2131 case 's': case 'i': case 'n':
2132 case 'I': case 'J': case 'K': case 'L':
2133 case 'M': case 'N': case 'O': case 'P':
2134 #ifdef EXTRA_CONSTRAINT
2135 case 'Q': case 'R': case 'S': case 'T': case 'U':
2136 #endif
2137 /* These don't say anything we care about. */
2138 break;
2140 case '?':
2141 op_alt[j].reject += 6;
2142 break;
2143 case '!':
2144 op_alt[j].reject += 600;
2145 break;
2146 case '&':
2147 op_alt[j].earlyclobber = 1;
2148 break;
2150 case '0': case '1': case '2': case '3': case '4':
2151 case '5': case '6': case '7': case '8': case '9':
2152 op_alt[j].matches = c - '0';
2153 op_alt[op_alt[j].matches].matched = i;
2154 break;
2156 case 'm':
2157 op_alt[j].memory_ok = 1;
2158 break;
2159 case '<':
2160 op_alt[j].decmem_ok = 1;
2161 break;
2162 case '>':
2163 op_alt[j].incmem_ok = 1;
2164 break;
2165 case 'V':
2166 op_alt[j].nonoffmem_ok = 1;
2167 break;
2168 case 'o':
2169 op_alt[j].offmem_ok = 1;
2170 break;
2171 case 'X':
2172 op_alt[j].anything_ok = 1;
2173 break;
2175 case 'p':
2176 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2177 break;
2179 case 'g': case 'r':
2180 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2181 break;
2183 default:
2184 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2185 break;
2186 }
2187 }
2188 }
2189 }
2190 }
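/* For instance, an operand whose constraint string is "=r,m" (two
   alternatives) ends up with roughly op_alt[0].class == GENERAL_REGS and
   op_alt[1].memory_ok == 1, both with matches == -1 since no digit
   constraint appears.  The exact classes are target-dependent; this is
   only an illustration.  */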
2192 #ifdef REGISTER_CONSTRAINTS
2194 /* Check the operands of an insn against the insn's operand constraints
2195 and return 1 if they are valid.
2196 The information about the insn's operands, constraints, operand modes
2197 etc. is obtained from the global variables set up by extract_insn.
2199 WHICH_ALTERNATIVE is set to a number which indicates which
2200 alternative of constraints was matched: 0 for the first alternative,
2201 1 for the next, etc.
2203 In addition, when two operands are required to match
2204 and it happens that the output operand is (reg) while the
2205 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2206 make the output operand look like the input.
2207 This is because the output operand is the one the template will print.
2209 This is used in final, just before printing the assembler code and by
2210 the routines that determine an insn's attribute.
2212 If STRICT is a positive value, it means that we have been
2213 called after reload has been completed. In that case, we must
2214 do all checks strictly. If it is zero, it means that we have been called
2215 before reload has completed. In that case, we first try to see if we can
2216 find an alternative that matches strictly. If not, we try again, this
2217 time assuming that reload will fix up the insn. This provides a "best
2218 guess" for the alternative and is used to compute attributes of insns prior
2219 to reload. A negative value of STRICT is used for this internal call. */
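/* For example, if some operand's constraint string is "r,m", then an
   operand that is a suitable hard register normally satisfies alternative
   0 and a MEM satisfies alternative 1, so WHICH_ALTERNATIVE is set to 0
   or 1 accordingly, provided the remaining operands also fit that
   alternative.  */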
2221 struct funny_match
2222 {
2223 int this, other;
2224 };
2225
2226 int
2227 constrain_operands (strict)
2228 int strict;
2229 {
2230 const char *constraints[MAX_RECOG_OPERANDS];
2231 int matching_operands[MAX_RECOG_OPERANDS];
2232 int earlyclobber[MAX_RECOG_OPERANDS];
2233 register int c;
2234
2235 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2236 int funny_match_index;
2237
2238 if (recog_n_operands == 0 || recog_n_alternatives == 0)
2239 return 1;
2241 for (c = 0; c < recog_n_operands; c++)
2242 {
2243 constraints[c] = recog_constraints[c];
2244 matching_operands[c] = -1;
2245 }
2246
2247 which_alternative = 0;
2249 while (which_alternative < recog_n_alternatives)
2250 {
2251 register int opno;
2252 int lose = 0;
2253 funny_match_index = 0;
2254
2255 for (opno = 0; opno < recog_n_operands; opno++)
2256 {
2257 register rtx op = recog_operand[opno];
2258 enum machine_mode mode = GET_MODE (op);
2259 register const char *p = constraints[opno];
2260 int offset = 0;
2261 int win = 0;
2262 int val;
2264 earlyclobber[opno] = 0;
2266 /* A unary operator may be accepted by the predicate, but it
2267 is irrelevant for matching constraints. */
2268 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2269 op = XEXP (op, 0);
2271 if (GET_CODE (op) == SUBREG)
2272 {
2273 if (GET_CODE (SUBREG_REG (op)) == REG
2274 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2275 offset = SUBREG_WORD (op);
2276 op = SUBREG_REG (op);
2277 }
2278
2279 /* An empty constraint or empty alternative
2280 allows anything which matched the pattern. */
2281 if (*p == 0 || *p == ',')
2282 win = 1;
2284 while (*p && (c = *p++) != ',')
2285 switch (c)
2286 {
2287 case '?':
2288 case '!':
2289 case '*':
2290 case '%':
2291 case '=':
2292 case '+':
2293 break;
2295 case '#':
2296 /* Ignore rest of this alternative as far as
2297 constraint checking is concerned. */
2298 while (*p && *p != ',')
2299 p++;
2300 break;
2302 case '&':
2303 earlyclobber[opno] = 1;
2304 break;
2306 case '0': case '1': case '2': case '3': case '4':
2307 case '5': case '6': case '7': case '8': case '9':
2308 /* This operand must be the same as a previous one.
2309 This kind of constraint is used for instructions such
2310 as add when they take only two operands.
2312 Note that the lower-numbered operand is passed first.
2314 If we are not testing strictly, assume that this constraint
2315 will be satisfied. */
2316 if (strict < 0)
2317 val = 1;
2318 else
2319 {
2320 rtx op1 = recog_operand[c - '0'];
2321 rtx op2 = recog_operand[opno];
2322
2323 /* A unary operator may be accepted by the predicate,
2324 but it is irrelevant for matching constraints. */
2325 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2326 op1 = XEXP (op1, 0);
2327 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2328 op2 = XEXP (op2, 0);
2329
2330 val = operands_match_p (op1, op2);
2331 }
2332
2333 matching_operands[opno] = c - '0';
2334 matching_operands[c - '0'] = opno;
2335
2336 if (val != 0)
2337 win = 1;
2338 /* If output is *x and input is *--x,
2339 arrange later to change the output to *--x as well,
2340 since the output op is the one that will be printed. */
2341 if (val == 2 && strict > 0)
2342 {
2343 funny_match[funny_match_index].this = opno;
2344 funny_match[funny_match_index++].other = c - '0';
2345 }
2346 break;
2348 case 'p':
2349 /* p is used for address_operands. When we are called by
2350 gen_reload, no one will have checked that the address is
2351 strictly valid, i.e., that all pseudos requiring hard regs
2352 have gotten them. */
2353 if (strict <= 0
2354 || (strict_memory_address_p (recog_operand_mode[opno],
2355 op)))
2356 win = 1;
2357 break;
2359 /* No need to check general_operand again;
2360 it was done in insn-recog.c. */
2361 case 'g':
2362 /* Anything goes unless it is a REG and really has a hard reg
2363 but the hard reg is not in the class GENERAL_REGS. */
2364 if (strict < 0
2365 || GENERAL_REGS == ALL_REGS
2366 || GET_CODE (op) != REG
2367 || (reload_in_progress
2368 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2369 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2370 win = 1;
2371 break;
2373 case 'r':
2374 if (strict < 0
2375 || (strict == 0
2376 && GET_CODE (op) == REG
2377 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2378 || (strict == 0 && GET_CODE (op) == SCRATCH)
2379 || (GET_CODE (op) == REG
2380 && ((GENERAL_REGS == ALL_REGS
2381 && REGNO (op) < FIRST_PSEUDO_REGISTER)
2382 || reg_fits_class_p (op, GENERAL_REGS,
2383 offset, mode))))
2384 win = 1;
2385 break;
2387 case 'X':
2388 /* This is used for a MATCH_SCRATCH in the cases when
2389 we don't actually need anything. So anything goes
2390 any time. */
2391 win = 1;
2392 break;
2394 case 'm':
2395 if (GET_CODE (op) == MEM
2396 /* Before reload, accept what reload can turn into mem. */
2397 || (strict < 0 && CONSTANT_P (op))
2398 /* During reload, accept a pseudo */
2399 || (reload_in_progress && GET_CODE (op) == REG
2400 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2401 win = 1;
2402 break;
2404 case '<':
2405 if (GET_CODE (op) == MEM
2406 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2407 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2408 win = 1;
2409 break;
2411 case '>':
2412 if (GET_CODE (op) == MEM
2413 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2414 || GET_CODE (XEXP (op, 0)) == POST_INC))
2415 win = 1;
2416 break;
2418 case 'E':
2419 #ifndef REAL_ARITHMETIC
2420 /* Match any CONST_DOUBLE, but only if
2421 we can examine the bits of it reliably. */
2422 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2423 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2424 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2425 break;
2426 #endif
2427 if (GET_CODE (op) == CONST_DOUBLE)
2428 win = 1;
2429 break;
2431 case 'F':
2432 if (GET_CODE (op) == CONST_DOUBLE)
2433 win = 1;
2434 break;
2436 case 'G':
2437 case 'H':
2438 if (GET_CODE (op) == CONST_DOUBLE
2439 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2440 win = 1;
2441 break;
2443 case 's':
2444 if (GET_CODE (op) == CONST_INT
2445 || (GET_CODE (op) == CONST_DOUBLE
2446 && GET_MODE (op) == VOIDmode))
2447 break;
2448 case 'i':
2449 if (CONSTANT_P (op))
2450 win = 1;
2451 break;
2453 case 'n':
2454 if (GET_CODE (op) == CONST_INT
2455 || (GET_CODE (op) == CONST_DOUBLE
2456 && GET_MODE (op) == VOIDmode))
2457 win = 1;
2458 break;
2460 case 'I':
2461 case 'J':
2462 case 'K':
2463 case 'L':
2464 case 'M':
2465 case 'N':
2466 case 'O':
2467 case 'P':
2468 if (GET_CODE (op) == CONST_INT
2469 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2470 win = 1;
2471 break;
2473 #ifdef EXTRA_CONSTRAINT
2474 case 'Q':
2475 case 'R':
2476 case 'S':
2477 case 'T':
2478 case 'U':
2479 if (EXTRA_CONSTRAINT (op, c))
2480 win = 1;
2481 break;
2482 #endif
2484 case 'V':
2485 if (GET_CODE (op) == MEM
2486 && ((strict > 0 && ! offsettable_memref_p (op))
2487 || (strict < 0
2488 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2489 || (reload_in_progress
2490 && !(GET_CODE (op) == REG
2491 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2492 win = 1;
2493 break;
2495 case 'o':
2496 if ((strict > 0 && offsettable_memref_p (op))
2497 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2498 /* Before reload, accept what reload can handle. */
2499 || (strict < 0
2500 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2501 /* During reload, accept a pseudo */
2502 || (reload_in_progress && GET_CODE (op) == REG
2503 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2504 win = 1;
2505 break;
2507 default:
2508 if (strict < 0
2509 || (strict == 0
2510 && GET_CODE (op) == REG
2511 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2512 || (strict == 0 && GET_CODE (op) == SCRATCH)
2513 || (GET_CODE (op) == REG
2514 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
2515 offset, mode)))
2516 win = 1;
2517 }
2518
2519 constraints[opno] = p;
2520 /* If this operand did not win somehow,
2521 this alternative loses. */
2522 if (! win)
2523 lose = 1;
2524 }
2525 /* This alternative won; the operands are ok.
2526 Change whichever operands this alternative says to change. */
2527 if (! lose)
2528 {
2529 int opno, eopno;
2531 /* See if any earlyclobber operand conflicts with some other
2532 operand. */
2534 if (strict > 0)
2535 for (eopno = 0; eopno < recog_n_operands; eopno++)
2536 /* Ignore earlyclobber operands now in memory,
2537 because we would often report failure when we have
2538 two memory operands, one of which was formerly a REG. */
2539 if (earlyclobber[eopno]
2540 && GET_CODE (recog_operand[eopno]) == REG)
2541 for (opno = 0; opno < recog_n_operands; opno++)
2542 if ((GET_CODE (recog_operand[opno]) == MEM
2543 || recog_op_type[opno] != OP_OUT)
2544 && opno != eopno
2545 /* Ignore things like match_operator operands. */
2546 && *recog_constraints[opno] != 0
2547 && ! (matching_operands[opno] == eopno
2548 && operands_match_p (recog_operand[opno],
2549 recog_operand[eopno]))
2550 && ! safe_from_earlyclobber (recog_operand[opno],
2551 recog_operand[eopno]))
2552 lose = 1;
2553
2554 if (! lose)
2555 {
2556 while (--funny_match_index >= 0)
2557 {
2558 recog_operand[funny_match[funny_match_index].other]
2559 = recog_operand[funny_match[funny_match_index].this];
2560 }
2561
2562 return 1;
2563 }
2564 }
2565
2566 which_alternative++;
2567 }
2568
2569 /* If we are about to reject this, but we are not to test strictly,
2570 try a very loose test. Only return failure if it fails also. */
2571 if (strict == 0)
2572 return constrain_operands (-1);
2573 else
2574 return 0;
2575 }
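/* To illustrate the earlyclobber check above: an output operand
   constrained "=&r" sets earlyclobber[opno], and with strict > 0 an
   alternative is rejected if that output register is also used by some
   other input (or memory) operand which it is not explicitly required to
   match.  */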
2577 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2578 is a hard reg in class CLASS when its regno is offset by OFFSET
2579 and changed to mode MODE.
2580 If REG occupies multiple hard regs, all of them must be in CLASS. */
2582 int
2583 reg_fits_class_p (operand, class, offset, mode)
2584 rtx operand;
2585 register enum reg_class class;
2586 int offset;
2587 enum machine_mode mode;
2588 {
2589 register int regno = REGNO (operand);
2590 if (regno < FIRST_PSEUDO_REGISTER
2591 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2592 regno + offset))
2593 {
2594 register int sr;
2595 regno += offset;
2596 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2597 sr > 0; sr--)
2598 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2599 regno + sr))
2600 break;
2601 return sr == 0;
2602 }
2603
2604 return 0;
2605 }
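/* Example (hypothetical target): if GENERAL_REGS consists of hard regs
   0-7 and a DImode value needs two consecutive hard regs, then a DImode
   operand in reg 7 does not fit GENERAL_REGS (reg 8 is outside the class)
   while the same operand in reg 6 does.  HARD_REGNO_NREGS supplies the
   register count.  */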
2607 #endif /* REGISTER_CONSTRAINTS */
2609 /* Do the splitting of insns in the block B. Only try to actually split if
2610 DO_SPLIT is true; otherwise, just remove nops. */
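/* "Splitting" means applying the machine description's define_split
   patterns via try_split; for instance, a double-word move may be replaced
   by two single-word moves so that the scheduler sees finer-grained insns.
   Which insns split, if any, is entirely target-dependent.  */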
2612 void
2613 split_block_insns (b, do_split)
2614 int b;
2615 int do_split;
2616 {
2617 rtx insn, next;
2618
2619 for (insn = BLOCK_HEAD (b);; insn = next)
2620 {
2621 rtx set;
2623 /* Can't use `next_real_insn' because that
2624 might go across CODE_LABELS and short-out basic blocks. */
2625 next = NEXT_INSN (insn);
2626 if (GET_CODE (insn) != INSN)
2627 {
2628 if (insn == BLOCK_END (b))
2629 break;
2630
2631 continue;
2632 }
2633
2634 /* Don't split no-op move insns. These should silently disappear
2635 later in final. Splitting such insns would break the code
2636 that handles REG_NO_CONFLICT blocks. */
2637 set = single_set (insn);
2638 if (set && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2639 {
2640 if (insn == BLOCK_END (b))
2641 break;
2643 /* Nops get in the way while scheduling, so delete them now if
2644 register allocation has already been done. It is too risky
2645 to try to do this before register allocation, and there are
2646 unlikely to be very many nops then anyways. */
2647 if (reload_completed)
2650 PUT_CODE (insn, NOTE);
2651 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2652 NOTE_SOURCE_FILE (insn) = 0;
2655 continue;
2656 }
2657
2658 if (do_split)
2659 {
2660 /* Split insns here to get max fine-grain parallelism. */
2661 rtx first = PREV_INSN (insn);
2662 rtx notes = REG_NOTES (insn);
2663 rtx last = try_split (PATTERN (insn), insn, 1);
2665 if (last != insn)
2666 {
2667 /* try_split returns the NOTE that INSN became. */
2668 first = NEXT_INSN (first);
2669 #ifdef INSN_SCHEDULING
2670 update_flow_info (notes, first, last, insn);
2671 #endif
2672 PUT_CODE (insn, NOTE);
2673 NOTE_SOURCE_FILE (insn) = 0;
2674 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2675 if (insn == BLOCK_HEAD (b))
2676 BLOCK_HEAD (b) = first;
2677 if (insn == BLOCK_END (b))
2678 {
2679 BLOCK_END (b) = last;
2680 break;
2681 }
2682 }
2683 }
2684
2685 if (insn == BLOCK_END (b))
2686 break;