1999-10-23 08:51 -0700 Zack Weinberg <zack@bitmover.com>
[official-gcc.git] / gcc / recog.c
blob 91c8af0d94f05abc4ce6f33270cfebbd271a5377
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-98, 1999 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tm_p.h"
26 #include "insn-config.h"
27 #include "insn-attr.h"
28 #include "insn-flags.h"
29 #include "insn-codes.h"
30 #include "recog.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "function.h"
34 #include "flags.h"
35 #include "real.h"
36 #include "toplev.h"
37 #include "basic-block.h"
38 #include "output.h"
39 #include "resource.h"
41 #ifndef STACK_PUSH_CODE
42 #ifdef STACK_GROWS_DOWNWARD
43 #define STACK_PUSH_CODE PRE_DEC
44 #else
45 #define STACK_PUSH_CODE PRE_INC
46 #endif
47 #endif
49 #ifndef STACK_POP_CODE
50 #ifdef STACK_GROWS_DOWNWARD
51 #define STACK_POP_CODE POST_INC
52 #else
53 #define STACK_POP_CODE POST_DEC
54 #endif
55 #endif
57 static void validate_replace_rtx_1 PROTO((rtx *, rtx, rtx, rtx));
58 static rtx *find_single_use_1 PROTO((rtx, rtx *));
59 static rtx *find_constant_term_loc PROTO((rtx *));
60 static int insn_invalid_p PROTO((rtx));
62 /* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.c and expmed.c (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in regclass.c and final.c and reload.c.
68 init_recog and init_recog_no_volatile are responsible for setting this. */
70 int volatile_ok;
72 struct recog_data recog_data;
74 /* Contains a vector of operand_alternative structures for every operand.
75 Set up by preprocess_constraints. */
76 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
78 /* On return from `constrain_operands', indicate which alternative
79 was satisfied. */
81 int which_alternative;
83 /* Nonzero after end of reload pass.
84 Set to 1 or 0 by toplev.c.
85 Controls the significance of (SUBREG (MEM)). */
87 int reload_completed;
89 /* Initialize data used by the function `recog'.
90 This must be called once in the compilation of a function
91 before any insn recognition may be done in the function. */
93 void
94 init_recog_no_volatile ()
96 volatile_ok = 0;
99 void
100 init_recog ()
102 volatile_ok = 1;
105 /* Try recognizing the instruction INSN,
106 and return the code number that results.
107 Remember the code so that repeated calls do not
108 need to spend the time for actual rerecognition.
110 This function is the normal interface to instruction recognition.
111 The automatically-generated function `recog' is normally called
112 through this one. (The only exception is in combine.c.) */
115 recog_memoized (insn)
116 rtx insn;
118 if (INSN_CODE (insn) < 0)
119 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
120 return INSN_CODE (insn);
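/* Illustrative sketch (not part of the original file): a typical caller
   asks recog_memoized for the insn code and compares it against a
   CODE_FOR_* value from insn-codes.h.  CODE_FOR_addsi3 below is only an
   example name; it exists only on targets whose machine description
   provides an addsi3 pattern.  */
#if 0
static int
insn_is_addsi3 (insn)
     rtx insn;
{
  int icode = recog_memoized (insn);

  /* A negative code means the pattern was not recognized,
     e.g. an asm or a USE/CLOBBER.  */
  return icode == CODE_FOR_addsi3;
}
#endif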
123 /* Check that X is an insn-body for an `asm' with operands
124 and that the operands mentioned in it are legitimate. */
127 check_asm_operands (x)
128 rtx x;
130 int noperands;
131 rtx *operands;
132 const char **constraints;
133 int i;
135 /* Post-reload, be more strict with things. */
136 if (reload_completed)
138 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
139 extract_insn (make_insn_raw (x));
140 constrain_operands (1);
141 return which_alternative >= 0;
144 noperands = asm_noperands (x);
145 if (noperands < 0)
146 return 0;
147 if (noperands == 0)
148 return 1;
150 operands = (rtx *) alloca (noperands * sizeof (rtx));
151 constraints = (const char **) alloca (noperands * sizeof (char *));
153 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
155 for (i = 0; i < noperands; i++)
157 const char *c = constraints[i];
158 if (c[0] == '%')
159 c++;
160 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
161 c = constraints[c[0] - '0'];
163 if (! asm_operand_ok (operands[i], c))
164 return 0;
167 return 1;
170 /* Static data for the next two routines. */
172 typedef struct change_t
174 rtx object;
175 int old_code;
176 rtx *loc;
177 rtx old;
178 } change_t;
180 static change_t *changes;
181 static int changes_allocated;
183 static int num_changes = 0;
185 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
186 at which NEW will be placed. If OBJECT is zero, no validation is done,
187 the change is simply made.
189 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
190 will be called with the address and mode as parameters. If OBJECT is
191 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
192 the change in place.
194 IN_GROUP is non-zero if this is part of a group of changes that must be
195 performed as a group. In that case, the changes will be stored. The
196 function `apply_change_group' will validate and apply the changes.
198 If IN_GROUP is zero, this is a single change. Try to recognize the insn
199 or validate the memory reference with the change applied. If the result
200 is not valid for the machine, suppress the change and return zero.
201 Otherwise, perform the change and return 1. */
204 validate_change (object, loc, new, in_group)
205 rtx object;
206 rtx *loc;
207 rtx new;
208 int in_group;
210 rtx old = *loc;
212 if (old == new || rtx_equal_p (old, new))
213 return 1;
215 if (in_group == 0 && num_changes != 0)
216 abort ();
218 *loc = new;
220 /* Save the information describing this change. */
221 if (num_changes >= changes_allocated)
223 if (changes_allocated == 0)
224 /* This value allows for repeated substitutions inside complex
225 indexed addresses, or changes in up to 5 insns. */
226 changes_allocated = MAX_RECOG_OPERANDS * 5;
227 else
228 changes_allocated *= 2;
230 changes =
231 (change_t*) xrealloc (changes,
232 sizeof (change_t) * changes_allocated);
235 changes[num_changes].object = object;
236 changes[num_changes].loc = loc;
237 changes[num_changes].old = old;
239 if (object && GET_CODE (object) != MEM)
241 /* Set INSN_CODE to force rerecognition of insn. Save old code in
242 case invalid. */
243 changes[num_changes].old_code = INSN_CODE (object);
244 INSN_CODE (object) = -1;
247 num_changes++;
249 /* If we are making a group of changes, return 1. Otherwise, validate the
250 change group we made. */
252 if (in_group)
253 return 1;
254 else
255 return apply_change_group ();
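/* Illustrative sketch (not part of the original file): the IN_GROUP
   protocol described above.  Both operands of a simple SET are replaced
   as one group, so apply_change_group either keeps both changes (if the
   rewritten insn is still recognized) or silently undoes both.  NEW_DEST
   and NEW_SRC are hypothetical replacements supplied by the caller.  */
#if 0
static int
replace_set_operands (insn, new_dest, new_src)
     rtx insn, new_dest, new_src;
{
  rtx set = PATTERN (insn);	/* assumes the body is a single SET */

  validate_change (insn, &SET_DEST (set), new_dest, 1);
  validate_change (insn, &SET_SRC (set), new_src, 1);

  return apply_change_group ();
}
#endif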
258 /* This subroutine of apply_change_group verifies whether the changes to INSN
259 were valid; i.e. whether INSN can still be recognized. */
261 static int
262 insn_invalid_p (insn)
263 rtx insn;
265 int icode = recog_memoized (insn);
266 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
268 if (is_asm && ! check_asm_operands (PATTERN (insn)))
269 return 1;
270 if (! is_asm && icode < 0)
271 return 1;
273 /* After reload, verify that all constraints are satisfied. */
274 if (reload_completed)
276 extract_insn (insn);
278 if (! constrain_operands (1))
279 return 1;
282 return 0;
285 /* Apply a group of changes previously issued with `validate_change'.
286 Return 1 if all changes are valid, zero otherwise. */
289 apply_change_group ()
291 int i;
293 /* The changes have been applied and all INSN_CODEs have been reset to force
294 rerecognition.
296 The changes are valid if we aren't given an object, or if we are
297 given a MEM and it still is a valid address, or if this is an insn
298 and it is recognized. In the latter case, if reload has completed,
299 we also require that the operands meet the constraints for
300 the insn. */
302 for (i = 0; i < num_changes; i++)
304 rtx object = changes[i].object;
306 if (object == 0)
307 continue;
309 if (GET_CODE (object) == MEM)
311 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
312 break;
314 else if (insn_invalid_p (object))
316 rtx pat = PATTERN (object);
318 /* Perhaps we couldn't recognize the insn because there were
319 extra CLOBBERs at the end. If so, try to re-recognize
320 without the last CLOBBER (later iterations will cause each of
321 them to be eliminated, in turn). But don't do this if we
322 have an ASM_OPERAND. */
323 if (GET_CODE (pat) == PARALLEL
324 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
325 && asm_noperands (PATTERN (object)) < 0)
327 rtx newpat;
329 if (XVECLEN (pat, 0) == 2)
330 newpat = XVECEXP (pat, 0, 0);
331 else
333 int j;
335 newpat
336 = gen_rtx_PARALLEL (VOIDmode,
337 gen_rtvec (XVECLEN (pat, 0) - 1));
338 for (j = 0; j < XVECLEN (newpat, 0); j++)
339 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
342 /* Add a new change to this group to replace the pattern
343 with this new pattern. Then consider this change
344 as having succeeded. The change we added will
345 cause the entire call to fail if things remain invalid.
347 Note that this can lose if a later change than the one
348 we are processing specified &XVECEXP (PATTERN (object), 0, X)
349 but this shouldn't occur. */
351 validate_change (object, &PATTERN (object), newpat, 1);
353 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
354 /* If this insn is a CLOBBER or USE, it is always valid, but is
355 never recognized. */
356 continue;
357 else
358 break;
362 if (i == num_changes)
364 num_changes = 0;
365 return 1;
367 else
369 cancel_changes (0);
370 return 0;
374 /* Return the number of changes so far in the current group. */
377 num_validated_changes ()
379 return num_changes;
382 /* Retract the changes numbered NUM and up. */
384 void
385 cancel_changes (num)
386 int num;
388 int i;
390 /* Back out all the changes. Do this in the opposite order in which
391 they were made. */
392 for (i = num_changes - 1; i >= num; i--)
394 *changes[i].loc = changes[i].old;
395 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
396 INSN_CODE (changes[i].object) = changes[i].old_code;
398 num_changes = num;
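/* Illustrative sketch (not part of the original file): a caller working
   on top of an already pending group can record num_validated_changes
   before adding its own changes and use cancel_changes to retract only
   those, leaving the earlier part of the group queued.
   decide_to_keep_p is a hypothetical caller-supplied test.  */
#if 0
static void
try_tentative_change (insn, loc, new)
     rtx insn;
     rtx *loc;
     rtx new;
{
  int first_own_change = num_validated_changes ();

  validate_change (insn, loc, new, 1);

  /* If the caller rejects the rewritten insn, only the change added
     above is retracted -- earlier queued changes stay pending.  */
  if (! decide_to_keep_p (insn))
    cancel_changes (first_own_change);
}
#endif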
401 /* Replace every occurrence of FROM in X with TO. Mark each change with
402 validate_change passing OBJECT. */
404 static void
405 validate_replace_rtx_1 (loc, from, to, object)
406 rtx *loc;
407 rtx from, to, object;
409 register int i, j;
410 register const char *fmt;
411 register rtx x = *loc;
412 enum rtx_code code = GET_CODE (x);
414 /* X matches FROM if it is the same rtx or they are both referring to the
415 same register in the same mode. Avoid calling rtx_equal_p unless the
416 operands look similar. */
418 if (x == from
419 || (GET_CODE (x) == REG && GET_CODE (from) == REG
420 && GET_MODE (x) == GET_MODE (from)
421 && REGNO (x) == REGNO (from))
422 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
423 && rtx_equal_p (x, from)))
425 validate_change (object, loc, to, 1);
426 return;
429 /* For commutative or comparison operations, try replacing each argument
430 separately and seeing if we made any changes. If so, put a constant
431 argument last. */
432 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
434 int prev_changes = num_changes;
436 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
437 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
438 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
440 validate_change (object, loc,
441 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
442 : swap_condition (code),
443 GET_MODE (x), XEXP (x, 1),
444 XEXP (x, 0)),
446 x = *loc;
447 code = GET_CODE (x);
451 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
452 done the substitution, otherwise we won't. */
454 switch (code)
456 case PLUS:
457 /* If we have a PLUS whose second operand is now a CONST_INT, use
458 plus_constant to try to simplify it. */
459 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
460 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
462 return;
464 case MINUS:
465 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
467 validate_change (object, loc,
468 plus_constant (XEXP (x, 0), - INTVAL (to)),
470 return;
472 break;
474 case ZERO_EXTEND:
475 case SIGN_EXTEND:
476 /* In these cases, the operation to be performed depends on the mode
477 of the operand. If we are replacing the operand with a VOIDmode
478 constant, we lose the information. So try to simplify the operation
479 in that case. If it fails, substitute in something that we know
480 won't be recognized. */
481 if (GET_MODE (to) == VOIDmode
482 && (XEXP (x, 0) == from
483 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
484 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
485 && REGNO (XEXP (x, 0)) == REGNO (from))))
487 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
488 GET_MODE (from));
489 if (new == 0)
490 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
492 validate_change (object, loc, new, 1);
493 return;
495 break;
497 case SUBREG:
498 /* If we have a SUBREG of a register that we are replacing and we are
499 replacing it with a MEM, make a new MEM and try replacing the
500 SUBREG with it. Don't do this if the MEM has a mode-dependent address
501 or if we would be widening it. */
503 if (SUBREG_REG (x) == from
504 && GET_CODE (from) == REG
505 && GET_CODE (to) == MEM
506 && ! mode_dependent_address_p (XEXP (to, 0))
507 && ! MEM_VOLATILE_P (to)
508 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
510 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
511 enum machine_mode mode = GET_MODE (x);
512 rtx new;
514 if (BYTES_BIG_ENDIAN)
515 offset += (MIN (UNITS_PER_WORD,
516 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
517 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
519 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
520 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
521 MEM_COPY_ATTRIBUTES (new, to);
522 validate_change (object, loc, new, 1);
523 return;
525 break;
527 case ZERO_EXTRACT:
528 case SIGN_EXTRACT:
529 /* If we are replacing a register with memory, try to change the memory
530 to be the mode required for memory in extract operations (this isn't
531 likely to be an insertion operation; if it was, nothing bad will
532 happen, we might just fail in some cases). */
534 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
535 && GET_CODE (XEXP (x, 1)) == CONST_INT
536 && GET_CODE (XEXP (x, 2)) == CONST_INT
537 && ! mode_dependent_address_p (XEXP (to, 0))
538 && ! MEM_VOLATILE_P (to))
540 enum machine_mode wanted_mode = VOIDmode;
541 enum machine_mode is_mode = GET_MODE (to);
542 int pos = INTVAL (XEXP (x, 2));
544 #ifdef HAVE_extzv
545 if (code == ZERO_EXTRACT)
547 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
548 if (wanted_mode == VOIDmode)
549 wanted_mode = word_mode;
551 #endif
552 #ifdef HAVE_extv
553 if (code == SIGN_EXTRACT)
555 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
556 if (wanted_mode == VOIDmode)
557 wanted_mode = word_mode;
559 #endif
561 /* If we have a narrower mode, we can do something. */
562 if (wanted_mode != VOIDmode
563 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
565 int offset = pos / BITS_PER_UNIT;
566 rtx newmem;
568 /* If the bytes and bits are counted differently, we
569 must adjust the offset. */
570 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
571 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
572 - offset);
574 pos %= GET_MODE_BITSIZE (wanted_mode);
576 newmem = gen_rtx_MEM (wanted_mode,
577 plus_constant (XEXP (to, 0), offset));
578 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
579 MEM_COPY_ATTRIBUTES (newmem, to);
581 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
582 validate_change (object, &XEXP (x, 0), newmem, 1);
586 break;
588 default:
589 break;
592 /* For commutative or comparison operations we've already performed
593 replacements. Don't try to perform them again. */
594 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
596 fmt = GET_RTX_FORMAT (code);
597 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
599 if (fmt[i] == 'e')
600 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
601 else if (fmt[i] == 'E')
602 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
603 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
608 /* Try replacing every occurrence of FROM in INSN with TO. After all
609 changes have been made, validate by seeing if INSN is still valid. */
612 validate_replace_rtx (from, to, insn)
613 rtx from, to, insn;
615 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
616 return apply_change_group ();
619 /* Try replacing every occurrence of FROM in INSN with TO. The changes are
620 queued as a group; the caller validates them later with apply_change_group. */
622 void
623 validate_replace_rtx_group (from, to, insn)
624 rtx from, to, insn;
626 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
629 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
630 SET_DESTs. After all changes have been made, validate by seeing if
631 INSN is still valid. */
634 validate_replace_src (from, to, insn)
635 rtx from, to, insn;
637 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
638 || GET_CODE (PATTERN (insn)) != SET)
639 abort ();
641 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
642 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
643 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
644 from, to, insn);
645 return apply_change_group ();
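/* Illustrative sketch (not part of the original file): propagating a
   known constant value VAL into every use of register REG inside INSN,
   keeping the result only if INSN still matches some pattern.  */
#if 0
static int
substitute_known_constant (insn, reg, val)
     rtx insn, reg;
     HOST_WIDE_INT val;
{
  /* validate_replace_rtx rewrites PATTERN (insn) everywhere and then
     runs apply_change_group itself.  */
  return validate_replace_rtx (reg, GEN_INT (val), insn);
}
#endif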
648 #ifdef HAVE_cc0
649 /* Return 1 if the insn using CC0 set by INSN does not contain
650 any ordered tests applied to the condition codes.
651 EQ and NE tests do not count. */
654 next_insn_tests_no_inequality (insn)
655 rtx insn;
657 register rtx next = next_cc0_user (insn);
659 /* If there is no next insn, we have to take the conservative choice. */
660 if (next == 0)
661 return 0;
663 return ((GET_CODE (next) == JUMP_INSN
664 || GET_CODE (next) == INSN
665 || GET_CODE (next) == CALL_INSN)
666 && ! inequality_comparisons_p (PATTERN (next)));
669 #if 0 /* This is useless since the insn that sets the cc's
670 must be followed immediately by the use of them. */
671 /* Return 1 if the CC value set up by INSN is not used. */
674 next_insns_test_no_inequality (insn)
675 rtx insn;
677 register rtx next = NEXT_INSN (insn);
679 for (; next != 0; next = NEXT_INSN (next))
681 if (GET_CODE (next) == CODE_LABEL
682 || GET_CODE (next) == BARRIER)
683 return 1;
684 if (GET_CODE (next) == NOTE)
685 continue;
686 if (inequality_comparisons_p (PATTERN (next)))
687 return 0;
688 if (sets_cc0_p (PATTERN (next)) == 1)
689 return 1;
690 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
691 return 1;
693 return 1;
695 #endif
696 #endif
698 /* This is used by find_single_use to locate an rtx that contains exactly one
699 use of DEST, which is typically either a REG or CC0. It returns a
700 pointer to the innermost rtx expression containing DEST. Appearances of
701 DEST that are being used to totally replace it are not counted. */
703 static rtx *
704 find_single_use_1 (dest, loc)
705 rtx dest;
706 rtx *loc;
708 rtx x = *loc;
709 enum rtx_code code = GET_CODE (x);
710 rtx *result = 0;
711 rtx *this_result;
712 int i;
713 const char *fmt;
715 switch (code)
717 case CONST_INT:
718 case CONST:
719 case LABEL_REF:
720 case SYMBOL_REF:
721 case CONST_DOUBLE:
722 case CLOBBER:
723 return 0;
725 case SET:
726 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
727 of a REG that occupies all of the REG, the insn uses DEST if
728 it is mentioned in the destination or the source. Otherwise, we
729 need only check the source. */
730 if (GET_CODE (SET_DEST (x)) != CC0
731 && GET_CODE (SET_DEST (x)) != PC
732 && GET_CODE (SET_DEST (x)) != REG
733 && ! (GET_CODE (SET_DEST (x)) == SUBREG
734 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
735 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
736 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
737 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
738 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
739 break;
741 return find_single_use_1 (dest, &SET_SRC (x));
743 case MEM:
744 case SUBREG:
745 return find_single_use_1 (dest, &XEXP (x, 0));
747 default:
748 break;
751 /* If it wasn't one of the common cases above, check each expression and
752 vector of this code. Look for a unique usage of DEST. */
754 fmt = GET_RTX_FORMAT (code);
755 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
757 if (fmt[i] == 'e')
759 if (dest == XEXP (x, i)
760 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
761 && REGNO (dest) == REGNO (XEXP (x, i))))
762 this_result = loc;
763 else
764 this_result = find_single_use_1 (dest, &XEXP (x, i));
766 if (result == 0)
767 result = this_result;
768 else if (this_result)
769 /* Duplicate usage. */
770 return 0;
772 else if (fmt[i] == 'E')
774 int j;
776 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
778 if (XVECEXP (x, i, j) == dest
779 || (GET_CODE (dest) == REG
780 && GET_CODE (XVECEXP (x, i, j)) == REG
781 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
782 this_result = loc;
783 else
784 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
786 if (result == 0)
787 result = this_result;
788 else if (this_result)
789 return 0;
794 return result;
797 /* See if DEST, produced in INSN, is used only a single time in the
798 sequel. If so, return a pointer to the innermost rtx expression in which
799 it is used.
801 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
803 This routine will usually return zero either before flow is called (because
804 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
805 note can't be trusted).
807 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
808 care about REG_DEAD notes or LOG_LINKS.
810 Otherwise, we find the single use by finding an insn that has a
811 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
812 only referenced once in that insn, we know that it must be the first
813 and last insn referencing DEST. */
815 rtx *
816 find_single_use (dest, insn, ploc)
817 rtx dest;
818 rtx insn;
819 rtx *ploc;
821 rtx next;
822 rtx *result;
823 rtx link;
825 #ifdef HAVE_cc0
826 if (dest == cc0_rtx)
828 next = NEXT_INSN (insn);
829 if (next == 0
830 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
831 return 0;
833 result = find_single_use_1 (dest, &PATTERN (next));
834 if (result && ploc)
835 *ploc = next;
836 return result;
838 #endif
840 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
841 return 0;
843 for (next = next_nonnote_insn (insn);
844 next != 0 && GET_CODE (next) != CODE_LABEL;
845 next = next_nonnote_insn (next))
846 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
848 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
849 if (XEXP (link, 0) == insn)
850 break;
852 if (link)
854 result = find_single_use_1 (dest, &PATTERN (next));
855 if (ploc)
856 *ploc = next;
857 return result;
861 return 0;
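/* Illustrative sketch (not part of the original file): a combine-style
   caller asking whether the register set by INSN is used exactly once
   later on, and if so in which insn and at which location.  */
#if 0
static rtx *
single_use_of_set_dest (insn, use_insn_out)
     rtx insn;
     rtx *use_insn_out;
{
  rtx set = PATTERN (insn);	/* assumes the body is a single SET */

  if (GET_CODE (set) != SET || GET_CODE (SET_DEST (set)) != REG)
    return 0;

  /* Nonzero only when LOG_LINKS and a REG_DEAD note prove there is a
     unique later use; *USE_INSN_OUT is then that insn.  */
  return find_single_use (SET_DEST (set), insn, use_insn_out);
}
#endif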
864 /* Return 1 if OP is a valid general operand for machine mode MODE.
865 This is either a register reference, a memory reference,
866 or a constant. In the case of a memory reference, the address
867 is checked for general validity for the target machine.
869 Register and memory references must have mode MODE in order to be valid,
870 but some constants have no machine mode and are valid for any mode.
872 If MODE is VOIDmode, OP is checked for validity for whatever mode
873 it has.
875 The main use of this function is as a predicate in match_operand
876 expressions in the machine description.
878 For an explanation of this function's behavior for registers of
879 class NO_REGS, see the comment for `register_operand'. */
882 general_operand (op, mode)
883 register rtx op;
884 enum machine_mode mode;
886 register enum rtx_code code = GET_CODE (op);
887 int mode_altering_drug = 0;
889 if (mode == VOIDmode)
890 mode = GET_MODE (op);
892 /* Don't accept CONST_INT or anything similar
893 if the caller wants something floating. */
894 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
895 && GET_MODE_CLASS (mode) != MODE_INT
896 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
897 return 0;
899 if (CONSTANT_P (op))
900 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
901 #ifdef LEGITIMATE_PIC_OPERAND_P
902 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
903 #endif
904 && LEGITIMATE_CONSTANT_P (op));
906 /* Except for certain constants with VOIDmode, already checked for,
907 OP's mode must match MODE if MODE specifies a mode. */
909 if (GET_MODE (op) != mode)
910 return 0;
912 if (code == SUBREG)
914 #ifdef INSN_SCHEDULING
915 /* On machines that have insn scheduling, we want all memory
916 references to be explicit, so outlaw paradoxical SUBREGs. */
917 if (GET_CODE (SUBREG_REG (op)) == MEM
918 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
919 return 0;
920 #endif
922 op = SUBREG_REG (op);
923 code = GET_CODE (op);
924 #if 0
925 /* No longer needed, since (SUBREG (MEM...))
926 will load the MEM into a reload reg in the MEM's own mode. */
927 mode_altering_drug = 1;
928 #endif
931 if (code == REG)
932 /* A register whose class is NO_REGS is not a general operand. */
933 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
934 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
936 if (code == MEM)
938 register rtx y = XEXP (op, 0);
939 if (! volatile_ok && MEM_VOLATILE_P (op))
940 return 0;
941 if (GET_CODE (y) == ADDRESSOF)
942 return 1;
943 /* Use the mem's mode, since it will be reloaded thus. */
944 mode = GET_MODE (op);
945 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
948 /* Pretend this is an operand for now; we'll run force_operand
949 on its replacement in fixup_var_refs_1. */
950 if (code == ADDRESSOF)
951 return 1;
953 return 0;
955 win:
956 if (mode_altering_drug)
957 return ! mode_dependent_address_p (XEXP (op, 0));
958 return 1;
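/* Illustrative sketch (not part of the original file): the predicates
   are normally named by match_operand in the machine description, e.g.
   (match_operand:SI 1 "general_operand" "g"), but they can also be
   called directly on rtl.  gen_rtx_REG is assumed from the usual rtl
   constructors; hard register 0 is an arbitrary example.  */
#if 0
static int
example_predicate_calls ()
{
  rtx reg = gen_rtx_REG (SImode, 0);
  rtx mem = gen_rtx_MEM (SImode, stack_pointer_rtx);

  /* The REG satisfies register_operand, the MEM satisfies
     memory_operand, and both are general operands of SImode.  */
  return (register_operand (reg, SImode)
	  && memory_operand (mem, SImode)
	  && general_operand (reg, SImode)
	  && general_operand (mem, SImode));
}
#endif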
961 /* Return 1 if OP is a valid memory address for a memory reference
962 of mode MODE.
964 The main use of this function is as a predicate in match_operand
965 expressions in the machine description. */
968 address_operand (op, mode)
969 register rtx op;
970 enum machine_mode mode;
972 return memory_address_p (mode, op);
975 /* Return 1 if OP is a register reference of mode MODE.
976 If MODE is VOIDmode, accept a register in any mode.
978 The main use of this function is as a predicate in match_operand
979 expressions in the machine description.
981 As a special exception, registers whose class is NO_REGS are
982 not accepted by `register_operand'. The reason for this change
983 is to allow the representation of special architecture artifacts
984 (such as a condition code register) without extending the rtl
985 definitions. Since registers of class NO_REGS cannot be used
986 as registers in any case where register classes are examined,
987 it is most consistent to keep this function from accepting them. */
990 register_operand (op, mode)
991 register rtx op;
992 enum machine_mode mode;
994 if (GET_MODE (op) != mode && mode != VOIDmode)
995 return 0;
997 if (GET_CODE (op) == SUBREG)
999 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1000 because it is guaranteed to be reloaded into one.
1001 Just make sure the MEM is valid in itself.
1002 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1003 but currently it does result from (SUBREG (REG)...) where the
1004 reg went on the stack.) */
1005 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1006 return general_operand (op, mode);
1008 #ifdef CLASS_CANNOT_CHANGE_SIZE
1009 if (GET_CODE (SUBREG_REG (op)) == REG
1010 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1011 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
1012 REGNO (SUBREG_REG (op)))
1013 && (GET_MODE_SIZE (mode)
1014 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1015 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1016 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1017 return 0;
1018 #endif
1020 op = SUBREG_REG (op);
1023 /* We don't consider registers whose class is NO_REGS
1024 to be a register operand. */
1025 return (GET_CODE (op) == REG
1026 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1027 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1030 /* Return 1 for a register in Pmode; ignore the tested mode. */
1033 pmode_register_operand (op, mode)
1034 rtx op;
1035 enum machine_mode mode ATTRIBUTE_UNUSED;
1037 return register_operand (op, Pmode);
1040 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1041 or a hard register. */
1044 scratch_operand (op, mode)
1045 register rtx op;
1046 enum machine_mode mode;
1048 return (GET_MODE (op) == mode
1049 && (GET_CODE (op) == SCRATCH
1050 || (GET_CODE (op) == REG
1051 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
1054 /* Return 1 if OP is a valid immediate operand for mode MODE.
1056 The main use of this function is as a predicate in match_operand
1057 expressions in the machine description. */
1060 immediate_operand (op, mode)
1061 register rtx op;
1062 enum machine_mode mode;
1064 /* Don't accept CONST_INT or anything similar
1065 if the caller wants something floating. */
1066 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1067 && GET_MODE_CLASS (mode) != MODE_INT
1068 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1069 return 0;
1071 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1072 result in 0/1. It seems a safe assumption that this is
1073 in range for everyone. */
1074 if (GET_CODE (op) == CONSTANT_P_RTX)
1075 return 1;
1077 return (CONSTANT_P (op)
1078 && (GET_MODE (op) == mode || mode == VOIDmode
1079 || GET_MODE (op) == VOIDmode)
1080 #ifdef LEGITIMATE_PIC_OPERAND_P
1081 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1082 #endif
1083 && LEGITIMATE_CONSTANT_P (op));
1086 /* Returns 1 if OP is an operand that is a CONST_INT. */
1089 const_int_operand (op, mode)
1090 register rtx op;
1091 enum machine_mode mode ATTRIBUTE_UNUSED;
1093 return GET_CODE (op) == CONST_INT;
1096 /* Returns 1 if OP is an operand that is a constant integer or constant
1097 floating-point number. */
1100 const_double_operand (op, mode)
1101 register rtx op;
1102 enum machine_mode mode;
1104 /* Don't accept CONST_INT or anything similar
1105 if the caller wants something floating. */
1106 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1107 && GET_MODE_CLASS (mode) != MODE_INT
1108 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1109 return 0;
1111 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1112 && (mode == VOIDmode || GET_MODE (op) == mode
1113 || GET_MODE (op) == VOIDmode));
1116 /* Return 1 if OP is a general operand that is not an immediate operand. */
1119 nonimmediate_operand (op, mode)
1120 register rtx op;
1121 enum machine_mode mode;
1123 return (general_operand (op, mode) && ! CONSTANT_P (op));
1126 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1129 nonmemory_operand (op, mode)
1130 register rtx op;
1131 enum machine_mode mode;
1133 if (CONSTANT_P (op))
1135 /* Don't accept CONST_INT or anything similar
1136 if the caller wants something floating. */
1137 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1138 && GET_MODE_CLASS (mode) != MODE_INT
1139 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1140 return 0;
1142 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1143 #ifdef LEGITIMATE_PIC_OPERAND_P
1144 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1145 #endif
1146 && LEGITIMATE_CONSTANT_P (op));
1149 if (GET_MODE (op) != mode && mode != VOIDmode)
1150 return 0;
1152 if (GET_CODE (op) == SUBREG)
1154 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1155 because it is guaranteed to be reloaded into one.
1156 Just make sure the MEM is valid in itself.
1157 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1158 but currently it does result from (SUBREG (REG)...) where the
1159 reg went on the stack.) */
1160 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1161 return general_operand (op, mode);
1162 op = SUBREG_REG (op);
1165 /* We don't consider registers whose class is NO_REGS
1166 to be a register operand. */
1167 return (GET_CODE (op) == REG
1168 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1169 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1172 /* Return 1 if OP is a valid operand that stands for pushing a
1173 value of mode MODE onto the stack.
1175 The main use of this function is as a predicate in match_operand
1176 expressions in the machine description. */
1179 push_operand (op, mode)
1180 rtx op;
1181 enum machine_mode mode;
1183 if (GET_CODE (op) != MEM)
1184 return 0;
1186 if (mode != VOIDmode && GET_MODE (op) != mode)
1187 return 0;
1189 op = XEXP (op, 0);
1191 if (GET_CODE (op) != STACK_PUSH_CODE)
1192 return 0;
1194 return XEXP (op, 0) == stack_pointer_rtx;
1197 /* Return 1 if OP is a valid operand that stands for popping a
1198 value of mode MODE off the stack.
1200 The main use of this function is as a predicate in match_operand
1201 expressions in the machine description. */
1204 pop_operand (op, mode)
1205 rtx op;
1206 enum machine_mode mode;
1208 if (GET_CODE (op) != MEM)
1209 return 0;
1211 if (mode != VOIDmode && GET_MODE (op) != mode)
1212 return 0;
1214 op = XEXP (op, 0);
1216 if (GET_CODE (op) != STACK_POP_CODE)
1217 return 0;
1219 return XEXP (op, 0) == stack_pointer_rtx;
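/* Illustrative sketch (not part of the original file): on a target where
   STACK_GROWS_DOWNWARD is defined, a push of an SImode value is
   (mem:SI (pre_dec (reg sp))), which is exactly what push_operand
   accepts.  gen_rtx_PRE_DEC is assumed from the generated rtl
   constructors.  */
#if 0
static int
example_push_rtx ()
{
  rtx addr = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
  rtx push = gen_rtx_MEM (SImode, addr);

  /* Nonzero when STACK_PUSH_CODE is PRE_DEC (see the definition at the
     top of this file).  */
  return push_operand (push, SImode);
}
#endif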
1222 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1225 memory_address_p (mode, addr)
1226 enum machine_mode mode;
1227 register rtx addr;
1229 if (GET_CODE (addr) == ADDRESSOF)
1230 return 1;
1232 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1233 return 0;
1235 win:
1236 return 1;
1239 /* Return 1 if OP is a valid memory reference with mode MODE,
1240 including a valid address.
1242 The main use of this function is as a predicate in match_operand
1243 expressions in the machine description. */
1246 memory_operand (op, mode)
1247 register rtx op;
1248 enum machine_mode mode;
1250 rtx inner;
1252 if (! reload_completed)
1253 /* Note that no SUBREG is a memory operand before end of reload pass,
1254 because (SUBREG (MEM...)) forces reloading into a register. */
1255 return GET_CODE (op) == MEM && general_operand (op, mode);
1257 if (mode != VOIDmode && GET_MODE (op) != mode)
1258 return 0;
1260 inner = op;
1261 if (GET_CODE (inner) == SUBREG)
1262 inner = SUBREG_REG (inner);
1264 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1267 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1268 that is, a memory reference whose address is a general_operand. */
1271 indirect_operand (op, mode)
1272 register rtx op;
1273 enum machine_mode mode;
1275 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1276 if (! reload_completed
1277 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1279 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1280 rtx inner = SUBREG_REG (op);
1282 if (BYTES_BIG_ENDIAN)
1283 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1284 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1286 if (mode != VOIDmode && GET_MODE (op) != mode)
1287 return 0;
1289 /* The only way that we can have a general_operand as the resulting
1290 address is if OFFSET is zero and the address already is an operand
1291 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1292 operand. */
1294 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1295 || (GET_CODE (XEXP (inner, 0)) == PLUS
1296 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1297 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1298 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1301 return (GET_CODE (op) == MEM
1302 && memory_operand (op, mode)
1303 && general_operand (XEXP (op, 0), Pmode));
1306 /* Return 1 if this is a comparison operator. This allows the use of
1307 MATCH_OPERATOR to recognize all the branch insns. */
1310 comparison_operator (op, mode)
1311 register rtx op;
1312 enum machine_mode mode;
1314 return ((mode == VOIDmode || GET_MODE (op) == mode)
1315 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1318 /* If BODY is an insn body that uses ASM_OPERANDS,
1319 return the number of operands (both input and output) in the insn.
1320 Otherwise return -1. */
1323 asm_noperands (body)
1324 rtx body;
1326 if (GET_CODE (body) == ASM_OPERANDS)
1327 /* No output operands: return number of input operands. */
1328 return ASM_OPERANDS_INPUT_LENGTH (body);
1329 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1330 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1331 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1332 else if (GET_CODE (body) == PARALLEL
1333 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1334 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1336 /* Multiple output operands, or 1 output plus some clobbers:
1337 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1338 int i;
1339 int n_sets;
1341 /* Count backwards through CLOBBERs to determine number of SETs. */
1342 for (i = XVECLEN (body, 0); i > 0; i--)
1344 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1345 break;
1346 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1347 return -1;
1350 /* N_SETS is now number of output operands. */
1351 n_sets = i;
1353 /* Verify that all the SETs we have
1354 came from a single original asm_operands insn
1355 (so that invalid combinations are blocked). */
1356 for (i = 0; i < n_sets; i++)
1358 rtx elt = XVECEXP (body, 0, i);
1359 if (GET_CODE (elt) != SET)
1360 return -1;
1361 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1362 return -1;
1363 /* If these ASM_OPERANDS rtx's came from different original insns
1364 then they aren't allowed together. */
1365 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1366 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1367 return -1;
1369 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1370 + n_sets);
1372 else if (GET_CODE (body) == PARALLEL
1373 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1375 /* 0 outputs, but some clobbers:
1376 body is [(asm_operands ...) (clobber (reg ...))...]. */
1377 int i;
1379 /* Make sure all the other parallel things really are clobbers. */
1380 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1381 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1382 return -1;
1384 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1386 else
1387 return -1;
1390 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1391 copy its operands (both input and output) into the vector OPERANDS,
1392 the locations of the operands within the insn into the vector OPERAND_LOCS,
1393 and the constraints for the operands into CONSTRAINTS.
1394 Write the modes of the operands into MODES.
1395 Return the assembler-template.
1397 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1398 we don't store that info. */
1400 char *
1401 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1402 rtx body;
1403 rtx *operands;
1404 rtx **operand_locs;
1405 const char **constraints;
1406 enum machine_mode *modes;
1408 register int i;
1409 int noperands;
1410 char *template = 0;
1412 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1414 rtx asmop = SET_SRC (body);
1415 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1417 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1419 for (i = 1; i < noperands; i++)
1421 if (operand_locs)
1422 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1423 if (operands)
1424 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1425 if (constraints)
1426 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1427 if (modes)
1428 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1431 /* The output is in the SET.
1432 Its constraint is in the ASM_OPERANDS itself. */
1433 if (operands)
1434 operands[0] = SET_DEST (body);
1435 if (operand_locs)
1436 operand_locs[0] = &SET_DEST (body);
1437 if (constraints)
1438 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1439 if (modes)
1440 modes[0] = GET_MODE (SET_DEST (body));
1441 template = ASM_OPERANDS_TEMPLATE (asmop);
1443 else if (GET_CODE (body) == ASM_OPERANDS)
1445 rtx asmop = body;
1446 /* No output operands: BODY is (asm_operands ....). */
1448 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1450 /* The input operands are found in the 1st element vector. */
1451 /* Constraints for inputs are in the 2nd element vector. */
1452 for (i = 0; i < noperands; i++)
1454 if (operand_locs)
1455 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1456 if (operands)
1457 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1458 if (constraints)
1459 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1460 if (modes)
1461 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1463 template = ASM_OPERANDS_TEMPLATE (asmop);
1465 else if (GET_CODE (body) == PARALLEL
1466 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1468 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1469 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1470 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1471 int nout = 0; /* Does not include CLOBBERs. */
1473 /* At least one output, plus some CLOBBERs. */
1475 /* The outputs are in the SETs.
1476 Their constraints are in the ASM_OPERANDS itself. */
1477 for (i = 0; i < nparallel; i++)
1479 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1480 break; /* Past last SET */
1482 if (operands)
1483 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1484 if (operand_locs)
1485 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1486 if (constraints)
1487 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1488 if (modes)
1489 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1490 nout++;
1493 for (i = 0; i < nin; i++)
1495 if (operand_locs)
1496 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1497 if (operands)
1498 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1499 if (constraints)
1500 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1501 if (modes)
1502 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1505 template = ASM_OPERANDS_TEMPLATE (asmop);
1507 else if (GET_CODE (body) == PARALLEL
1508 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1510 /* No outputs, but some CLOBBERs. */
1512 rtx asmop = XVECEXP (body, 0, 0);
1513 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1515 for (i = 0; i < nin; i++)
1517 if (operand_locs)
1518 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1519 if (operands)
1520 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1521 if (constraints)
1522 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1523 if (modes)
1524 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1527 template = ASM_OPERANDS_TEMPLATE (asmop);
1530 return template;
1533 /* Check if an asm_operand matches its constraints.
1534 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1537 asm_operand_ok (op, constraint)
1538 rtx op;
1539 const char *constraint;
1541 int result = 0;
1543 /* Use constrain_operands after reload. */
1544 if (reload_completed)
1545 abort ();
1547 while (*constraint)
1549 switch (*constraint++)
1551 case '=':
1552 case '+':
1553 case '*':
1554 case '%':
1555 case '?':
1556 case '!':
1557 case '#':
1558 case '&':
1559 case ',':
1560 break;
1562 case '0': case '1': case '2': case '3': case '4':
1563 case '5': case '6': case '7': case '8': case '9':
1564 /* For best results, our caller should have given us the
1565 proper matching constraint, but we can't actually fail
1566 the check if they didn't. Indicate that results are
1567 inconclusive. */
1568 result = -1;
1569 break;
1571 case 'p':
1572 if (address_operand (op, VOIDmode))
1573 return 1;
1574 break;
1576 case 'm':
1577 case 'V': /* non-offsettable */
1578 if (memory_operand (op, VOIDmode))
1579 return 1;
1580 break;
1582 case 'o': /* offsettable */
1583 if (offsettable_nonstrict_memref_p (op))
1584 return 1;
1585 break;
1587 case '<':
1588 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1589 excepting those that expand_call created. Further, on some
1590 machines which do not have generalized auto inc/dec, an inc/dec
1591 is not a memory_operand.
1593 Match any memory and hope things are resolved after reload. */
1595 if (GET_CODE (op) == MEM
1596 && (1
1597 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1598 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1599 return 1;
1600 break;
1602 case '>':
1603 if (GET_CODE (op) == MEM
1604 && (1
1605 || GET_CODE (XEXP (op, 0)) == PRE_INC
1606 || GET_CODE (XEXP (op, 0)) == POST_INC))
1607 return 1;
1608 break;
1610 case 'E':
1611 #ifndef REAL_ARITHMETIC
1612 /* Match any floating double constant, but only if
1613 we can examine the bits of it reliably. */
1614 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1615 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1616 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1617 break;
1618 #endif
1619 /* FALLTHRU */
1621 case 'F':
1622 if (GET_CODE (op) == CONST_DOUBLE)
1623 return 1;
1624 break;
1626 case 'G':
1627 if (GET_CODE (op) == CONST_DOUBLE
1628 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1629 return 1;
1630 break;
1631 case 'H':
1632 if (GET_CODE (op) == CONST_DOUBLE
1633 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1634 return 1;
1635 break;
1637 case 's':
1638 if (GET_CODE (op) == CONST_INT
1639 || (GET_CODE (op) == CONST_DOUBLE
1640 && GET_MODE (op) == VOIDmode))
1641 break;
1642 /* FALLTHRU */
1644 case 'i':
1645 if (CONSTANT_P (op)
1646 #ifdef LEGITIMATE_PIC_OPERAND_P
1647 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1648 #endif
1650 return 1;
1651 break;
1653 case 'n':
1654 if (GET_CODE (op) == CONST_INT
1655 || (GET_CODE (op) == CONST_DOUBLE
1656 && GET_MODE (op) == VOIDmode))
1657 return 1;
1658 break;
1660 case 'I':
1661 if (GET_CODE (op) == CONST_INT
1662 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1663 return 1;
1664 break;
1665 case 'J':
1666 if (GET_CODE (op) == CONST_INT
1667 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1668 return 1;
1669 break;
1670 case 'K':
1671 if (GET_CODE (op) == CONST_INT
1672 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1673 return 1;
1674 break;
1675 case 'L':
1676 if (GET_CODE (op) == CONST_INT
1677 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1678 return 1;
1679 break;
1680 case 'M':
1681 if (GET_CODE (op) == CONST_INT
1682 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1683 return 1;
1684 break;
1685 case 'N':
1686 if (GET_CODE (op) == CONST_INT
1687 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1688 return 1;
1689 break;
1690 case 'O':
1691 if (GET_CODE (op) == CONST_INT
1692 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1693 return 1;
1694 break;
1695 case 'P':
1696 if (GET_CODE (op) == CONST_INT
1697 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1698 return 1;
1699 break;
1701 case 'X':
1702 return 1;
1704 case 'g':
1705 if (general_operand (op, VOIDmode))
1706 return 1;
1707 break;
1709 #ifdef EXTRA_CONSTRAINT
1710 case 'Q':
1711 if (EXTRA_CONSTRAINT (op, 'Q'))
1712 return 1;
1713 break;
1714 case 'R':
1715 if (EXTRA_CONSTRAINT (op, 'R'))
1716 return 1;
1717 break;
1718 case 'S':
1719 if (EXTRA_CONSTRAINT (op, 'S'))
1720 return 1;
1721 break;
1722 case 'T':
1723 if (EXTRA_CONSTRAINT (op, 'T'))
1724 return 1;
1725 break;
1726 case 'U':
1727 if (EXTRA_CONSTRAINT (op, 'U'))
1728 return 1;
1729 break;
1730 #endif
1732 case 'r':
1733 default:
1734 if (GET_MODE (op) == BLKmode)
1735 break;
1736 if (register_operand (op, VOIDmode))
1737 return 1;
1738 break;
1742 return result;
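/* Illustrative sketch (not part of the original file): asking whether an
   operand is acceptable for an asm constraint string such as "rm"
   (register or memory).  Only meaningful before reload, as the abort
   above enforces.  */
#if 0
static int
operand_ok_for_rm (op)
     rtx op;
{
  /* asm_operand_ok returns > 0 for ok, 0 for bad and < 0 when the
     answer is inconclusive (e.g. a matching-digit constraint).  */
  return asm_operand_ok (op, "rm") > 0;
}
#endif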
1745 /* Given an rtx *P, if it is a sum containing an integer constant term,
1746 return the location (type rtx *) of the pointer to that constant term.
1747 Otherwise, return a null pointer. */
1749 static rtx *
1750 find_constant_term_loc (p)
1751 rtx *p;
1753 register rtx *tem;
1754 register enum rtx_code code = GET_CODE (*p);
1756 /* If *P IS such a constant term, P is its location. */
1758 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1759 || code == CONST)
1760 return p;
1762 /* Otherwise, if not a sum, it has no constant term. */
1764 if (GET_CODE (*p) != PLUS)
1765 return 0;
1767 /* If one of the summands is constant, return its location. */
1769 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1770 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1771 return p;
1773 /* Otherwise, check each summand for containing a constant term. */
1775 if (XEXP (*p, 0) != 0)
1777 tem = find_constant_term_loc (&XEXP (*p, 0));
1778 if (tem != 0)
1779 return tem;
1782 if (XEXP (*p, 1) != 0)
1784 tem = find_constant_term_loc (&XEXP (*p, 1));
1785 if (tem != 0)
1786 return tem;
1789 return 0;
1792 /* Return 1 if OP is a memory reference
1793 whose address contains no side effects
1794 and remains valid after the addition
1795 of a positive integer less than the
1796 size of the object being referenced.
1798 We assume that the original address is valid and do not check it.
1800 This uses strict_memory_address_p as a subroutine, so
1801 don't use it before reload. */
1804 offsettable_memref_p (op)
1805 rtx op;
1807 return ((GET_CODE (op) == MEM)
1808 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1811 /* Similar, but don't require a strictly valid mem ref:
1812 consider pseudo-regs valid as index or base regs. */
1815 offsettable_nonstrict_memref_p (op)
1816 rtx op;
1818 return ((GET_CODE (op) == MEM)
1819 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1822 /* Return 1 if Y is a memory address which contains no side effects
1823 and would remain valid after the addition of a positive integer
1824 less than the size of that mode.
1826 We assume that the original address is valid and do not check it.
1827 We do check that it is valid for narrower modes.
1829 If STRICTP is nonzero, we require a strictly valid address,
1830 for the sake of use in reload.c. */
1833 offsettable_address_p (strictp, mode, y)
1834 int strictp;
1835 enum machine_mode mode;
1836 register rtx y;
1838 register enum rtx_code ycode = GET_CODE (y);
1839 register rtx z;
1840 rtx y1 = y;
1841 rtx *y2;
1842 int (*addressp) PROTO ((enum machine_mode, rtx)) =
1843 (strictp ? strict_memory_address_p : memory_address_p);
1845 if (CONSTANT_ADDRESS_P (y))
1846 return 1;
1848 /* Adjusting an offsettable address involves changing to a narrower mode.
1849 Make sure that's OK. */
1851 if (mode_dependent_address_p (y))
1852 return 0;
1854 /* If the expression contains a constant term,
1855 see if it remains valid when max possible offset is added. */
1857 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1859 int good;
1861 y1 = *y2;
1862 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1863 /* Use QImode because an odd displacement may be automatically invalid
1864 for any wider mode. But it should be valid for a single byte. */
1865 good = (*addressp) (QImode, y);
1867 /* In any case, restore old contents of memory. */
1868 *y2 = y1;
1869 return good;
1872 if (ycode == PRE_DEC || ycode == PRE_INC
1873 || ycode == POST_DEC || ycode == POST_INC)
1874 return 0;
1876 /* The offset added here is chosen as the maximum offset that
1877 any instruction could need to add when operating on something
1878 of the specified mode. We assume that if Y and Y+c are
1879 valid addresses then so is Y+d for all 0<d<c. */
1881 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1883 /* Use QImode because an odd displacement may be automatically invalid
1884 for any wider mode. But it should be valid for a single byte. */
1885 return (*addressp) (QImode, z);
1888 /* Return 1 if ADDR is an address-expression whose effect depends
1889 on the mode of the memory reference it is used in.
1891 Autoincrement addressing is a typical example of mode-dependence
1892 because the amount of the increment depends on the mode. */
1895 mode_dependent_address_p (addr)
1896 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1898 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1899 return 0;
1900 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1901 win: ATTRIBUTE_UNUSED_LABEL
1902 return 1;
1905 /* Return 1 if OP is a general operand
1906 other than a memory ref with a mode dependent address. */
1909 mode_independent_operand (op, mode)
1910 enum machine_mode mode;
1911 rtx op;
1913 rtx addr;
1915 if (! general_operand (op, mode))
1916 return 0;
1918 if (GET_CODE (op) != MEM)
1919 return 1;
1921 addr = XEXP (op, 0);
1922 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1923 return 1;
1924 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1925 lose: ATTRIBUTE_UNUSED_LABEL
1926 return 0;
1929 /* Given an operand OP that is a valid memory reference
1930 which satisfies offsettable_memref_p,
1931 return a new memory reference whose address has been adjusted by OFFSET.
1932 OFFSET should be positive and less than the size of the object referenced.
1936 adj_offsettable_operand (op, offset)
1937 rtx op;
1938 int offset;
1940 register enum rtx_code code = GET_CODE (op);
1942 if (code == MEM)
1944 register rtx y = XEXP (op, 0);
1945 register rtx new;
1947 if (CONSTANT_ADDRESS_P (y))
1949 new = gen_rtx_MEM (GET_MODE (op),
1950 plus_constant_for_output (y, offset));
1951 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1952 return new;
1955 if (GET_CODE (y) == PLUS)
1957 rtx z = y;
1958 register rtx *const_loc;
1960 op = copy_rtx (op);
1961 z = XEXP (op, 0);
1962 const_loc = find_constant_term_loc (&z);
1963 if (const_loc)
1965 *const_loc = plus_constant_for_output (*const_loc, offset);
1966 return op;
1970 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1971 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1972 return new;
1974 abort ();
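/* Illustrative sketch (not part of the original file): the usual pairing
   of offsettable_memref_p with adj_offsettable_operand, here to form a
   reference to the second word of a two-word memory operand.  */
#if 0
static rtx
second_word_of_memref (op)
     rtx op;
{
  if (! offsettable_memref_p (op))
    abort ();

  /* A copy of OP whose address has been advanced by one word; a real
     back end would usually also narrow the mode of the result.  */
  return adj_offsettable_operand (op, UNITS_PER_WORD);
}
#endif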
1977 /* Analyze INSN and fill in recog_data. */
1979 void
1980 extract_insn (insn)
1981 rtx insn;
1983 int i;
1984 int icode;
1985 int noperands;
1986 rtx body = PATTERN (insn);
1988 recog_data.n_operands = 0;
1989 recog_data.n_alternatives = 0;
1990 recog_data.n_dups = 0;
1992 switch (GET_CODE (body))
1994 case USE:
1995 case CLOBBER:
1996 case ASM_INPUT:
1997 case ADDR_VEC:
1998 case ADDR_DIFF_VEC:
1999 return;
2001 case SET:
2002 case PARALLEL:
2003 case ASM_OPERANDS:
2004 recog_data.n_operands = noperands = asm_noperands (body);
2005 if (noperands >= 0)
2007 /* This insn is an `asm' with operands. */
2009 /* expand_asm_operands makes sure there aren't too many operands. */
2010 if (noperands > MAX_RECOG_OPERANDS)
2011 abort ();
2013 /* Now get the operand values and constraints out of the insn. */
2014 decode_asm_operands (body, recog_data.operand,
2015 recog_data.operand_loc,
2016 recog_data.constraints,
2017 recog_data.operand_mode);
2018 if (noperands > 0)
2020 const char *p = recog_data.constraints[0];
2021 recog_data.n_alternatives = 1;
2022 while (*p)
2023 recog_data.n_alternatives += (*p++ == ',');
2025 break;
2028 /* FALLTHROUGH */
2030 default:
2031 /* Ordinary insn: recognize it, get the operands via insn_extract
2032 and get the constraints. */
2034 icode = recog_memoized (insn);
2035 if (icode < 0)
2036 fatal_insn_not_found (insn);
2038 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2039 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2040 recog_data.n_dups = insn_data[icode].n_dups;
2042 insn_extract (insn);
2044 for (i = 0; i < noperands; i++)
2046 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2047 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2050 for (i = 0; i < noperands; i++)
2051 recog_data.operand_type[i]
2052 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2053 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2054 : OP_IN);
2056 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2057 abort ();
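/* Illustrative sketch (not part of the original file): after
   extract_insn the operands, their modes and their constraints are in
   recog_data, and constrain_operands (1) reports through
   which_alternative which alternative is satisfied -- the same idiom
   check_asm_operands uses earlier in this file.  */
#if 0
static int
insn_meets_constraints_p (insn)
     rtx insn;
{
  int i;

  extract_insn (insn);

  /* Each operand and its mode can be inspected here; as an arbitrary
     example, reject any insn with a BLKmode operand.  */
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_mode[i] == BLKmode)
      return 0;

  constrain_operands (1);
  return which_alternative >= 0;
}
#endif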
2060 /* After calling extract_insn, you can use this function to extract some
2061 information from the constraint strings into a more usable form.
2062 The collected data is stored in recog_op_alt. */
2063 void
2064 preprocess_constraints ()
2066 int i;
2068 memset (recog_op_alt, 0, sizeof recog_op_alt);
2069 for (i = 0; i < recog_data.n_operands; i++)
2071 int j;
2072 struct operand_alternative *op_alt;
2073 const char *p = recog_data.constraints[i];
2075 op_alt = recog_op_alt[i];
2077 for (j = 0; j < recog_data.n_alternatives; j++)
2079 op_alt[j].class = NO_REGS;
2080 op_alt[j].constraint = p;
2081 op_alt[j].matches = -1;
2082 op_alt[j].matched = -1;
2084 if (*p == '\0' || *p == ',')
2086 op_alt[j].anything_ok = 1;
2087 continue;
2090 for (;;)
2092 char c = *p++;
2093 if (c == '#')
2095 c = *p++;
2096 while (c != ',' && c != '\0');
2097 if (c == ',' || c == '\0')
2098 break;
2100 switch (c)
2102 case '=': case '+': case '*': case '%':
2103 case 'E': case 'F': case 'G': case 'H':
2104 case 's': case 'i': case 'n':
2105 case 'I': case 'J': case 'K': case 'L':
2106 case 'M': case 'N': case 'O': case 'P':
2107 #ifdef EXTRA_CONSTRAINT
2108 case 'Q': case 'R': case 'S': case 'T': case 'U':
2109 #endif
2110 /* These don't say anything we care about. */
2111 break;
2113 case '?':
2114 op_alt[j].reject += 6;
2115 break;
2116 case '!':
2117 op_alt[j].reject += 600;
2118 break;
2119 case '&':
2120 op_alt[j].earlyclobber = 1;
2121 break;
2123 case '0': case '1': case '2': case '3': case '4':
2124 case '5': case '6': case '7': case '8': case '9':
2125 op_alt[j].matches = c - '0';
2126 recog_op_alt[op_alt[j].matches][j].matched = i;
2127 break;
2129 case 'm':
2130 op_alt[j].memory_ok = 1;
2131 break;
2132 case '<':
2133 op_alt[j].decmem_ok = 1;
2134 break;
2135 case '>':
2136 op_alt[j].incmem_ok = 1;
2137 break;
2138 case 'V':
2139 op_alt[j].nonoffmem_ok = 1;
2140 break;
2141 case 'o':
2142 op_alt[j].offmem_ok = 1;
2143 break;
2144 case 'X':
2145 op_alt[j].anything_ok = 1;
2146 break;
2148 case 'p':
2149 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2150 break;
2152 case 'g': case 'r':
2153 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2154 break;
2156 default:
2157 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2158 break;
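/* Worked example (illustrative only): for an operand i whose
   constraint string is "r,m", the loop above fills in two
   alternatives:

	recog_op_alt[i][0].class     == GENERAL_REGS   (from 'r')
	recog_op_alt[i][1].memory_ok == 1              (from 'm')

   A caller is expected to run

	extract_insn (insn);
	preprocess_constraints ();

   and then consult recog_op_alt instead of reparsing the constraint
   strings by hand.  */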
2165 /* Check the operands of an insn against the insn's operand constraints
2166 and return 1 if they are valid.
2167 The information about the insn's operands, constraints, operand modes
2168 etc. is obtained from the global variables set up by extract_insn.
2170 WHICH_ALTERNATIVE is set to a number which indicates which
2171 alternative of constraints was matched: 0 for the first alternative,
2172 1 for the next, etc.
2174 In addition, when two operands are required to match
2175 and it happens that the output operand is (reg) while the
2176 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2177 make the output operand look like the input.
2178 This is because the output operand is the one the template will print.
2180 This is used in final, just before printing the assembler code and by
2181 the routines that determine an insn's attribute.
2183 If STRICT is positive, it means that we have been
2184 called after reload has been completed. In that case, we must
2185 do all checks strictly. If it is zero, it means that we have been called
2186 before reload has completed. In that case, we first try to see if we can
2187 find an alternative that matches strictly. If not, we try again, this
2188 time assuming that reload will fix up the insn. This provides a "best
2189 guess" for the alternative and is used to compute attributes of insns prior
2190 to reload. A negative value of STRICT is used for this internal call. */
2192 struct funny_match
2194 int this, other;
2198 constrain_operands (strict)
2199 int strict;
2201 const char *constraints[MAX_RECOG_OPERANDS];
2202 int matching_operands[MAX_RECOG_OPERANDS];
2203 int earlyclobber[MAX_RECOG_OPERANDS];
2204 register int c;
2206 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2207 int funny_match_index;
2209 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2210 return 1;
2212 for (c = 0; c < recog_data.n_operands; c++)
2214 constraints[c] = recog_data.constraints[c];
2215 matching_operands[c] = -1;
2218 which_alternative = 0;
2220 while (which_alternative < recog_data.n_alternatives)
2222 register int opno;
2223 int lose = 0;
2224 funny_match_index = 0;
2226 for (opno = 0; opno < recog_data.n_operands; opno++)
2228 register rtx op = recog_data.operand[opno];
2229 enum machine_mode mode = GET_MODE (op);
2230 register const char *p = constraints[opno];
2231 int offset = 0;
2232 int win = 0;
2233 int val;
2235 earlyclobber[opno] = 0;
2237 /* A unary operator may be accepted by the predicate, but it
2238 is irrelevant for matching constraints. */
2239 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2240 op = XEXP (op, 0);
2242 if (GET_CODE (op) == SUBREG)
2244 if (GET_CODE (SUBREG_REG (op)) == REG
2245 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2246 offset = SUBREG_WORD (op);
2247 op = SUBREG_REG (op);
2250 /* An empty constraint or empty alternative
2251 allows anything which matched the pattern. */
2252 if (*p == 0 || *p == ',')
2253 win = 1;
2255 while (*p && (c = *p++) != ',')
2256 switch (c)
2258 case '?': case '!': case '*': case '%':
2259 case '=': case '+':
2260 break;
2262 case '#':
2263 /* Ignore rest of this alternative as far as
2264 constraint checking is concerned. */
2265 while (*p && *p != ',')
2266 p++;
2267 break;
2269 case '&':
2270 earlyclobber[opno] = 1;
2271 break;
2273 case '0': case '1': case '2': case '3': case '4':
2274 case '5': case '6': case '7': case '8': case '9':
2276 /* This operand must be the same as a previous one.
2277 This kind of constraint is used for instructions such
2278 as add when they take only two operands.
2280 Note that the lower-numbered operand is passed first.
2282 If we are not testing strictly, assume that this constraint
2283 will be satisfied. */
2284 if (strict < 0)
2285 val = 1;
2286 else
2288 rtx op1 = recog_data.operand[c - '0'];
2289 rtx op2 = recog_data.operand[opno];
2291 /* A unary operator may be accepted by the predicate,
2292 but it is irrelevant for matching constraints. */
2293 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2294 op1 = XEXP (op1, 0);
2295 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2296 op2 = XEXP (op2, 0);
2298 val = operands_match_p (op1, op2);
2301 matching_operands[opno] = c - '0';
2302 matching_operands[c - '0'] = opno;
2304 if (val != 0)
2305 win = 1;
2306 /* If output is *x and input is *--x,
2307 arrange later to change the output to *--x as well,
2308 since the output op is the one that will be printed. */
2309 if (val == 2 && strict > 0)
2311 funny_match[funny_match_index].this = opno;
2312 funny_match[funny_match_index++].other = c - '0';
2314 break;
2316 case 'p':
2317 /* p is used for address_operands. When we are called by
2318 gen_reload, no one will have checked that the address is
2319 strictly valid, i.e., that all pseudos requiring hard regs
2320 have gotten them. */
2321 if (strict <= 0
2322 || (strict_memory_address_p (recog_data.operand_mode[opno],
2323 op)))
2324 win = 1;
2325 break;
2327 /* No need to check general_operand again;
2328 it was done in insn-recog.c. */
2329 case 'g':
2330 /* Anything goes unless it is a REG that really has a hard reg
2331 and that hard reg is not in the class GENERAL_REGS. */
2332 if (strict < 0
2333 || GENERAL_REGS == ALL_REGS
2334 || GET_CODE (op) != REG
2335 || (reload_in_progress
2336 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2337 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2338 win = 1;
2339 break;
2341 case 'r':
2342 if (strict < 0
2343 || (strict == 0
2344 && GET_CODE (op) == REG
2345 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2346 || (strict == 0 && GET_CODE (op) == SCRATCH)
2347 || (GET_CODE (op) == REG
2348 && ((GENERAL_REGS == ALL_REGS
2349 && REGNO (op) < FIRST_PSEUDO_REGISTER)
2350 || reg_fits_class_p (op, GENERAL_REGS,
2351 offset, mode))))
2352 win = 1;
2353 break;
2355 case 'X':
2356 /* This is used for a MATCH_SCRATCH in the cases when
2357 we don't actually need anything. So anything goes
2358 any time. */
2359 win = 1;
2360 break;
2362 case 'm':
2363 if (GET_CODE (op) == MEM
2364 /* Before reload, accept what reload can turn into mem. */
2365 || (strict < 0 && CONSTANT_P (op))
2366 /* During reload, accept a pseudo */
2367 || (reload_in_progress && GET_CODE (op) == REG
2368 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2369 win = 1;
2370 break;
2372 case '<':
2373 if (GET_CODE (op) == MEM
2374 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2375 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2376 win = 1;
2377 break;
2379 case '>':
2380 if (GET_CODE (op) == MEM
2381 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2382 || GET_CODE (XEXP (op, 0)) == POST_INC))
2383 win = 1;
2384 break;
2386 case 'E':
2387 #ifndef REAL_ARITHMETIC
2388 /* Match any CONST_DOUBLE, but only if
2389 we can examine the bits of it reliably. */
2390 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2391 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2392 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2393 break;
2394 #endif
2395 if (GET_CODE (op) == CONST_DOUBLE)
2396 win = 1;
2397 break;
2399 case 'F':
2400 if (GET_CODE (op) == CONST_DOUBLE)
2401 win = 1;
2402 break;
2404 case 'G':
2405 case 'H':
2406 if (GET_CODE (op) == CONST_DOUBLE
2407 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2408 win = 1;
2409 break;
2411 case 's':
2412 if (GET_CODE (op) == CONST_INT
2413 || (GET_CODE (op) == CONST_DOUBLE
2414 && GET_MODE (op) == VOIDmode))
2415 break;
2416 case 'i':
2417 if (CONSTANT_P (op))
2418 win = 1;
2419 break;
2421 case 'n':
2422 if (GET_CODE (op) == CONST_INT
2423 || (GET_CODE (op) == CONST_DOUBLE
2424 && GET_MODE (op) == VOIDmode))
2425 win = 1;
2426 break;
2428 case 'I':
2429 case 'J':
2430 case 'K':
2431 case 'L':
2432 case 'M':
2433 case 'N':
2434 case 'O':
2435 case 'P':
2436 if (GET_CODE (op) == CONST_INT
2437 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2438 win = 1;
2439 break;
2441 #ifdef EXTRA_CONSTRAINT
2442 case 'Q':
2443 case 'R':
2444 case 'S':
2445 case 'T':
2446 case 'U':
2447 if (EXTRA_CONSTRAINT (op, c))
2448 win = 1;
2449 break;
2450 #endif
2452 case 'V':
2453 if (GET_CODE (op) == MEM
2454 && ((strict > 0 && ! offsettable_memref_p (op))
2455 || (strict < 0
2456 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2457 || (reload_in_progress
2458 && !(GET_CODE (op) == REG
2459 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2460 win = 1;
2461 break;
2463 case 'o':
2464 if ((strict > 0 && offsettable_memref_p (op))
2465 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2466 /* Before reload, accept what reload can handle. */
2467 || (strict < 0
2468 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2469 /* During reload, accept a pseudo */
2470 || (reload_in_progress && GET_CODE (op) == REG
2471 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2472 win = 1;
2473 break;
2475 default:
2476 if (strict < 0
2477 || (strict == 0
2478 && GET_CODE (op) == REG
2479 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2480 || (strict == 0 && GET_CODE (op) == SCRATCH)
2481 || (GET_CODE (op) == REG
2482 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
2483 offset, mode)))
2484 win = 1;
2487 constraints[opno] = p;
2488 /* If this operand did not win somehow,
2489 this alternative loses. */
2490 if (! win)
2491 lose = 1;
2493 /* This alternative won; the operands are ok.
2494 Change whichever operands this alternative says to change. */
2495 if (! lose)
2497 int opno, eopno;
2499 /* See if any earlyclobber operand conflicts with some other
2500 operand. */
2502 if (strict > 0)
2503 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2504 /* Ignore earlyclobber operands now in memory,
2505 because we would often report failure when we have
2506 two memory operands, one of which was formerly a REG. */
2507 if (earlyclobber[eopno]
2508 && GET_CODE (recog_data.operand[eopno]) == REG)
2509 for (opno = 0; opno < recog_data.n_operands; opno++)
2510 if ((GET_CODE (recog_data.operand[opno]) == MEM
2511 || recog_data.operand_type[opno] != OP_OUT)
2512 && opno != eopno
2513 /* Ignore things like match_operator operands. */
2514 && *recog_data.constraints[opno] != 0
2515 && ! (matching_operands[opno] == eopno
2516 && operands_match_p (recog_data.operand[opno],
2517 recog_data.operand[eopno]))
2518 && ! safe_from_earlyclobber (recog_data.operand[opno],
2519 recog_data.operand[eopno]))
2520 lose = 1;
2522 if (! lose)
2524 while (--funny_match_index >= 0)
2526 recog_data.operand[funny_match[funny_match_index].other]
2527 = recog_data.operand[funny_match[funny_match_index].this];
2530 return 1;
2534 which_alternative++;
2537 /* If we are about to reject this, but we are not to test strictly,
2538 try a very loose test. Only return failure if it fails also. */
2539 if (strict == 0)
2540 return constrain_operands (-1);
2541 else
2542 return 0;
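/* Calling sketch (added for illustration, not from the original
   source): a consumer such as final might verify an insn with

	extract_insn (insn);
	if (! constrain_operands (reload_completed))
	  fatal_insn_not_found (insn);

   after which which_alternative records the constraint alternative
   that matched.  The error handling shown here is only an assumption
   of this sketch; the real call sites live in final and the attribute
   routines mentioned above.  */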
2545 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2546 is a hard reg in class CLASS when its regno is offset by OFFSET
2547 and changed to mode MODE.
2548 If REG occupies multiple hard regs, all of them must be in CLASS. */
2551 reg_fits_class_p (operand, class, offset, mode)
2552 rtx operand;
2553 register enum reg_class class;
2554 int offset;
2555 enum machine_mode mode;
2557 register int regno = REGNO (operand);
2558 if (regno < FIRST_PSEUDO_REGISTER
2559 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2560 regno + offset))
2562 register int sr;
2563 regno += offset;
2564 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2565 sr > 0; sr--)
2566 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2567 regno + sr))
2568 break;
2569 return sr == 0;
2572 return 0;
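/* Worked example (illustrative): on a target where a DImode value
   occupies two consecutive hard registers, a call such as

	reg_fits_class_p (operand, GENERAL_REGS, 0, DImode)

   succeeds only if both REGNO (operand) and REGNO (operand) + 1 are
   in GENERAL_REGS, since the loop above checks every one of the
   HARD_REGNO_NREGS (regno, mode) registers occupied.  */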
2575 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2577 void
2578 split_all_insns (upd_life)
2579 int upd_life;
2581 sbitmap blocks;
2582 int changed;
2583 int i;
2585 blocks = sbitmap_alloc (n_basic_blocks);
2586 sbitmap_zero (blocks);
2587 changed = 0;
2589 for (i = n_basic_blocks - 1; i >= 0; --i)
2591 basic_block bb = BASIC_BLOCK (i);
2592 rtx insn, next;
2594 for (insn = bb->head; insn ; insn = next)
2596 rtx set;
2598 /* Can't use `next_real_insn', because that might step across
2599 a CODE_LABEL and out of the basic block we are walking. */
2600 next = NEXT_INSN (insn);
2601 if (GET_CODE (insn) != INSN)
2604 /* Don't split no-op move insns. These should silently
2605 disappear later in final. Splitting such insns would
2606 break the code that handles REG_NO_CONFLICT blocks. */
2608 else if ((set = single_set (insn)) != NULL
2609 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2611 /* Nops get in the way while scheduling, so delete them
2612 now if register allocation has already been done. It
2613 is too risky to try to do this before register
2614 allocation, and there are unlikely to be very many
2615 nops then anyway. */
2616 if (reload_completed)
2618 PUT_CODE (insn, NOTE);
2619 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2620 NOTE_SOURCE_FILE (insn) = 0;
2623 else
2625 /* Split insns here to get max fine-grain parallelism. */
2626 rtx first = PREV_INSN (insn);
2627 rtx last = try_split (PATTERN (insn), insn, 1);
2629 if (last != insn)
2631 SET_BIT (blocks, i);
2632 changed = 1;
2634 /* try_split returns the NOTE that INSN became. */
2635 first = NEXT_INSN (first);
2636 PUT_CODE (insn, NOTE);
2637 NOTE_SOURCE_FILE (insn) = 0;
2638 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2640 if (insn == bb->end)
2642 bb->end = last;
2643 break;
2648 if (insn == bb->end)
2649 break;
2652 /* ??? When we're called from just after reload, the CFG is in bad
2653 shape, and we may have fallen off the end. This could be fixed
2654 by having reload not try to delete unreachable code. Otherwise
2655 assert we found the end insn. */
2656 if (insn == NULL && upd_life)
2657 abort ();
2660 if (changed && upd_life)
2662 compute_bb_for_insn (get_max_uid ());
2663 count_or_remove_death_notes (blocks, 1);
2664 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2667 sbitmap_free (blocks);
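/* Calling sketch (not in the original source): a caller that does not
   need the life information maintained would use

	split_all_insns (0);

   while a caller after register allocation that does need it would use

	split_all_insns (1);

   so that count_or_remove_death_notes and update_life_info are rerun
   for exactly the blocks whose insns were split.  */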
2670 #ifdef HAVE_peephole2
2671 /* This is the last insn we'll allow recog_next_insn to consider. */
2672 static rtx recog_last_allowed_insn;
2674 /* Return the Nth non-note insn after INSN, or return NULL_RTX if it does
2675 not exist. Used by the recognizer to find the next insn to match in a
2676 multi-insn pattern. */
2678 recog_next_insn (insn, n)
2679 rtx insn;
2680 int n;
2682 if (insn != NULL_RTX)
2684 while (n > 0)
2686 if (insn == recog_last_allowed_insn)
2687 return NULL_RTX;
2689 insn = NEXT_INSN (insn);
2690 if (insn == NULL_RTX)
2691 break;
2693 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2694 n -= 1;
2698 return insn;
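/* Usage sketch (illustrative): the generated peephole2 recognizer
   walks a window of instructions with calls along the lines of

	rtx second = recog_next_insn (insn, 1);
	rtx third  = recog_next_insn (insn, 2);

   and each call yields NULL_RTX once recog_last_allowed_insn is hit,
   which is how peephole2_optimize below keeps a match from running
   into insns it has already replaced.  */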
2701 /* Perform the peephole2 optimization pass. */
2702 void
2703 peephole2_optimize (dump_file)
2704 FILE *dump_file ATTRIBUTE_UNUSED;
2706 rtx insn, prev;
2707 int i, changed;
2708 sbitmap blocks;
2710 /* ??? TODO: Arrange with resource.c to start at bb->global_live_at_end
2711 and backtrack insn by insn as we proceed through the block. In this
2712 way we'll not need to keep searching forward from the beginning of
2713 basic blocks to find register life info. */
2715 init_resource_info (NULL);
2717 blocks = sbitmap_alloc (n_basic_blocks);
2718 sbitmap_zero (blocks);
2719 changed = 0;
2721 for (i = n_basic_blocks - 1; i >= 0; --i)
2723 basic_block bb = BASIC_BLOCK (i);
2725 /* Since we don't update life info until the very end, we can't
2726 allow matching instructions that we've replaced before. Walk
2727 backward through the basic block so that we don't have to
2728 care about subsequent life info; we use recog_last_allowed_insn to
2729 restrict how far forward we will allow the match to proceed. */
2731 recog_last_allowed_insn = bb->end;
2732 for (insn = bb->end; ; insn = prev)
2734 prev = PREV_INSN (insn);
2735 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2737 rtx try, last_insn;
2739 try = peephole2_insns (PATTERN (insn), insn, &last_insn);
2740 if (try != NULL)
2742 flow_delete_insn_chain (insn, last_insn);
2743 try = emit_insn_after (try, prev);
2745 if (last_insn == bb->end)
2746 bb->end = try;
2747 if (insn == bb->head)
2748 bb->head = NEXT_INSN (prev);
2750 recog_last_allowed_insn = prev;
2751 SET_BIT (blocks, i);
2752 changed = 1;
2756 if (insn == bb->head)
2757 break;
2761 free_resource_info ();
2763 compute_bb_for_insn (get_max_uid ());
2764 count_or_remove_death_notes (blocks, 1);
2765 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2767 #endif
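/* Worked illustration of the ordering constraint above (added for
   clarity, not part of the original source): suppose a block contains

	insn A:  (set (reg 100) ...)
	insn B:  (set (reg 101) (reg 100))

   and a peephole2 replacement rewrites B.  Because the block is
   walked backward and recog_last_allowed_insn is moved to the insn
   before the replacement, a later match attempt starting at A cannot
   extend into the freshly emitted insns, so the pass never consults
   stale life information for them.  */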