gcc/recog.c
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-99, 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tm_p.h"
26 #include "insn-config.h"
27 #include "insn-attr.h"
28 #include "insn-flags.h"
29 #include "insn-codes.h"
30 #include "recog.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "function.h"
34 #include "flags.h"
35 #include "real.h"
36 #include "toplev.h"
37 #include "basic-block.h"
38 #include "output.h"
39 #include "resource.h"
41 #ifndef STACK_PUSH_CODE
42 #ifdef STACK_GROWS_DOWNWARD
43 #define STACK_PUSH_CODE PRE_DEC
44 #else
45 #define STACK_PUSH_CODE PRE_INC
46 #endif
47 #endif
49 #ifndef STACK_POP_CODE
50 #ifdef STACK_GROWS_DOWNWARD
51 #define STACK_POP_CODE POST_INC
52 #else
53 #define STACK_POP_CODE POST_DEC
54 #endif
55 #endif
57 static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
58 static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
59 static rtx *find_constant_term_loc PARAMS ((rtx *));
60 static int insn_invalid_p PARAMS ((rtx));
62 /* Nonzero means allow operands to be volatile.
63 This should be 0 if you are generating rtl, such as if you are calling
64 the functions in optabs.c and expmed.c (most of the time).
65 This should be 1 if all valid insns need to be recognized,
66 such as in regclass.c and final.c and reload.c.
68 init_recog and init_recog_no_volatile are responsible for setting this. */
70 int volatile_ok;
72 struct recog_data recog_data;
74 /* Contains a vector of operand_alternative structures for every operand.
75 Set up by preprocess_constraints. */
76 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
78 /* On return from `constrain_operands', indicate which alternative
79 was satisfied. */
81 int which_alternative;
83 /* Nonzero after end of reload pass.
84 Set to 1 or 0 by toplev.c.
85 Controls the significance of (SUBREG (MEM)). */
87 int reload_completed;
89 /* Initialize data used by the function `recog'.
90 This must be called once in the compilation of a function
91 before any insn recognition may be done in the function. */
93 void
94 init_recog_no_volatile ()
96 volatile_ok = 0;
99 void
100 init_recog ()
102 volatile_ok = 1;
105 /* Try recognizing the instruction INSN,
106 and return the code number that results.
107 Remember the code so that repeated calls do not
108 need to spend the time for actual rerecognition.
110 This function is the normal interface to instruction recognition.
111 The automatically-generated function `recog' is normally called
112 through this one. (The only exception is in combine.c.) */
115 recog_memoized (insn)
116 rtx insn;
118 if (INSN_CODE (insn) < 0)
119 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
120 return INSN_CODE (insn);
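/* Example (hypothetical sketch, for illustration only; not in the original
   recog.c): because recog_memoized caches its result in INSN_CODE, a pass
   that rewrites PATTERN (insn) directly, without going through
   validate_change below, must clear the cached code itself so that the next
   call really re-runs recog.  */
#if 0
static int
rewrite_and_rerecognize (insn, newpat)
     rtx insn, newpat;
{
  PATTERN (insn) = newpat;
  /* Invalidate the memoized code; otherwise recog_memoized would keep
     returning the code of the old pattern.  */
  INSN_CODE (insn) = -1;
  return recog_memoized (insn) >= 0;
}
#endif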
123 /* Check that X is an insn-body for an `asm' with operands
124 and that the operands mentioned in it are legitimate. */
127 check_asm_operands (x)
128 rtx x;
130 int noperands;
131 rtx *operands;
132 const char **constraints;
133 int i;
135 /* Post-reload, be more strict with things. */
136 if (reload_completed)
138 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
139 extract_insn (make_insn_raw (x));
140 constrain_operands (1);
141 return which_alternative >= 0;
144 noperands = asm_noperands (x);
145 if (noperands < 0)
146 return 0;
147 if (noperands == 0)
148 return 1;
150 operands = (rtx *) alloca (noperands * sizeof (rtx));
151 constraints = (const char **) alloca (noperands * sizeof (char *));
153 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
155 for (i = 0; i < noperands; i++)
157 const char *c = constraints[i];
158 if (c[0] == '%')
159 c++;
160 if (ISDIGIT ((unsigned char)c[0]) && c[1] == '\0')
161 c = constraints[c[0] - '0'];
163 if (! asm_operand_ok (operands[i], c))
164 return 0;
167 return 1;
170 /* Static data for the next two routines. */
172 typedef struct change_t
174 rtx object;
175 int old_code;
176 rtx *loc;
177 rtx old;
178 } change_t;
180 static change_t *changes;
181 static int changes_allocated;
183 static int num_changes = 0;
185 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
186 at which NEW will be placed. If OBJECT is zero, no validation is done,
187 the change is simply made.
189 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
190 will be called with the address and mode as parameters. If OBJECT is
191 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
192 the change in place.
194 IN_GROUP is non-zero if this is part of a group of changes that must be
195 performed as a group. In that case, the changes will be stored. The
196 function `apply_change_group' will validate and apply the changes.
198 If IN_GROUP is zero, this is a single change. Try to recognize the insn
199 or validate the memory reference with the change applied. If the result
200 is not valid for the machine, suppress the change and return zero.
201 Otherwise, perform the change and return 1. */
204 validate_change (object, loc, new, in_group)
205 rtx object;
206 rtx *loc;
207 rtx new;
208 int in_group;
210 rtx old = *loc;
212 if (old == new || rtx_equal_p (old, new))
213 return 1;
215 if (in_group == 0 && num_changes != 0)
216 abort ();
218 *loc = new;
220 /* Save the information describing this change. */
221 if (num_changes >= changes_allocated)
223 if (changes_allocated == 0)
224 /* This value allows for repeated substitutions inside complex
225 indexed addresses, or changes in up to 5 insns. */
226 changes_allocated = MAX_RECOG_OPERANDS * 5;
227 else
228 changes_allocated *= 2;
230 changes =
231 (change_t*) xrealloc (changes,
232 sizeof (change_t) * changes_allocated);
235 changes[num_changes].object = object;
236 changes[num_changes].loc = loc;
237 changes[num_changes].old = old;
239 if (object && GET_CODE (object) != MEM)
241 /* Set INSN_CODE to force rerecognition of insn. Save old code in
242 case invalid. */
243 changes[num_changes].old_code = INSN_CODE (object);
244 INSN_CODE (object) = -1;
247 num_changes++;
249 /* If we are making a group of changes, return 1. Otherwise, validate the
250 change group we made. */
252 if (in_group)
253 return 1;
254 else
255 return apply_change_group ();
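/* Example (hypothetical sketch, for illustration only; the helper name and
   the transformation are made up): grouped use of validate_change.  Both
   operand swaps are queued with IN_GROUP nonzero and take effect only if
   apply_change_group can still recognize the rewritten insn.  */
#if 0
static int
try_commute_plus (insn)
     rtx insn;
{
  rtx set = single_set (insn);
  rtx src, a, b;

  if (set == 0 || GET_CODE (SET_SRC (set)) != PLUS)
    return 0;

  src = SET_SRC (set);
  a = XEXP (src, 0);
  b = XEXP (src, 1);

  /* Queue both replacements; neither is validated yet.  */
  validate_change (insn, &XEXP (src, 0), b, 1);
  validate_change (insn, &XEXP (src, 1), a, 1);

  /* Accept or undo the whole group atomically.  */
  return apply_change_group ();
}
#endif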
258 /* This subroutine of apply_change_group verifies whether the changes to INSN
259 were valid; i.e. whether INSN can still be recognized. */
261 static int
262 insn_invalid_p (insn)
263 rtx insn;
265 int icode = recog_memoized (insn);
266 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
268 if (is_asm && ! check_asm_operands (PATTERN (insn)))
269 return 1;
270 if (! is_asm && icode < 0)
271 return 1;
273 /* After reload, verify that all constraints are satisfied. */
274 if (reload_completed)
276 extract_insn (insn);
278 if (! constrain_operands (1))
279 return 1;
282 return 0;
285 /* Apply a group of changes previously issued with `validate_change'.
286 Return 1 if all changes are valid, zero otherwise. */
289 apply_change_group ()
291 int i;
293 /* The changes have been applied and all INSN_CODEs have been reset to force
294 rerecognition.
296 The changes are valid if we aren't given an object, or if we are
297 given a MEM and it still is a valid address, or if this is an insn
298 and it is recognized. In the latter case, if reload has completed,
299 we also require that the operands meet the constraints for
300 the insn. */
302 for (i = 0; i < num_changes; i++)
304 rtx object = changes[i].object;
306 if (object == 0)
307 continue;
309 if (GET_CODE (object) == MEM)
311 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
312 break;
314 else if (insn_invalid_p (object))
316 rtx pat = PATTERN (object);
318 /* Perhaps we couldn't recognize the insn because there were
319 extra CLOBBERs at the end. If so, try to re-recognize
320 without the last CLOBBER (later iterations will cause each of
321 them to be eliminated, in turn). But don't do this if we
322 have an ASM_OPERAND. */
323 if (GET_CODE (pat) == PARALLEL
324 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
325 && asm_noperands (PATTERN (object)) < 0)
327 rtx newpat;
329 if (XVECLEN (pat, 0) == 2)
330 newpat = XVECEXP (pat, 0, 0);
331 else
333 int j;
335 newpat
336 = gen_rtx_PARALLEL (VOIDmode,
337 gen_rtvec (XVECLEN (pat, 0) - 1));
338 for (j = 0; j < XVECLEN (newpat, 0); j++)
339 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
342 /* Add a new change to this group to replace the pattern
343 with this new pattern. Then consider this change
344 as having succeeded. The change we added will
345 cause the entire call to fail if things remain invalid.
347 Note that this can lose if a later change than the one
348 we are processing specified &XVECEXP (PATTERN (object), 0, X)
349 but this shouldn't occur. */
351 validate_change (object, &PATTERN (object), newpat, 1);
353 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
354 /* If this insn is a CLOBBER or USE, it is always valid, but is
355 never recognized. */
356 continue;
357 else
358 break;
362 if (i == num_changes)
364 num_changes = 0;
365 return 1;
367 else
369 cancel_changes (0);
370 return 0;
374 /* Return the number of changes so far in the current group. */
377 num_validated_changes ()
379 return num_changes;
382 /* Retract the changes numbered NUM and up. */
384 void
385 cancel_changes (num)
386 int num;
388 int i;
390 /* Back out all the changes. Do this in the opposite order in which
391 they were made. */
392 for (i = num_changes - 1; i >= num; i--)
394 *changes[i].loc = changes[i].old;
395 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
396 INSN_CODE (changes[i].object) = changes[i].old_code;
398 num_changes = num;
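/* Example (hypothetical sketch, for illustration only): num_validated_changes
   and cancel_changes let a caller modify the rtl speculatively, inspect the
   result, and then restore it exactly.  A real caller would call
   apply_change_group instead of cancel_changes to commit.  */
#if 0
static void
peek_at_replacement (from, to, insn)
     rtx from, to, insn;
{
  int checkpoint = num_validated_changes ();

  /* Queue every replacement of FROM by TO in INSN; the rtl is changed in
     place but nothing has been validated yet.  */
  validate_replace_rtx_group (from, to, insn);

  /* ... the caller could examine PATTERN (insn) here, e.g. to compare
     costs, before deciding ...  */

  /* Undo everything queued since the checkpoint.  */
  cancel_changes (checkpoint);
}
#endif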
401 /* Replace every occurrence of FROM in X with TO. Mark each change with
402 validate_change passing OBJECT. */
404 static void
405 validate_replace_rtx_1 (loc, from, to, object)
406 rtx *loc;
407 rtx from, to, object;
409 register int i, j;
410 register const char *fmt;
411 register rtx x = *loc;
412 enum rtx_code code = GET_CODE (x);
414 /* X matches FROM if it is the same rtx or they are both referring to the
415 same register in the same mode. Avoid calling rtx_equal_p unless the
416 operands look similar. */
418 if (x == from
419 || (GET_CODE (x) == REG && GET_CODE (from) == REG
420 && GET_MODE (x) == GET_MODE (from)
421 && REGNO (x) == REGNO (from))
422 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
423 && rtx_equal_p (x, from)))
425 validate_change (object, loc, to, 1);
426 return;
429 /* For commutative or comparison operations, try replacing each argument
430 separately and seeing if we made any changes. If so, put a constant
431 argument last. */
432 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
434 int prev_changes = num_changes;
436 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
437 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
438 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
440 validate_change (object, loc,
441 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
442 : swap_condition (code),
443 GET_MODE (x), XEXP (x, 1),
444 XEXP (x, 0)),
445 1);
446 x = *loc;
447 code = GET_CODE (x);
451 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
452 done the substitution, otherwise we won't. */
454 switch (code)
456 case PLUS:
457 /* If we have a PLUS whose second operand is now a CONST_INT, use
458 plus_constant to try to simplify it. */
459 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
460 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
461 1);
462 return;
464 case MINUS:
465 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
467 validate_change (object, loc,
468 plus_constant (XEXP (x, 0), - INTVAL (to)),
469 1);
470 return;
472 break;
474 case ZERO_EXTEND:
475 case SIGN_EXTEND:
476 /* In these cases, the operation to be performed depends on the mode
477 of the operand. If we are replacing the operand with a VOIDmode
478 constant, we lose the information. So try to simplify the operation
479 in that case. If it fails, substitute in something that we know
480 won't be recognized. */
481 if (GET_MODE (to) == VOIDmode
482 && (XEXP (x, 0) == from
483 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
484 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
485 && REGNO (XEXP (x, 0)) == REGNO (from))))
487 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
488 GET_MODE (from));
489 if (new == 0)
490 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
492 validate_change (object, loc, new, 1);
493 return;
495 break;
497 case SUBREG:
498 /* If we have a SUBREG of a register that we are replacing and we are
499 replacing it with a MEM, make a new MEM and try replacing the
500 SUBREG with it. Don't do this if the MEM has a mode-dependent address
501 or if we would be widening it. */
503 if (SUBREG_REG (x) == from
504 && GET_CODE (from) == REG
505 && GET_CODE (to) == MEM
506 && ! mode_dependent_address_p (XEXP (to, 0))
507 && ! MEM_VOLATILE_P (to)
508 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
510 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
511 enum machine_mode mode = GET_MODE (x);
512 rtx new;
514 if (BYTES_BIG_ENDIAN)
515 offset += (MIN (UNITS_PER_WORD,
516 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
517 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
519 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
520 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
521 MEM_COPY_ATTRIBUTES (new, to);
522 validate_change (object, loc, new, 1);
523 return;
525 break;
527 case ZERO_EXTRACT:
528 case SIGN_EXTRACT:
529 /* If we are replacing a register with memory, try to change the memory
530 to be the mode required for memory in extract operations (this isn't
531 likely to be an insertion operation; if it was, nothing bad will
532 happen, we might just fail in some cases). */
534 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
535 && GET_CODE (XEXP (x, 1)) == CONST_INT
536 && GET_CODE (XEXP (x, 2)) == CONST_INT
537 && ! mode_dependent_address_p (XEXP (to, 0))
538 && ! MEM_VOLATILE_P (to))
540 enum machine_mode wanted_mode = VOIDmode;
541 enum machine_mode is_mode = GET_MODE (to);
542 int pos = INTVAL (XEXP (x, 2));
544 #ifdef HAVE_extzv
545 if (code == ZERO_EXTRACT)
547 wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
548 if (wanted_mode == VOIDmode)
549 wanted_mode = word_mode;
551 #endif
552 #ifdef HAVE_extv
553 if (code == SIGN_EXTRACT)
555 wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
556 if (wanted_mode == VOIDmode)
557 wanted_mode = word_mode;
559 #endif
561 /* If we have a narrower mode, we can do something. */
562 if (wanted_mode != VOIDmode
563 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
565 int offset = pos / BITS_PER_UNIT;
566 rtx newmem;
568 /* If the bytes and bits are counted differently, we
569 must adjust the offset. */
570 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
571 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
572 - offset);
574 pos %= GET_MODE_BITSIZE (wanted_mode);
576 newmem = gen_rtx_MEM (wanted_mode,
577 plus_constant (XEXP (to, 0), offset));
578 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
579 MEM_COPY_ATTRIBUTES (newmem, to);
581 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
582 validate_change (object, &XEXP (x, 0), newmem, 1);
586 break;
588 default:
589 break;
592 /* For commutative or comparison operations we've already performed
593 replacements. Don't try to perform them again. */
594 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
596 fmt = GET_RTX_FORMAT (code);
597 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
599 if (fmt[i] == 'e')
600 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
601 else if (fmt[i] == 'E')
602 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
603 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
608 /* Try replacing every occurrence of FROM in INSN with TO. After all
609 changes have been made, validate by seeing if INSN is still valid. */
612 validate_replace_rtx (from, to, insn)
613 rtx from, to, insn;
615 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
616 return apply_change_group ();
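/* Example (hypothetical sketch, for illustration only): a typical use of
   validate_replace_rtx is to fold the value recorded in a REG_EQUAL note on
   a defining insn into one of its users, keeping the change only if the
   user is still recognized.  */
#if 0
static int
fold_reg_equal_into_use (def_insn, use_insn)
     rtx def_insn, use_insn;
{
  rtx set = single_set (def_insn);
  rtx note = find_reg_note (def_insn, REG_EQUAL, NULL_RTX);

  if (set == 0 || note == 0
      || GET_CODE (SET_DEST (set)) != REG
      || ! CONSTANT_P (XEXP (note, 0)))
    return 0;

  /* On failure the rtl of USE_INSN is restored automatically.  */
  return validate_replace_rtx (SET_DEST (set), XEXP (note, 0), use_insn);
}
#endif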
619 /* Try replacing every occurrence of FROM in INSN with TO. After all
620 changes have been made, validate by seeing if INSN is still valid. */
622 void
623 validate_replace_rtx_group (from, to, insn)
624 rtx from, to, insn;
626 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
629 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
630 SET_DESTs. After all changes have been made, validate by seeing if
631 INSN is still valid. */
634 validate_replace_src (from, to, insn)
635 rtx from, to, insn;
637 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
638 || GET_CODE (PATTERN (insn)) != SET)
639 abort ();
641 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
642 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
643 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
644 from, to, insn);
645 return apply_change_group ();
648 #ifdef HAVE_cc0
649 /* Return 1 if the insn using CC0 set by INSN does not contain
650 any ordered tests applied to the condition codes.
651 EQ and NE tests do not count. */
654 next_insn_tests_no_inequality (insn)
655 rtx insn;
657 register rtx next = next_cc0_user (insn);
659 /* If there is no next insn, we have to take the conservative choice. */
660 if (next == 0)
661 return 0;
663 return ((GET_CODE (next) == JUMP_INSN
664 || GET_CODE (next) == INSN
665 || GET_CODE (next) == CALL_INSN)
666 && ! inequality_comparisons_p (PATTERN (next)));
669 #if 0 /* This is useless since the insn that sets the cc's
670 must be followed immediately by the use of them. */
671 /* Return 1 if the CC value set up by INSN is not used. */
674 next_insns_test_no_inequality (insn)
675 rtx insn;
677 register rtx next = NEXT_INSN (insn);
679 for (; next != 0; next = NEXT_INSN (next))
681 if (GET_CODE (next) == CODE_LABEL
682 || GET_CODE (next) == BARRIER)
683 return 1;
684 if (GET_CODE (next) == NOTE)
685 continue;
686 if (inequality_comparisons_p (PATTERN (next)))
687 return 0;
688 if (sets_cc0_p (PATTERN (next)) == 1)
689 return 1;
690 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
691 return 1;
693 return 1;
695 #endif
696 #endif
698 /* This is used by find_single_use to locate an rtx that contains exactly one
699 use of DEST, which is typically either a REG or CC0. It returns a
700 pointer to the innermost rtx expression containing DEST. Appearances of
701 DEST that are being used to totally replace it are not counted. */
703 static rtx *
704 find_single_use_1 (dest, loc)
705 rtx dest;
706 rtx *loc;
708 rtx x = *loc;
709 enum rtx_code code = GET_CODE (x);
710 rtx *result = 0;
711 rtx *this_result;
712 int i;
713 const char *fmt;
715 switch (code)
717 case CONST_INT:
718 case CONST:
719 case LABEL_REF:
720 case SYMBOL_REF:
721 case CONST_DOUBLE:
722 case CLOBBER:
723 return 0;
725 case SET:
726 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
727 of a REG that occupies all of the REG, the insn uses DEST if
728 it is mentioned in the destination or the source. Otherwise, we
729 need just check the source. */
730 if (GET_CODE (SET_DEST (x)) != CC0
731 && GET_CODE (SET_DEST (x)) != PC
732 && GET_CODE (SET_DEST (x)) != REG
733 && ! (GET_CODE (SET_DEST (x)) == SUBREG
734 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
735 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
736 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
737 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
738 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
739 break;
741 return find_single_use_1 (dest, &SET_SRC (x));
743 case MEM:
744 case SUBREG:
745 return find_single_use_1 (dest, &XEXP (x, 0));
747 default:
748 break;
751 /* If it wasn't one of the common cases above, check each expression and
752 vector of this code. Look for a unique usage of DEST. */
754 fmt = GET_RTX_FORMAT (code);
755 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
757 if (fmt[i] == 'e')
759 if (dest == XEXP (x, i)
760 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
761 && REGNO (dest) == REGNO (XEXP (x, i))))
762 this_result = loc;
763 else
764 this_result = find_single_use_1 (dest, &XEXP (x, i));
766 if (result == 0)
767 result = this_result;
768 else if (this_result)
769 /* Duplicate usage. */
770 return 0;
772 else if (fmt[i] == 'E')
774 int j;
776 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
778 if (XVECEXP (x, i, j) == dest
779 || (GET_CODE (dest) == REG
780 && GET_CODE (XVECEXP (x, i, j)) == REG
781 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
782 this_result = loc;
783 else
784 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
786 if (result == 0)
787 result = this_result;
788 else if (this_result)
789 return 0;
794 return result;
797 /* See if DEST, produced in INSN, is used only a single time in the
798 sequel. If so, return a pointer to the innermost rtx expression in which
799 it is used.
801 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
803 This routine will usually return zero either before flow is called (because
804 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
805 note can't be trusted).
807 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
808 care about REG_DEAD notes or LOG_LINKS.
810 Otherwise, we find the single use by finding an insn that has a
811 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
812 only referenced once in that insn, we know that it must be the first
813 and last insn referencing DEST. */
815 rtx *
816 find_single_use (dest, insn, ploc)
817 rtx dest;
818 rtx insn;
819 rtx *ploc;
821 rtx next;
822 rtx *result;
823 rtx link;
825 #ifdef HAVE_cc0
826 if (dest == cc0_rtx)
828 next = NEXT_INSN (insn);
829 if (next == 0
830 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
831 return 0;
833 result = find_single_use_1 (dest, &PATTERN (next));
834 if (result && ploc)
835 *ploc = next;
836 return result;
838 #endif
840 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
841 return 0;
843 for (next = next_nonnote_insn (insn);
844 next != 0 && GET_CODE (next) != CODE_LABEL;
845 next = next_nonnote_insn (next))
846 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
848 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
849 if (XEXP (link, 0) == insn)
850 break;
852 if (link)
854 result = find_single_use_1 (dest, &PATTERN (next));
855 if (ploc)
856 *ploc = next;
857 return result;
861 return 0;
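/* Example (hypothetical sketch, for illustration only): a combine-like
   transformation built on find_single_use.  Real callers such as combine.c
   do far more checking (death notes, volatility, cost); this only shows how
   the returned location is meant to be used.  */
#if 0
static int
forward_into_single_use (insn)
     rtx insn;
{
  rtx set = single_set (insn);
  rtx use_insn = NULL_RTX;
  rtx *use;

  if (set == 0 || GET_CODE (SET_DEST (set)) != REG)
    return 0;

  use = find_single_use (SET_DEST (set), insn, &use_insn);
  if (use == 0)
    return 0;

  /* Substitute the source into the single use and revalidate the using
     insn immediately (IN_GROUP is zero).  */
  return validate_change (use_insn, use, SET_SRC (set), 0);
}
#endif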
864 /* Return 1 if OP is a valid general operand for machine mode MODE.
865 This is either a register reference, a memory reference,
866 or a constant. In the case of a memory reference, the address
867 is checked for general validity for the target machine.
869 Register and memory references must have mode MODE in order to be valid,
870 but some constants have no machine mode and are valid for any mode.
872 If MODE is VOIDmode, OP is checked for validity for whatever mode
873 it has.
875 The main use of this function is as a predicate in match_operand
876 expressions in the machine description.
878 For an explanation of this function's behavior for registers of
879 class NO_REGS, see the comment for `register_operand'. */
882 general_operand (op, mode)
883 register rtx op;
884 enum machine_mode mode;
886 register enum rtx_code code = GET_CODE (op);
887 int mode_altering_drug = 0;
889 if (mode == VOIDmode)
890 mode = GET_MODE (op);
892 /* Don't accept CONST_INT or anything similar
893 if the caller wants something floating. */
894 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
895 && GET_MODE_CLASS (mode) != MODE_INT
896 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
897 return 0;
899 if (CONSTANT_P (op))
900 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
901 #ifdef LEGITIMATE_PIC_OPERAND_P
902 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
903 #endif
904 && LEGITIMATE_CONSTANT_P (op));
906 /* Except for certain constants with VOIDmode, already checked for,
907 OP's mode must match MODE if MODE specifies a mode. */
909 if (GET_MODE (op) != mode)
910 return 0;
912 if (code == SUBREG)
914 #ifdef INSN_SCHEDULING
915 /* On machines that have insn scheduling, we want all memory
916 references to be explicit, so outlaw paradoxical SUBREGs. */
917 if (GET_CODE (SUBREG_REG (op)) == MEM
918 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
919 return 0;
920 #endif
922 op = SUBREG_REG (op);
923 code = GET_CODE (op);
924 #if 0
925 /* No longer needed, since (SUBREG (MEM...))
926 will load the MEM into a reload reg in the MEM's own mode. */
927 mode_altering_drug = 1;
928 #endif
931 if (code == REG)
932 /* A register whose class is NO_REGS is not a general operand. */
933 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
934 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
936 if (code == MEM)
938 register rtx y = XEXP (op, 0);
939 if (! volatile_ok && MEM_VOLATILE_P (op))
940 return 0;
941 if (GET_CODE (y) == ADDRESSOF)
942 return 1;
943 /* Use the mem's mode, since it will be reloaded thus. */
944 mode = GET_MODE (op);
945 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
948 /* Pretend this is an operand for now; we'll run force_operand
949 on its replacement in fixup_var_refs_1. */
950 if (code == ADDRESSOF)
951 return 1;
953 return 0;
955 win:
956 if (mode_altering_drug)
957 return ! mode_dependent_address_p (XEXP (op, 0));
958 return 1;
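/* Example (hypothetical sketch, for illustration only): how a pass might use
   general_operand before emitting a move.  force_reg is the usual emit-rtl
   helper; a real caller may need extra care, e.g. validize_mem for a MEM
   whose address is not legitimate.  */
#if 0
static rtx
make_general_operand (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (general_operand (x, mode))
    return x;

  /* Not acceptable as-is (e.g. a paradoxical (subreg (mem ...)) or a
     constant the target cannot accept): load it into a fresh pseudo.  */
  return force_reg (mode, x);
}
#endif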
961 /* Return 1 if OP is a valid memory address for a memory reference
962 of mode MODE.
964 The main use of this function is as a predicate in match_operand
965 expressions in the machine description. */
968 address_operand (op, mode)
969 register rtx op;
970 enum machine_mode mode;
972 return memory_address_p (mode, op);
975 /* Return 1 if OP is a register reference of mode MODE.
976 If MODE is VOIDmode, accept a register in any mode.
978 The main use of this function is as a predicate in match_operand
979 expressions in the machine description.
981 As a special exception, registers whose class is NO_REGS are
982 not accepted by `register_operand'. The reason for this change
983 is to allow the representation of special architecture artifacts
984 (such as a condition code register) without extending the rtl
985 definitions. Since registers of class NO_REGS cannot be used
986 as registers in any case where register classes are examined,
987 it is most consistent to keep this function from accepting them. */
990 register_operand (op, mode)
991 register rtx op;
992 enum machine_mode mode;
994 if (GET_MODE (op) != mode && mode != VOIDmode)
995 return 0;
997 if (GET_CODE (op) == SUBREG)
999 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1000 because it is guaranteed to be reloaded into one.
1001 Just make sure the MEM is valid in itself.
1002 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1003 but currently it does result from (SUBREG (REG)...) where the
1004 reg went on the stack.) */
1005 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1006 return general_operand (op, mode);
1008 #ifdef CLASS_CANNOT_CHANGE_SIZE
1009 if (GET_CODE (SUBREG_REG (op)) == REG
1010 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1011 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
1012 REGNO (SUBREG_REG (op)))
1013 && (GET_MODE_SIZE (mode)
1014 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1015 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1016 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1017 return 0;
1018 #endif
1020 op = SUBREG_REG (op);
1023 /* If we have an ADDRESSOF, consider it valid since it will be
1024 converted into something that will not be a MEM. */
1025 if (GET_CODE (op) == ADDRESSOF)
1026 return 1;
1028 /* We don't consider registers whose class is NO_REGS
1029 to be a register operand. */
1030 return (GET_CODE (op) == REG
1031 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1032 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1035 /* Return 1 for a register in Pmode; ignore the tested mode. */
1038 pmode_register_operand (op, mode)
1039 rtx op;
1040 enum machine_mode mode ATTRIBUTE_UNUSED;
1042 return register_operand (op, Pmode);
1045 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1046 or a hard register. */
1049 scratch_operand (op, mode)
1050 register rtx op;
1051 enum machine_mode mode;
1053 if (GET_MODE (op) != mode && mode != VOIDmode)
1054 return 0;
1056 return (GET_CODE (op) == SCRATCH
1057 || (GET_CODE (op) == REG
1058 && REGNO (op) < FIRST_PSEUDO_REGISTER));
1061 /* Return 1 if OP is a valid immediate operand for mode MODE.
1063 The main use of this function is as a predicate in match_operand
1064 expressions in the machine description. */
1067 immediate_operand (op, mode)
1068 register rtx op;
1069 enum machine_mode mode;
1071 /* Don't accept CONST_INT or anything similar
1072 if the caller wants something floating. */
1073 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1074 && GET_MODE_CLASS (mode) != MODE_INT
1075 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1076 return 0;
1078 /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
1079 result in 0/1. It seems a safe assumption that this is
1080 in range for everyone. */
1081 if (GET_CODE (op) == CONSTANT_P_RTX)
1082 return 1;
1084 return (CONSTANT_P (op)
1085 && (GET_MODE (op) == mode || mode == VOIDmode
1086 || GET_MODE (op) == VOIDmode)
1087 #ifdef LEGITIMATE_PIC_OPERAND_P
1088 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1089 #endif
1090 && LEGITIMATE_CONSTANT_P (op));
1093 /* Returns 1 if OP is an operand that is a CONST_INT. */
1096 const_int_operand (op, mode)
1097 register rtx op;
1098 enum machine_mode mode ATTRIBUTE_UNUSED;
1100 return GET_CODE (op) == CONST_INT;
1103 /* Returns 1 if OP is an operand that is a constant integer or constant
1104 floating-point number. */
1107 const_double_operand (op, mode)
1108 register rtx op;
1109 enum machine_mode mode;
1111 /* Don't accept CONST_INT or anything similar
1112 if the caller wants something floating. */
1113 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1114 && GET_MODE_CLASS (mode) != MODE_INT
1115 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1116 return 0;
1118 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1119 && (mode == VOIDmode || GET_MODE (op) == mode
1120 || GET_MODE (op) == VOIDmode));
1123 /* Return 1 if OP is a general operand that is not an immediate operand. */
1126 nonimmediate_operand (op, mode)
1127 register rtx op;
1128 enum machine_mode mode;
1130 return (general_operand (op, mode) && ! CONSTANT_P (op));
1133 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1136 nonmemory_operand (op, mode)
1137 register rtx op;
1138 enum machine_mode mode;
1140 if (CONSTANT_P (op))
1142 /* Don't accept CONST_INT or anything similar
1143 if the caller wants something floating. */
1144 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1145 && GET_MODE_CLASS (mode) != MODE_INT
1146 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1147 return 0;
1149 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1150 #ifdef LEGITIMATE_PIC_OPERAND_P
1151 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1152 #endif
1153 && LEGITIMATE_CONSTANT_P (op));
1156 if (GET_MODE (op) != mode && mode != VOIDmode)
1157 return 0;
1159 if (GET_CODE (op) == SUBREG)
1161 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1162 because it is guaranteed to be reloaded into one.
1163 Just make sure the MEM is valid in itself.
1164 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1165 but currently it does result from (SUBREG (REG)...) where the
1166 reg went on the stack.) */
1167 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1168 return general_operand (op, mode);
1169 op = SUBREG_REG (op);
1172 /* We don't consider registers whose class is NO_REGS
1173 to be a register operand. */
1174 return (GET_CODE (op) == REG
1175 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1176 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1179 /* Return 1 if OP is a valid operand that stands for pushing a
1180 value of mode MODE onto the stack.
1182 The main use of this function is as a predicate in match_operand
1183 expressions in the machine description. */
1186 push_operand (op, mode)
1187 rtx op;
1188 enum machine_mode mode;
1190 if (GET_CODE (op) != MEM)
1191 return 0;
1193 if (mode != VOIDmode && GET_MODE (op) != mode)
1194 return 0;
1196 op = XEXP (op, 0);
1198 if (GET_CODE (op) != STACK_PUSH_CODE)
1199 return 0;
1201 return XEXP (op, 0) == stack_pointer_rtx;
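/* Example (hypothetical sketch, for illustration only): the shape of rtl
   that push_operand accepts.  On a machine whose stack grows downward this
   builds (mem:MODE (pre_dec:Pmode (reg sp))).  */
#if 0
static rtx
example_push_mem (mode)
     enum machine_mode mode;
{
  return gen_rtx_MEM (mode,
                      gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode,
                                     stack_pointer_rtx));
}
#endif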
1204 /* Return 1 if OP is a valid operand that stands for popping a
1205 value of mode MODE off the stack.
1207 The main use of this function is as a predicate in match_operand
1208 expressions in the machine description. */
1211 pop_operand (op, mode)
1212 rtx op;
1213 enum machine_mode mode;
1215 if (GET_CODE (op) != MEM)
1216 return 0;
1218 if (mode != VOIDmode && GET_MODE (op) != mode)
1219 return 0;
1221 op = XEXP (op, 0);
1223 if (GET_CODE (op) != STACK_POP_CODE)
1224 return 0;
1226 return XEXP (op, 0) == stack_pointer_rtx;
1229 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1232 memory_address_p (mode, addr)
1233 enum machine_mode mode ATTRIBUTE_UNUSED;
1234 register rtx addr;
1236 if (GET_CODE (addr) == ADDRESSOF)
1237 return 1;
1239 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1240 return 0;
1242 win:
1243 return 1;
1246 /* Return 1 if OP is a valid memory reference with mode MODE,
1247 including a valid address.
1249 The main use of this function is as a predicate in match_operand
1250 expressions in the machine description. */
1253 memory_operand (op, mode)
1254 register rtx op;
1255 enum machine_mode mode;
1257 rtx inner;
1259 if (! reload_completed)
1260 /* Note that no SUBREG is a memory operand before end of reload pass,
1261 because (SUBREG (MEM...)) forces reloading into a register. */
1262 return GET_CODE (op) == MEM && general_operand (op, mode);
1264 if (mode != VOIDmode && GET_MODE (op) != mode)
1265 return 0;
1267 inner = op;
1268 if (GET_CODE (inner) == SUBREG)
1269 inner = SUBREG_REG (inner);
1271 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1274 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1275 that is, a memory reference whose address is a general_operand. */
1278 indirect_operand (op, mode)
1279 register rtx op;
1280 enum machine_mode mode;
1282 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1283 if (! reload_completed
1284 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1286 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1287 rtx inner = SUBREG_REG (op);
1289 if (BYTES_BIG_ENDIAN)
1290 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1291 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1293 if (mode != VOIDmode && GET_MODE (op) != mode)
1294 return 0;
1296 /* The only way that we can have a general_operand as the resulting
1297 address is if OFFSET is zero and the address already is an operand
1298 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1299 operand. */
1301 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1302 || (GET_CODE (XEXP (inner, 0)) == PLUS
1303 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1304 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1305 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1308 return (GET_CODE (op) == MEM
1309 && memory_operand (op, mode)
1310 && general_operand (XEXP (op, 0), Pmode));
1313 /* Return 1 if this is a comparison operator. This allows the use of
1314 MATCH_OPERATOR to recognize all the branch insns. */
1317 comparison_operator (op, mode)
1318 register rtx op;
1319 enum machine_mode mode;
1321 return ((mode == VOIDmode || GET_MODE (op) == mode)
1322 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1325 /* If BODY is an insn body that uses ASM_OPERANDS,
1326 return the number of operands (both input and output) in the insn.
1327 Otherwise return -1. */
1330 asm_noperands (body)
1331 rtx body;
1333 if (GET_CODE (body) == ASM_OPERANDS)
1334 /* No output operands: return number of input operands. */
1335 return ASM_OPERANDS_INPUT_LENGTH (body);
1336 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1337 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1338 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1339 else if (GET_CODE (body) == PARALLEL
1340 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1341 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1343 /* Multiple output operands, or 1 output plus some clobbers:
1344 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1345 int i;
1346 int n_sets;
1348 /* Count backwards through CLOBBERs to determine number of SETs. */
1349 for (i = XVECLEN (body, 0); i > 0; i--)
1351 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1352 break;
1353 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1354 return -1;
1357 /* N_SETS is now number of output operands. */
1358 n_sets = i;
1360 /* Verify that all the SETs we have
1361 came from a single original asm_operands insn
1362 (so that invalid combinations are blocked). */
1363 for (i = 0; i < n_sets; i++)
1365 rtx elt = XVECEXP (body, 0, i);
1366 if (GET_CODE (elt) != SET)
1367 return -1;
1368 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1369 return -1;
1370 /* If these ASM_OPERANDS rtx's came from different original insns
1371 then they aren't allowed together. */
1372 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1373 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1374 return -1;
1376 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1377 + n_sets);
1379 else if (GET_CODE (body) == PARALLEL
1380 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1382 /* 0 outputs, but some clobbers:
1383 body is [(asm_operands ...) (clobber (reg ...))...]. */
1384 int i;
1386 /* Make sure all the other parallel things really are clobbers. */
1387 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1388 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1389 return -1;
1391 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1393 else
1394 return -1;
1397 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1398 copy its operands (both input and output) into the vector OPERANDS,
1399 the locations of the operands within the insn into the vector OPERAND_LOCS,
1400 and the constraints for the operands into CONSTRAINTS.
1401 Write the modes of the operands into MODES.
1402 Return the assembler-template.
1404 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1405 we don't store that info. */
1407 char *
1408 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1409 rtx body;
1410 rtx *operands;
1411 rtx **operand_locs;
1412 const char **constraints;
1413 enum machine_mode *modes;
1415 register int i;
1416 int noperands;
1417 char *template = 0;
1419 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1421 rtx asmop = SET_SRC (body);
1422 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1424 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1426 for (i = 1; i < noperands; i++)
1428 if (operand_locs)
1429 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1430 if (operands)
1431 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1432 if (constraints)
1433 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1434 if (modes)
1435 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1438 /* The output is in the SET.
1439 Its constraint is in the ASM_OPERANDS itself. */
1440 if (operands)
1441 operands[0] = SET_DEST (body);
1442 if (operand_locs)
1443 operand_locs[0] = &SET_DEST (body);
1444 if (constraints)
1445 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1446 if (modes)
1447 modes[0] = GET_MODE (SET_DEST (body));
1448 template = ASM_OPERANDS_TEMPLATE (asmop);
1450 else if (GET_CODE (body) == ASM_OPERANDS)
1452 rtx asmop = body;
1453 /* No output operands: BODY is (asm_operands ....). */
1455 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1457 /* The input operands are found in the 1st element vector. */
1458 /* Constraints for inputs are in the 2nd element vector. */
1459 for (i = 0; i < noperands; i++)
1461 if (operand_locs)
1462 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1463 if (operands)
1464 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1465 if (constraints)
1466 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1467 if (modes)
1468 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1470 template = ASM_OPERANDS_TEMPLATE (asmop);
1472 else if (GET_CODE (body) == PARALLEL
1473 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1475 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1476 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1477 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1478 int nout = 0; /* Does not include CLOBBERs. */
1480 /* At least one output, plus some CLOBBERs. */
1482 /* The outputs are in the SETs.
1483 Their constraints are in the ASM_OPERANDS itself. */
1484 for (i = 0; i < nparallel; i++)
1486 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1487 break; /* Past last SET */
1489 if (operands)
1490 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1491 if (operand_locs)
1492 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1493 if (constraints)
1494 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1495 if (modes)
1496 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1497 nout++;
1500 for (i = 0; i < nin; i++)
1502 if (operand_locs)
1503 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1504 if (operands)
1505 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1506 if (constraints)
1507 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1508 if (modes)
1509 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1512 template = ASM_OPERANDS_TEMPLATE (asmop);
1514 else if (GET_CODE (body) == PARALLEL
1515 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1517 /* No outputs, but some CLOBBERs. */
1519 rtx asmop = XVECEXP (body, 0, 0);
1520 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1522 for (i = 0; i < nin; i++)
1524 if (operand_locs)
1525 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1526 if (operands)
1527 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1528 if (constraints)
1529 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1530 if (modes)
1531 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1534 template = ASM_OPERANDS_TEMPLATE (asmop);
1537 return template;
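/* Example (hypothetical sketch, for illustration only): the usual pairing of
   asm_noperands and decode_asm_operands, mirroring what check_asm_operands
   does above.  BODY is assumed to be the PATTERN of an asm insn, and the
   caller supplies arrays of at least asm_noperands (body) elements.  */
#if 0
static int
fetch_asm_operands (body, operands, constraints)
     rtx body;
     rtx *operands;
     const char **constraints;
{
  int noperands = asm_noperands (body);

  if (noperands <= 0)
    return noperands;

  /* Outputs come first in OPERANDS, followed by the inputs; we do not ask
     for the operand locations or modes here.  */
  decode_asm_operands (body, operands, NULL_PTR, constraints, NULL_PTR);
  return noperands;
}
#endif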
1540 /* Check if an asm_operand matches its constraints.
1541 Return > 0 if ok, = 0 if bad, < 0 if inconclusive. */
1544 asm_operand_ok (op, constraint)
1545 rtx op;
1546 const char *constraint;
1548 int result = 0;
1550 /* Use constrain_operands after reload. */
1551 if (reload_completed)
1552 abort ();
1554 while (*constraint)
1556 switch (*constraint++)
1558 case '=':
1559 case '+':
1560 case '*':
1561 case '%':
1562 case '?':
1563 case '!':
1564 case '#':
1565 case '&':
1566 case ',':
1567 break;
1569 case '0': case '1': case '2': case '3': case '4':
1570 case '5': case '6': case '7': case '8': case '9':
1571 /* For best results, our caller should have given us the
1572 proper matching constraint, but we can't actually fail
1573 the check if they didn't. Indicate that results are
1574 inconclusive. */
1575 result = -1;
1576 break;
1578 case 'p':
1579 if (address_operand (op, VOIDmode))
1580 return 1;
1581 break;
1583 case 'm':
1584 case 'V': /* non-offsettable */
1585 if (memory_operand (op, VOIDmode))
1586 return 1;
1587 break;
1589 case 'o': /* offsettable */
1590 if (offsettable_nonstrict_memref_p (op))
1591 return 1;
1592 break;
1594 case '<':
1595 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1596 excepting those that expand_call created. Further, on some
1597 machines which do not have generalized auto inc/dec, an inc/dec
1598 is not a memory_operand.
1600 Match any memory and hope things are resolved after reload. */
1602 if (GET_CODE (op) == MEM
1603 && (1
1604 || GET_CODE (XEXP (op, 0)) == PRE_DEC
1605 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1606 return 1;
1607 break;
1609 case '>':
1610 if (GET_CODE (op) == MEM
1611 && (1
1612 || GET_CODE (XEXP (op, 0)) == PRE_INC
1613 || GET_CODE (XEXP (op, 0)) == POST_INC))
1614 return 1;
1615 break;
1617 case 'E':
1618 #ifndef REAL_ARITHMETIC
1619 /* Match any floating double constant, but only if
1620 we can examine the bits of it reliably. */
1621 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1622 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1623 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1624 break;
1625 #endif
1626 /* FALLTHRU */
1628 case 'F':
1629 if (GET_CODE (op) == CONST_DOUBLE)
1630 return 1;
1631 break;
1633 case 'G':
1634 if (GET_CODE (op) == CONST_DOUBLE
1635 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1636 return 1;
1637 break;
1638 case 'H':
1639 if (GET_CODE (op) == CONST_DOUBLE
1640 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1641 return 1;
1642 break;
1644 case 's':
1645 if (GET_CODE (op) == CONST_INT
1646 || (GET_CODE (op) == CONST_DOUBLE
1647 && GET_MODE (op) == VOIDmode))
1648 break;
1649 /* FALLTHRU */
1651 case 'i':
1652 if (CONSTANT_P (op)
1653 #ifdef LEGITIMATE_PIC_OPERAND_P
1654 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1655 #endif
1657 return 1;
1658 break;
1660 case 'n':
1661 if (GET_CODE (op) == CONST_INT
1662 || (GET_CODE (op) == CONST_DOUBLE
1663 && GET_MODE (op) == VOIDmode))
1664 return 1;
1665 break;
1667 case 'I':
1668 if (GET_CODE (op) == CONST_INT
1669 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1670 return 1;
1671 break;
1672 case 'J':
1673 if (GET_CODE (op) == CONST_INT
1674 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1675 return 1;
1676 break;
1677 case 'K':
1678 if (GET_CODE (op) == CONST_INT
1679 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1680 return 1;
1681 break;
1682 case 'L':
1683 if (GET_CODE (op) == CONST_INT
1684 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1685 return 1;
1686 break;
1687 case 'M':
1688 if (GET_CODE (op) == CONST_INT
1689 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1690 return 1;
1691 break;
1692 case 'N':
1693 if (GET_CODE (op) == CONST_INT
1694 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1695 return 1;
1696 break;
1697 case 'O':
1698 if (GET_CODE (op) == CONST_INT
1699 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1700 return 1;
1701 break;
1702 case 'P':
1703 if (GET_CODE (op) == CONST_INT
1704 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1705 return 1;
1706 break;
1708 case 'X':
1709 return 1;
1711 case 'g':
1712 if (general_operand (op, VOIDmode))
1713 return 1;
1714 break;
1716 #ifdef EXTRA_CONSTRAINT
1717 case 'Q':
1718 if (EXTRA_CONSTRAINT (op, 'Q'))
1719 return 1;
1720 break;
1721 case 'R':
1722 if (EXTRA_CONSTRAINT (op, 'R'))
1723 return 1;
1724 break;
1725 case 'S':
1726 if (EXTRA_CONSTRAINT (op, 'S'))
1727 return 1;
1728 break;
1729 case 'T':
1730 if (EXTRA_CONSTRAINT (op, 'T'))
1731 return 1;
1732 break;
1733 case 'U':
1734 if (EXTRA_CONSTRAINT (op, 'U'))
1735 return 1;
1736 break;
1737 #endif
1739 case 'r':
1740 default:
1741 if (GET_MODE (op) == BLKmode)
1742 break;
1743 if (register_operand (op, VOIDmode))
1744 return 1;
1745 break;
1749 return result;
1752 /* Given an rtx *P, if it is a sum containing an integer constant term,
1753 return the location (type rtx *) of the pointer to that constant term.
1754 Otherwise, return a null pointer. */
1756 static rtx *
1757 find_constant_term_loc (p)
1758 rtx *p;
1760 register rtx *tem;
1761 register enum rtx_code code = GET_CODE (*p);
1763 /* If *P IS such a constant term, P is its location. */
1765 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1766 || code == CONST)
1767 return p;
1769 /* Otherwise, if not a sum, it has no constant term. */
1771 if (GET_CODE (*p) != PLUS)
1772 return 0;
1774 /* If one of the summands is constant, return its location. */
1776 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1777 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1778 return p;
1780 /* Otherwise, check each summand for containing a constant term. */
1782 if (XEXP (*p, 0) != 0)
1784 tem = find_constant_term_loc (&XEXP (*p, 0));
1785 if (tem != 0)
1786 return tem;
1789 if (XEXP (*p, 1) != 0)
1791 tem = find_constant_term_loc (&XEXP (*p, 1));
1792 if (tem != 0)
1793 return tem;
1796 return 0;
1799 /* Return 1 if OP is a memory reference
1800 whose address contains no side effects
1801 and remains valid after the addition
1802 of a positive integer less than the
1803 size of the object being referenced.
1805 We assume that the original address is valid and do not check it.
1807 This uses strict_memory_address_p as a subroutine, so
1808 don't use it before reload. */
1811 offsettable_memref_p (op)
1812 rtx op;
1814 return ((GET_CODE (op) == MEM)
1815 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1818 /* Similar, but don't require a strictly valid mem ref:
1819 consider pseudo-regs valid as index or base regs. */
1822 offsettable_nonstrict_memref_p (op)
1823 rtx op;
1825 return ((GET_CODE (op) == MEM)
1826 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1829 /* Return 1 if Y is a memory address which contains no side effects
1830 and would remain valid after the addition of a positive integer
1831 less than the size of that mode.
1833 We assume that the original address is valid and do not check it.
1834 We do check that it is valid for narrower modes.
1836 If STRICTP is nonzero, we require a strictly valid address,
1837 for the sake of use in reload.c. */
1840 offsettable_address_p (strictp, mode, y)
1841 int strictp;
1842 enum machine_mode mode;
1843 register rtx y;
1845 register enum rtx_code ycode = GET_CODE (y);
1846 register rtx z;
1847 rtx y1 = y;
1848 rtx *y2;
1849 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
1850 (strictp ? strict_memory_address_p : memory_address_p);
1852 if (CONSTANT_ADDRESS_P (y))
1853 return 1;
1855 /* Adjusting an offsettable address involves changing to a narrower mode.
1856 Make sure that's OK. */
1858 if (mode_dependent_address_p (y))
1859 return 0;
1861 /* If the expression contains a constant term,
1862 see if it remains valid when max possible offset is added. */
1864 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1866 int good;
1868 y1 = *y2;
1869 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1870 /* Use QImode because an odd displacement may be automatically invalid
1871 for any wider mode. But it should be valid for a single byte. */
1872 good = (*addressp) (QImode, y);
1874 /* In any case, restore old contents of memory. */
1875 *y2 = y1;
1876 return good;
1879 if (ycode == PRE_DEC || ycode == PRE_INC
1880 || ycode == POST_DEC || ycode == POST_INC)
1881 return 0;
1883 /* The offset added here is chosen as the maximum offset that
1884 any instruction could need to add when operating on something
1885 of the specified mode. We assume that if Y and Y+c are
1886 valid addresses then so is Y+d for all 0<d<c. */
1888 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1890 /* Use QImode because an odd displacement may be automatically invalid
1891 for any wider mode. But it should be valid for a single byte. */
1892 return (*addressp) (QImode, z);
1895 /* Return 1 if ADDR is an address-expression whose effect depends
1896 on the mode of the memory reference it is used in.
1898 Autoincrement addressing is a typical example of mode-dependence
1899 because the amount of the increment depends on the mode. */
1902 mode_dependent_address_p (addr)
1903 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1905 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1906 return 0;
1907 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1908 win: ATTRIBUTE_UNUSED_LABEL
1909 return 1;
1912 /* Return 1 if OP is a general operand
1913 other than a memory ref with a mode dependent address. */
1916 mode_independent_operand (op, mode)
1917 enum machine_mode mode;
1918 rtx op;
1920 rtx addr;
1922 if (! general_operand (op, mode))
1923 return 0;
1925 if (GET_CODE (op) != MEM)
1926 return 1;
1928 addr = XEXP (op, 0);
1929 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1930 return 1;
1931 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1932 lose: ATTRIBUTE_UNUSED_LABEL
1933 return 0;
1936 /* Given an operand OP that is a valid memory reference
1937 which satisfies offsettable_memref_p,
1938 return a new memory reference whose address has been adjusted by OFFSET.
1939 OFFSET should be positive and less than the size of the object referenced.
1943 adj_offsettable_operand (op, offset)
1944 rtx op;
1945 int offset;
1947 register enum rtx_code code = GET_CODE (op);
1949 if (code == MEM)
1951 register rtx y = XEXP (op, 0);
1952 register rtx new;
1954 if (CONSTANT_ADDRESS_P (y))
1956 new = gen_rtx_MEM (GET_MODE (op),
1957 plus_constant_for_output (y, offset));
1958 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1959 return new;
1962 if (GET_CODE (y) == PLUS)
1964 rtx z = y;
1965 register rtx *const_loc;
1967 op = copy_rtx (op);
1968 z = XEXP (op, 0);
1969 const_loc = find_constant_term_loc (&z);
1970 if (const_loc)
1972 *const_loc = plus_constant_for_output (*const_loc, offset);
1973 return op;
1977 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1978 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1979 return new;
1981 abort ();
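/* Example (hypothetical sketch, for illustration only): output routines use
   offsettable_memref_p and adj_offsettable_operand together, e.g. to refer
   to the second word of a double-word MEM.  */
#if 0
static rtx
second_word_of (op)
     rtx op;
{
  if (GET_CODE (op) != MEM || ! offsettable_memref_p (op))
    return 0;

  /* A new MEM addressing OP's location plus one word; the offset must stay
     below the size of the referenced object for the result to be valid.  */
  return adj_offsettable_operand (op, UNITS_PER_WORD);
}
#endif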
1984 /* Analyze INSN and fill in recog_data. */
1986 void
1987 extract_insn (insn)
1988 rtx insn;
1990 int i;
1991 int icode;
1992 int noperands;
1993 rtx body = PATTERN (insn);
1995 recog_data.n_operands = 0;
1996 recog_data.n_alternatives = 0;
1997 recog_data.n_dups = 0;
1999 switch (GET_CODE (body))
2001 case USE:
2002 case CLOBBER:
2003 case ASM_INPUT:
2004 case ADDR_VEC:
2005 case ADDR_DIFF_VEC:
2006 return;
2008 case SET:
2009 case PARALLEL:
2010 case ASM_OPERANDS:
2011 recog_data.n_operands = noperands = asm_noperands (body);
2012 if (noperands >= 0)
2014 /* This insn is an `asm' with operands. */
2016 /* expand_asm_operands makes sure there aren't too many operands. */
2017 if (noperands > MAX_RECOG_OPERANDS)
2018 abort ();
2020 /* Now get the operand values and constraints out of the insn. */
2021 decode_asm_operands (body, recog_data.operand,
2022 recog_data.operand_loc,
2023 recog_data.constraints,
2024 recog_data.operand_mode);
2025 if (noperands > 0)
2027 const char *p = recog_data.constraints[0];
2028 recog_data.n_alternatives = 1;
2029 while (*p)
2030 recog_data.n_alternatives += (*p++ == ',');
2032 break;
2035 /* FALLTHROUGH */
2037 default:
2038 /* Ordinary insn: recognize it, get the operands via insn_extract
2039 and get the constraints. */
2041 icode = recog_memoized (insn);
2042 if (icode < 0)
2043 fatal_insn_not_found (insn);
2045 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2046 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2047 recog_data.n_dups = insn_data[icode].n_dups;
2049 insn_extract (insn);
2051 for (i = 0; i < noperands; i++)
2053 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2054 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2057 for (i = 0; i < noperands; i++)
2058 recog_data.operand_type[i]
2059 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2060 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2061 : OP_IN);
2063 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2064 abort ();
/* After calling extract_insn, you can use this function to extract some
   information from the constraint strings into a more usable form.
   The collected data is stored in recog_op_alt.  */

void
preprocess_constraints ()
{
  int i;

  memset (recog_op_alt, 0, sizeof recog_op_alt);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      int j;
      struct operand_alternative *op_alt;
      const char *p = recog_data.constraints[i];

      op_alt = recog_op_alt[i];

      for (j = 0; j < recog_data.n_alternatives; j++)
        {
          op_alt[j].class = NO_REGS;
          op_alt[j].constraint = p;
          op_alt[j].matches = -1;
          op_alt[j].matched = -1;

          if (*p == '\0' || *p == ',')
            {
              op_alt[j].anything_ok = 1;
              continue;
            }

          for (;;)
            {
              char c = *p++;
              if (c == '#')
                do
                  c = *p++;
                while (c != ',' && c != '\0');

              if (c == ',' || c == '\0')
                break;

              switch (c)
                {
                case '=': case '+': case '*': case '%':
                case 'E': case 'F': case 'G': case 'H':
                case 's': case 'i': case 'n':
                case 'I': case 'J': case 'K': case 'L':
                case 'M': case 'N': case 'O': case 'P':
#ifdef EXTRA_CONSTRAINT
                case 'Q': case 'R': case 'S': case 'T': case 'U':
#endif
                  /* These don't say anything we care about.  */
                  break;

                case '?':
                  op_alt[j].reject += 6;
                  break;
                case '!':
                  op_alt[j].reject += 600;
                  break;
                case '&':
                  op_alt[j].earlyclobber = 1;
                  break;
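                  /* A digit N says this operand must match operand N
                     (as with the two-address form of add); remember
                     which operand is tied to which.  */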
                case '0': case '1': case '2': case '3': case '4':
                case '5': case '6': case '7': case '8': case '9':
                  op_alt[j].matches = c - '0';
                  op_alt[op_alt[j].matches].matched = i;
                  break;

                case 'm':
                  op_alt[j].memory_ok = 1;
                  break;
                case '<':
                  op_alt[j].decmem_ok = 1;
                  break;
                case '>':
                  op_alt[j].incmem_ok = 1;
                  break;
                case 'V':
                  op_alt[j].nonoffmem_ok = 1;
                  break;
                case 'o':
                  op_alt[j].offmem_ok = 1;
                  break;
                case 'X':
                  op_alt[j].anything_ok = 1;
                  break;

                case 'p':
                  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
                  break;

                case 'g': case 'r':
                  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
                  break;

                default:
                  op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
                  break;
                }
            }
        }
    }
}
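/* Example (illustrative, not from the original source): for an operand
   whose constraint string is "=r,m", the first alternative ends up with
   class GENERAL_REGS and the second with memory_ok set; the leading `='
   is skipped here (it is what makes extract_insn mark the operand as
   OP_OUT).  */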
/* Check the operands of an insn against the insn's operand constraints
   and return 1 if they are valid.
   The information about the insn's operands, constraints, operand modes
   etc. is obtained from the global variables set up by extract_insn.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive non-zero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */
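/* Summary note (added, not in the original source): callers pass
   STRICT > 0 after reload for exact checking, or STRICT == 0 before
   reload; in the latter case a failed strict pass is retried internally
   with STRICT == -1, which assumes reload will be able to fix the
   operands up.  */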
struct funny_match
{
  int this, other;
};

int
constrain_operands (strict)
     int strict;
{
  const char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  register int c;

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;

  if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
    return 1;

  for (c = 0; c < recog_data.n_operands; c++)
    {
      constraints[c] = recog_data.constraints[c];
      matching_operands[c] = -1;
    }

  which_alternative = 0;

  while (which_alternative < recog_data.n_alternatives)
    {
      register int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < recog_data.n_operands; opno++)
        {
          register rtx op = recog_data.operand[opno];
          enum machine_mode mode = GET_MODE (op);
          register const char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;

          earlyclobber[opno] = 0;

          /* A unary operator may be accepted by the predicate, but it
             is irrelevant for matching constraints.  */
          if (GET_RTX_CLASS (GET_CODE (op)) == '1')
            op = XEXP (op, 0);

          if (GET_CODE (op) == SUBREG)
            {
              if (GET_CODE (SUBREG_REG (op)) == REG
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = SUBREG_WORD (op);
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;

          while (*p && (c = *p++) != ',')
            switch (c)
              {
              case '?': case '!': case '*': case '%':
              case '=': case '+':
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                while (*p && *p != ',')
                  p++;
                break;

              case '&':
                earlyclobber[opno] = 1;
                break;

              case '0': case '1': case '2': case '3': case '4':
              case '5': case '6': case '7': case '8': case '9':

                /* This operand must be the same as a previous one.
                   This kind of constraint is used for instructions such
                   as add when they take only two operands.

                   Note that the lower-numbered operand is passed first.

                   If we are not testing strictly, assume that this constraint
                   will be satisfied.  */
                if (strict < 0)
                  val = 1;
                else
                  {
                    rtx op1 = recog_data.operand[c - '0'];
                    rtx op2 = recog_data.operand[opno];

                    /* A unary operator may be accepted by the predicate,
                       but it is irrelevant for matching constraints.  */
                    if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
                      op1 = XEXP (op1, 0);
                    if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
                      op2 = XEXP (op2, 0);

                    val = operands_match_p (op1, op2);
                  }

                matching_operands[opno] = c - '0';
                matching_operands[c - '0'] = opno;

                if (val != 0)
                  win = 1;
                /* If output is *x and input is *--x,
                   arrange later to change the output to *--x as well,
                   since the output op is the one that will be printed.  */
                if (val == 2 && strict > 0)
                  {
                    funny_match[funny_match_index].this = opno;
                    funny_match[funny_match_index++].other = c - '0';
                  }
                break;

              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  */
                if (strict <= 0
                    || (strict_memory_address_p (recog_data.operand_mode[opno],
                                                 op)))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  */
              case 'g':
                /* Anything goes unless it is a REG and really has a hard reg
                   but the hard reg is not in the class GENERAL_REGS.  */
                if (strict < 0
                    || GENERAL_REGS == ALL_REGS
                    || GET_CODE (op) != REG
                    || (reload_in_progress
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                  win = 1;
                break;

              case 'r':
                if (strict < 0
                    || (strict == 0
                        && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || (strict == 0 && GET_CODE (op) == SCRATCH)
                    || (GET_CODE (op) == REG
                        && ((GENERAL_REGS == ALL_REGS
                             && REGNO (op) < FIRST_PSEUDO_REGISTER)
                            || reg_fits_class_p (op, GENERAL_REGS,
                                                 offset, mode))))
                  win = 1;
                break;

              case 'X':
                /* This is used for a MATCH_SCRATCH in the cases when
                   we don't actually need anything.  So anything goes
                   any time.  */
                win = 1;
                break;

              case 'm':
                if (GET_CODE (op) == MEM
                    /* Before reload, accept what reload can turn into mem.  */
                    || (strict < 0 && CONSTANT_P (op))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              case '<':
                if (GET_CODE (op) == MEM
                    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                        || GET_CODE (XEXP (op, 0)) == POST_DEC))
                  win = 1;
                break;

              case '>':
                if (GET_CODE (op) == MEM
                    && (GET_CODE (XEXP (op, 0)) == PRE_INC
                        || GET_CODE (XEXP (op, 0)) == POST_INC))
                  win = 1;
                break;

              case 'E':
#ifndef REAL_ARITHMETIC
                /* Match any CONST_DOUBLE, but only if
                   we can examine the bits of it reliably.  */
                if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
                     || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
                    && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
                  break;
#endif
                if (GET_CODE (op) == CONST_DOUBLE)
                  win = 1;
                break;

              case 'F':
                if (GET_CODE (op) == CONST_DOUBLE)
                  win = 1;
                break;

              case 'G':
              case 'H':
                if (GET_CODE (op) == CONST_DOUBLE
                    && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
                  win = 1;
                break;

              case 's':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  break;
              case 'i':
                if (CONSTANT_P (op))
                  win = 1;
                break;

              case 'n':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  win = 1;
                break;

              case 'I':
              case 'J':
              case 'K':
              case 'L':
              case 'M':
              case 'N':
              case 'O':
              case 'P':
                if (GET_CODE (op) == CONST_INT
                    && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
                  win = 1;
                break;

#ifdef EXTRA_CONSTRAINT
              case 'Q':
              case 'R':
              case 'S':
              case 'T':
              case 'U':
                if (EXTRA_CONSTRAINT (op, c))
                  win = 1;
                break;
#endif

              case 'V':
                if (GET_CODE (op) == MEM
                    && ((strict > 0 && ! offsettable_memref_p (op))
                        || (strict < 0
                            && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
                        || (reload_in_progress
                            && !(GET_CODE (op) == REG
                                 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
                  win = 1;
                break;

              case 'o':
                if ((strict > 0 && offsettable_memref_p (op))
                    || (strict == 0 && offsettable_nonstrict_memref_p (op))
                    /* Before reload, accept what reload can handle.  */
                    || (strict < 0
                        && (CONSTANT_P (op) || GET_CODE (op) == MEM))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;
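              /* Any remaining letter is taken to be a machine-specific
                 register class, mapped via REG_CLASS_FROM_LETTER below.  */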
              default:
                if (strict < 0
                    || (strict == 0
                        && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || (strict == 0 && GET_CODE (op) == SCRATCH)
                    || (GET_CODE (op) == REG
                        && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
                                             offset, mode)))
                  win = 1;
              }

          constraints[opno] = p;
          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;

          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0)
            for (eopno = 0; eopno < recog_data.n_operands; eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && GET_CODE (recog_data.operand[eopno]) == REG)
                for (opno = 0; opno < recog_data.n_operands; opno++)
                  if ((GET_CODE (recog_data.operand[opno]) == MEM
                       || recog_data.operand_type[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *recog_data.constraints[opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && operands_match_p (recog_data.operand[opno],
                                                 recog_data.operand[eopno]))
                      && ! safe_from_earlyclobber (recog_data.operand[opno],
                                                   recog_data.operand[eopno]))
                    lose = 1;

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_data.operand[funny_match[funny_match_index].other]
                    = recog_data.operand[funny_match[funny_match_index].this];
                }

              return 1;
            }
        }

      which_alternative++;
    }

  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (-1);
  else
    return 0;
}
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (operand, class, offset, mode)
     rtx operand;
     register enum reg_class class;
     int offset;
     enum machine_mode mode;
{
  register int regno = REGNO (operand);
  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
                            regno + offset))
    {
      register int sr;
      regno += offset;
      for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
           sr > 0; sr--)
        if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
                                 regno + sr))
          break;
      return sr == 0;
    }

  return 0;
}
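/* For example (illustrative): if MODE needs two hard registers starting
   at REGNO + OFFSET, the loop above also checks REGNO + OFFSET + 1, so a
   value that straddles the boundary of CLASS is rejected.  */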
/* Split all insns in the function.  If UPD_LIFE, update life info after.  */

void
split_all_insns (upd_life)
     int upd_life;
{
  sbitmap blocks;
  int changed;
  int i;

  blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);
  changed = 0;

  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn, next;

      for (insn = bb->head; insn ; insn = next)
        {
          rtx set;

          /* Can't use `next_real_insn' because that might go across
             CODE_LABELS and short-out basic blocks.  */
          next = NEXT_INSN (insn);
          if (GET_CODE (insn) != INSN)
            ;

          /* Don't split no-op move insns.  These should silently
             disappear later in final.  Splitting such insns would
             break the code that handles REG_NO_CONFLICT blocks.  */

          else if ((set = single_set (insn)) != NULL
                   && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
            {
              /* Nops get in the way while scheduling, so delete them
                 now if register allocation has already been done.  It
                 is too risky to try to do this before register
                 allocation, and there are unlikely to be very many
                 nops then anyways.  */
              if (reload_completed)
                {
                  PUT_CODE (insn, NOTE);
                  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                  NOTE_SOURCE_FILE (insn) = 0;
                }
            }
          else
            {
              /* Split insns here to get max fine-grain parallelism.  */
              rtx first = PREV_INSN (insn);
              rtx last = try_split (PATTERN (insn), insn, 1);

              if (last != insn)
                {
                  SET_BIT (blocks, i);
                  changed = 1;

                  /* try_split returns the NOTE that INSN became.  */
                  first = NEXT_INSN (first);
                  PUT_CODE (insn, NOTE);
                  NOTE_SOURCE_FILE (insn) = 0;
                  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;

                  if (insn == bb->end)
                    {
                      bb->end = last;
                      break;
                    }
                }
            }

          if (insn == bb->end)
            break;
        }

      /* ??? When we're called from just after reload, the CFG is in bad
         shape, and we may have fallen off the end.  This could be fixed
         by having reload not try to delete unreachable code.  Otherwise
         assert we found the end insn.  */
      if (insn == NULL && upd_life)
        abort ();
    }

  if (changed && upd_life)
    {
      compute_bb_for_insn (get_max_uid ());
      count_or_remove_death_notes (blocks, 1);
      update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
    }

  sbitmap_free (blocks);
}
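/* Note (added, not in the original source): in split_all_insns above,
   only basic blocks whose bit was set in BLOCKS, i.e. those where
   try_split actually produced new insns, have their death notes and
   local life info recomputed.  */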
#ifdef HAVE_peephole2
/* This is the last insn we'll allow recog_next_insn to consider.  */
static rtx recog_last_allowed_insn;

/* Return the Nth non-note insn after INSN, or return NULL_RTX if it does
   not exist.  Used by the recognizer to find the next insn to match in a
   multi-insn pattern.  */

rtx
recog_next_insn (insn, n)
     rtx insn;
     int n;
{
  if (insn != NULL_RTX)
    {
      while (n > 0)
        {
          if (insn == recog_last_allowed_insn)
            return NULL_RTX;

          insn = NEXT_INSN (insn);
          if (insn == NULL_RTX)
            break;

          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            n -= 1;
        }
    }

  return insn;
}
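/* Example (illustrative): recog_next_insn (insn, 0) is INSN itself, while
   recog_next_insn (insn, 2) walks forward past notes until two more real
   insns have been seen, returning NULL_RTX if the walk would reach
   recog_last_allowed_insn or run off the end of the insn chain.  */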
/* Perform the peephole2 optimization pass.  */

void
peephole2_optimize (dump_file)
     FILE *dump_file ATTRIBUTE_UNUSED;
{
  rtx insn, prev;
  int i, changed;
  sbitmap blocks;

  /* ??? TODO: Arrange with resource.c to start at bb->global_live_at_end
     and backtrack insn by insn as we proceed through the block.  In this
     way we'll not need to keep searching forward from the beginning of
     basic blocks to find register life info.  */

  init_resource_info (NULL);

  blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (blocks);
  changed = 0;

  for (i = n_basic_blocks - 1; i >= 0; --i)
    {
      basic_block bb = BASIC_BLOCK (i);

      /* Since we don't update life info until the very end, we can't
         allow matching instructions that we've replaced before.  Walk
         backward through the basic block so that we don't have to
         care about subsequent life info; recog_last_allowed_insn is
         used to restrict how far forward we will allow the match
         to proceed.  */

      recog_last_allowed_insn = NEXT_INSN (bb->end);
      for (insn = bb->end; ; insn = prev)
        {
          prev = PREV_INSN (insn);
          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            {
              rtx try, last_insn;

              try = peephole2_insns (PATTERN (insn), insn, &last_insn);
              if (try != NULL)
                {
                  flow_delete_insn_chain (insn, last_insn);
                  try = emit_insn_after (try, prev);

                  if (last_insn == bb->end)
                    bb->end = try;
                  if (insn == bb->head)
                    bb->head = NEXT_INSN (prev);

                  recog_last_allowed_insn = NEXT_INSN (prev);
                  SET_BIT (blocks, i);
                  changed = 1;
                }
            }

          if (insn == bb->head)
            break;
        }
    }

  free_resource_info ();

  compute_bb_for_insn (get_max_uid ());
  count_or_remove_death_notes (blocks, 1);
  update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
}
#endif