[official-gcc.git] / gcc / recog.c
1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "insn-config.h"
26 #include "insn-attr.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "recog.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "toplev.h"
36 #ifndef STACK_PUSH_CODE
37 #ifdef STACK_GROWS_DOWNWARD
38 #define STACK_PUSH_CODE PRE_DEC
39 #else
40 #define STACK_PUSH_CODE PRE_INC
41 #endif
42 #endif
44 static void validate_replace_rtx_1 PROTO((rtx *, rtx, rtx, rtx));
45 static rtx *find_single_use_1 PROTO((rtx, rtx *));
46 static rtx *find_constant_term_loc PROTO((rtx *));
47 static int insn_invalid_p PROTO((rtx));
49 /* Nonzero means allow operands to be volatile.
50 This should be 0 if you are generating rtl, such as if you are calling
51 the functions in optabs.c and expmed.c (most of the time).
52 This should be 1 if all valid insns need to be recognized,
53 such as in regclass.c and final.c and reload.c.
55 init_recog and init_recog_no_volatile are responsible for setting this. */
57 int volatile_ok;
59 /* The following vectors hold the results from insn_extract. */
61 /* Indexed by N, gives value of operand N. */
62 rtx recog_operand[MAX_RECOG_OPERANDS];
64 /* Indexed by N, gives location where operand N was found. */
65 rtx *recog_operand_loc[MAX_RECOG_OPERANDS];
67 /* Indexed by N, gives location where the Nth duplicate-appearance of
68 an operand was found. This is something that matched MATCH_DUP. */
69 rtx *recog_dup_loc[MAX_RECOG_OPERANDS];
71 /* Indexed by N, gives the operand number that was duplicated in the
72 Nth duplicate-appearance of an operand. */
73 char recog_dup_num[MAX_RECOG_OPERANDS];
76 /* The next variables are set up by extract_insn. */
78 /* The number of operands of the insn. */
79 int recog_n_operands;
81 /* The number of MATCH_DUPs in the insn. */
82 int recog_n_dups;
84 /* The number of alternatives in the constraints for the insn. */
85 int recog_n_alternatives;
87 /* Indexed by N, gives the mode of operand N. */
88 enum machine_mode recog_operand_mode[MAX_RECOG_OPERANDS];
90 /* Indexed by N, gives the constraint string for operand N. */
91 char *recog_constraints[MAX_RECOG_OPERANDS];
93 #ifndef REGISTER_CONSTRAINTS
94 /* Indexed by N, nonzero if operand N should be an address. */
95 char recog_operand_address_p[MAX_RECOG_OPERANDS];
96 #endif
98 /* On return from `constrain_operands', indicate which alternative
99 was satisfied. */
101 int which_alternative;
103 /* Nonzero after end of reload pass.
104 Set to 1 or 0 by toplev.c.
105 Controls the significance of (SUBREG (MEM)). */
107 int reload_completed;
109 /* Initialize data used by the function `recog'.
110 This must be called once in the compilation of a function
111 before any insn recognition may be done in the function. */
113 void
114 init_recog_no_volatile ()
116 volatile_ok = 0;
119 void
120 init_recog ()
122 volatile_ok = 1;
125 /* Try recognizing the instruction INSN,
126 and return the code number that results.
127 Remember the code so that repeated calls do not
128 need to spend the time for actual rerecognition.
130 This function is the normal interface to instruction recognition.
131 The automatically-generated function `recog' is normally called
132 through this one. (The only exception is in combine.c.) */
135 recog_memoized (insn)
136 rtx insn;
138 if (INSN_CODE (insn) < 0)
139 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
140 return INSN_CODE (insn);
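/* Illustrative sketch, not part of the original file: because the result is
   cached in INSN_CODE, a pass that edits PATTERN (insn) by hand must clear
   the cache before asking for the code again.  INSN and OLD_PAT below are
   hypothetical.  */
#if 0
  INSN_CODE (insn) = -1;
  if (recog_memoized (insn) < 0)
    /* The edited pattern matches nothing; restore the old pattern.
       (validate_change, below, automates exactly this bookkeeping.)  */
    PATTERN (insn) = old_pat;
#endif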
143 /* Check that X is an insn-body for an `asm' with operands
144 and that the operands mentioned in it are legitimate. */
147 check_asm_operands (x)
148 rtx x;
150 int noperands = asm_noperands (x);
151 rtx *operands;
152 int i;
154 if (noperands < 0)
155 return 0;
156 if (noperands == 0)
157 return 1;
159 operands = (rtx *) alloca (noperands * sizeof (rtx));
160 decode_asm_operands (x, operands, NULL_PTR, NULL_PTR, NULL_PTR);
162 for (i = 0; i < noperands; i++)
163 if (!general_operand (operands[i], VOIDmode))
164 return 0;
166 return 1;
169 /* Static data for the next two routines. */
171 typedef struct change_t
173 rtx object;
174 int old_code;
175 rtx *loc;
176 rtx old;
177 } change_t;
179 static change_t *changes;
180 static int changes_allocated;
182 static int num_changes = 0;
 184 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
 185 at which NEW will be placed. If OBJECT is zero, no validation is done,
186 the change is simply made.
188 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
189 will be called with the address and mode as parameters. If OBJECT is
190 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
191 the change in place.
193 IN_GROUP is non-zero if this is part of a group of changes that must be
194 performed as a group. In that case, the changes will be stored. The
195 function `apply_change_group' will validate and apply the changes.
197 If IN_GROUP is zero, this is a single change. Try to recognize the insn
198 or validate the memory reference with the change applied. If the result
199 is not valid for the machine, suppress the change and return zero.
200 Otherwise, perform the change and return 1. */
203 validate_change (object, loc, new, in_group)
204 rtx object;
205 rtx *loc;
206 rtx new;
207 int in_group;
209 rtx old = *loc;
211 if (old == new || rtx_equal_p (old, new))
212 return 1;
214 if (in_group == 0 && num_changes != 0)
215 abort ();
217 *loc = new;
219 /* Save the information describing this change. */
220 if (num_changes >= changes_allocated)
222 if (changes_allocated == 0)
223 /* This value allows for repeated substitutions inside complex
224 indexed addresses, or changes in up to 5 insns. */
225 changes_allocated = MAX_RECOG_OPERANDS * 5;
226 else
227 changes_allocated *= 2;
229 changes =
230 (change_t*) xrealloc (changes,
231 sizeof (change_t) * changes_allocated);
234 changes[num_changes].object = object;
235 changes[num_changes].loc = loc;
236 changes[num_changes].old = old;
238 if (object && GET_CODE (object) != MEM)
240 /* Set INSN_CODE to force rerecognition of insn. Save old code in
241 case invalid. */
242 changes[num_changes].old_code = INSN_CODE (object);
243 INSN_CODE (object) = -1;
246 num_changes++;
248 /* If we are making a group of changes, return 1. Otherwise, validate the
249 change group we made. */
251 if (in_group)
252 return 1;
253 else
254 return apply_change_group ();
257 /* This subroutine of apply_change_group verifies whether the changes to INSN
258 were valid; i.e. whether INSN can still be recognized. */
260 static int
261 insn_invalid_p (insn)
262 rtx insn;
264 int icode = recog_memoized (insn);
265 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
267 if (is_asm)
269 if (! check_asm_operands (PATTERN (insn)))
270 return 1;
272 /* Disallow modification of ASM_OPERANDS after reload; verifying the
273 constraints is too difficult. */
274 if (reload_completed)
275 return 1;
277 else if (icode < 0)
278 return 1;
280 /* After reload, verify that all constraints are satisfied. */
281 if (reload_completed)
283 insn_extract (insn);
285 if (! constrain_operands (INSN_CODE (insn), 1))
286 return 1;
289 return 0;
292 /* Apply a group of changes previously issued with `validate_change'.
293 Return 1 if all changes are valid, zero otherwise. */
296 apply_change_group ()
298 int i;
300 /* The changes have been applied and all INSN_CODEs have been reset to force
301 rerecognition.
303 The changes are valid if we aren't given an object, or if we are
 304 given a MEM and it still is a valid address, or if this is an insn
305 and it is recognized. In the latter case, if reload has completed,
306 we also require that the operands meet the constraints for
307 the insn. We do not allow modifying an ASM_OPERANDS after reload
308 has completed because verifying the constraints is too difficult. */
310 for (i = 0; i < num_changes; i++)
312 rtx object = changes[i].object;
314 if (object == 0)
315 continue;
317 if (GET_CODE (object) == MEM)
319 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
320 break;
322 else if (insn_invalid_p (object))
324 rtx pat = PATTERN (object);
326 /* Perhaps we couldn't recognize the insn because there were
327 extra CLOBBERs at the end. If so, try to re-recognize
328 without the last CLOBBER (later iterations will cause each of
329 them to be eliminated, in turn). But don't do this if we
330 have an ASM_OPERAND. */
331 if (GET_CODE (pat) == PARALLEL
332 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
333 && asm_noperands (PATTERN (object)) < 0)
335 rtx newpat;
337 if (XVECLEN (pat, 0) == 2)
338 newpat = XVECEXP (pat, 0, 0);
339 else
341 int j;
343 newpat = gen_rtx_PARALLEL (VOIDmode,
344 gen_rtvec (XVECLEN (pat, 0) - 1));
345 for (j = 0; j < XVECLEN (newpat, 0); j++)
346 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
349 /* Add a new change to this group to replace the pattern
350 with this new pattern. Then consider this change
351 as having succeeded. The change we added will
352 cause the entire call to fail if things remain invalid.
354 Note that this can lose if a later change than the one
355 we are processing specified &XVECEXP (PATTERN (object), 0, X)
356 but this shouldn't occur. */
358 validate_change (object, &PATTERN (object), newpat, 1);
360 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
361 /* If this insn is a CLOBBER or USE, it is always valid, but is
362 never recognized. */
363 continue;
364 else
365 break;
369 if (i == num_changes)
371 num_changes = 0;
372 return 1;
374 else
376 cancel_changes (0);
377 return 0;
381 /* Return the number of changes so far in the current group. */
384 num_validated_changes ()
386 return num_changes;
389 /* Retract the changes numbered NUM and up. */
391 void
392 cancel_changes (num)
393 int num;
395 int i;
397 /* Back out all the changes. Do this in the opposite order in which
398 they were made. */
399 for (i = num_changes - 1; i >= num; i--)
401 *changes[i].loc = changes[i].old;
402 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
403 INSN_CODE (changes[i].object) = changes[i].old_code;
405 num_changes = num;
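/* Illustrative sketch, not part of the original file: a typical caller
   queues several dependent edits with IN_GROUP nonzero and then validates
   them together.  INSN, NEW_SRC and NEW_DEST are hypothetical rtx values;
   num_validated_changes and cancel_changes allow backing out only part of
   a larger group.  */
#if 0
  validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
  validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);

  if (! apply_change_group ())
    /* Both edits have already been undone and INSN is unchanged.  */
    return 0;
#endif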
408 /* Replace every occurrence of FROM in X with TO. Mark each change with
409 validate_change passing OBJECT. */
411 static void
412 validate_replace_rtx_1 (loc, from, to, object)
413 rtx *loc;
414 rtx from, to, object;
416 register int i, j;
417 register char *fmt;
418 register rtx x = *loc;
419 enum rtx_code code = GET_CODE (x);
421 /* X matches FROM if it is the same rtx or they are both referring to the
422 same register in the same mode. Avoid calling rtx_equal_p unless the
423 operands look similar. */
425 if (x == from
426 || (GET_CODE (x) == REG && GET_CODE (from) == REG
427 && GET_MODE (x) == GET_MODE (from)
428 && REGNO (x) == REGNO (from))
429 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
430 && rtx_equal_p (x, from)))
432 validate_change (object, loc, to, 1);
433 return;
436 /* For commutative or comparison operations, try replacing each argument
437 separately and seeing if we made any changes. If so, put a constant
 438 argument last. */
439 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
441 int prev_changes = num_changes;
443 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
444 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
445 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
447 validate_change (object, loc,
448 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
449 : swap_condition (code),
450 GET_MODE (x), XEXP (x, 1),
451 XEXP (x, 0)),
453 x = *loc;
454 code = GET_CODE (x);
458 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
459 done the substitution, otherwise we won't. */
461 switch (code)
463 case PLUS:
464 /* If we have a PLUS whose second operand is now a CONST_INT, use
465 plus_constant to try to simplify it. */
466 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
467 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
469 return;
471 case MINUS:
472 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
474 validate_change (object, loc,
475 plus_constant (XEXP (x, 0), - INTVAL (to)),
477 return;
479 break;
481 case ZERO_EXTEND:
482 case SIGN_EXTEND:
483 /* In these cases, the operation to be performed depends on the mode
484 of the operand. If we are replacing the operand with a VOIDmode
485 constant, we lose the information. So try to simplify the operation
486 in that case. If it fails, substitute in something that we know
487 won't be recognized. */
488 if (GET_MODE (to) == VOIDmode
489 && (XEXP (x, 0) == from
490 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
491 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
492 && REGNO (XEXP (x, 0)) == REGNO (from))))
494 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
495 GET_MODE (from));
496 if (new == 0)
497 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
499 validate_change (object, loc, new, 1);
500 return;
502 break;
504 case SUBREG:
505 /* If we have a SUBREG of a register that we are replacing and we are
506 replacing it with a MEM, make a new MEM and try replacing the
507 SUBREG with it. Don't do this if the MEM has a mode-dependent address
508 or if we would be widening it. */
510 if (SUBREG_REG (x) == from
511 && GET_CODE (from) == REG
512 && GET_CODE (to) == MEM
513 && ! mode_dependent_address_p (XEXP (to, 0))
514 && ! MEM_VOLATILE_P (to)
515 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
517 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
518 enum machine_mode mode = GET_MODE (x);
519 rtx new;
521 if (BYTES_BIG_ENDIAN)
522 offset += (MIN (UNITS_PER_WORD,
523 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
524 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
526 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
527 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
528 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
529 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
530 validate_change (object, loc, new, 1);
531 return;
533 break;
535 case ZERO_EXTRACT:
536 case SIGN_EXTRACT:
537 /* If we are replacing a register with memory, try to change the memory
538 to be the mode required for memory in extract operations (this isn't
539 likely to be an insertion operation; if it was, nothing bad will
540 happen, we might just fail in some cases). */
542 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
543 && GET_CODE (XEXP (x, 1)) == CONST_INT
544 && GET_CODE (XEXP (x, 2)) == CONST_INT
545 && ! mode_dependent_address_p (XEXP (to, 0))
546 && ! MEM_VOLATILE_P (to))
548 enum machine_mode wanted_mode = VOIDmode;
549 enum machine_mode is_mode = GET_MODE (to);
550 int pos = INTVAL (XEXP (x, 2));
552 #ifdef HAVE_extzv
553 if (code == ZERO_EXTRACT)
555 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
556 if (wanted_mode == VOIDmode)
557 wanted_mode = word_mode;
559 #endif
560 #ifdef HAVE_extv
561 if (code == SIGN_EXTRACT)
563 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
564 if (wanted_mode == VOIDmode)
565 wanted_mode = word_mode;
567 #endif
569 /* If we have a narrower mode, we can do something. */
570 if (wanted_mode != VOIDmode
571 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
573 int offset = pos / BITS_PER_UNIT;
574 rtx newmem;
576 /* If the bytes and bits are counted differently, we
577 must adjust the offset. */
578 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
579 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
580 - offset);
582 pos %= GET_MODE_BITSIZE (wanted_mode);
584 newmem = gen_rtx_MEM (wanted_mode,
585 plus_constant (XEXP (to, 0), offset));
586 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
587 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
588 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);
590 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
591 validate_change (object, &XEXP (x, 0), newmem, 1);
595 break;
597 default:
598 break;
601 /* For commutative or comparison operations we've already performed
602 replacements. Don't try to perform them again. */
603 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
605 fmt = GET_RTX_FORMAT (code);
606 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
608 if (fmt[i] == 'e')
609 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
610 else if (fmt[i] == 'E')
611 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
612 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
617 /* Try replacing every occurrence of FROM in INSN with TO. After all
618 changes have been made, validate by seeing if INSN is still valid. */
621 validate_replace_rtx (from, to, insn)
622 rtx from, to, insn;
624 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
625 return apply_change_group ();
 628 /* Try replacing every occurrence of FROM in INSN with TO, recording the
 629 replacements in the current change group for a later apply_change_group. */
631 void
632 validate_replace_rtx_group (from, to, insn)
633 rtx from, to, insn;
635 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
638 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
639 SET_DESTs. After all changes have been made, validate by seeing if
640 INSN is still valid. */
643 validate_replace_src (from, to, insn)
644 rtx from, to, insn;
646 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
647 || GET_CODE (PATTERN (insn)) != SET)
648 abort ();
650 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
651 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
652 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
653 from, to, insn);
654 return apply_change_group ();
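/* Illustrative sketch, not part of the original file: substitute a known
   constant for a register throughout the sources of a hypothetical insn
   INSN.  REG is hypothetical; GEN_INT builds the replacement CONST_INT.  */
#if 0
  if (! validate_replace_src (reg, GEN_INT (42), insn))
    /* INSN would no longer have been recognizable; it is unchanged.  */
    return;
#endif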
657 #ifdef HAVE_cc0
658 /* Return 1 if the insn using CC0 set by INSN does not contain
659 any ordered tests applied to the condition codes.
660 EQ and NE tests do not count. */
663 next_insn_tests_no_inequality (insn)
664 rtx insn;
666 register rtx next = next_cc0_user (insn);
668 /* If there is no next insn, we have to take the conservative choice. */
669 if (next == 0)
670 return 0;
672 return ((GET_CODE (next) == JUMP_INSN
673 || GET_CODE (next) == INSN
674 || GET_CODE (next) == CALL_INSN)
675 && ! inequality_comparisons_p (PATTERN (next)));
678 #if 0 /* This is useless since the insn that sets the cc's
679 must be followed immediately by the use of them. */
680 /* Return 1 if the CC value set up by INSN is not used. */
683 next_insns_test_no_inequality (insn)
684 rtx insn;
686 register rtx next = NEXT_INSN (insn);
688 for (; next != 0; next = NEXT_INSN (next))
690 if (GET_CODE (next) == CODE_LABEL
691 || GET_CODE (next) == BARRIER)
692 return 1;
693 if (GET_CODE (next) == NOTE)
694 continue;
695 if (inequality_comparisons_p (PATTERN (next)))
696 return 0;
697 if (sets_cc0_p (PATTERN (next)) == 1)
698 return 1;
699 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
700 return 1;
702 return 1;
704 #endif
705 #endif
707 /* This is used by find_single_use to locate an rtx that contains exactly one
708 use of DEST, which is typically either a REG or CC0. It returns a
709 pointer to the innermost rtx expression containing DEST. Appearances of
710 DEST that are being used to totally replace it are not counted. */
712 static rtx *
713 find_single_use_1 (dest, loc)
714 rtx dest;
715 rtx *loc;
717 rtx x = *loc;
718 enum rtx_code code = GET_CODE (x);
719 rtx *result = 0;
720 rtx *this_result;
721 int i;
722 char *fmt;
724 switch (code)
726 case CONST_INT:
727 case CONST:
728 case LABEL_REF:
729 case SYMBOL_REF:
730 case CONST_DOUBLE:
731 case CLOBBER:
732 return 0;
734 case SET:
735 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
736 of a REG that occupies all of the REG, the insn uses DEST if
737 it is mentioned in the destination or the source. Otherwise, we
 738 just need to check the source. */
739 if (GET_CODE (SET_DEST (x)) != CC0
740 && GET_CODE (SET_DEST (x)) != PC
741 && GET_CODE (SET_DEST (x)) != REG
742 && ! (GET_CODE (SET_DEST (x)) == SUBREG
743 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
744 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
745 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
746 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
747 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
748 break;
750 return find_single_use_1 (dest, &SET_SRC (x));
752 case MEM:
753 case SUBREG:
754 return find_single_use_1 (dest, &XEXP (x, 0));
756 default:
757 break;
760 /* If it wasn't one of the common cases above, check each expression and
761 vector of this code. Look for a unique usage of DEST. */
763 fmt = GET_RTX_FORMAT (code);
764 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
766 if (fmt[i] == 'e')
768 if (dest == XEXP (x, i)
769 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
770 && REGNO (dest) == REGNO (XEXP (x, i))))
771 this_result = loc;
772 else
773 this_result = find_single_use_1 (dest, &XEXP (x, i));
775 if (result == 0)
776 result = this_result;
777 else if (this_result)
778 /* Duplicate usage. */
779 return 0;
781 else if (fmt[i] == 'E')
783 int j;
785 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
787 if (XVECEXP (x, i, j) == dest
788 || (GET_CODE (dest) == REG
789 && GET_CODE (XVECEXP (x, i, j)) == REG
790 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
791 this_result = loc;
792 else
793 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
795 if (result == 0)
796 result = this_result;
797 else if (this_result)
798 return 0;
803 return result;
806 /* See if DEST, produced in INSN, is used only a single time in the
807 sequel. If so, return a pointer to the innermost rtx expression in which
808 it is used.
810 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
 812 This routine will usually return zero either before flow is called (because
813 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
814 note can't be trusted).
816 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
817 care about REG_DEAD notes or LOG_LINKS.
819 Otherwise, we find the single use by finding an insn that has a
820 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
821 only referenced once in that insn, we know that it must be the first
822 and last insn referencing DEST. */
824 rtx *
825 find_single_use (dest, insn, ploc)
826 rtx dest;
827 rtx insn;
828 rtx *ploc;
830 rtx next;
831 rtx *result;
832 rtx link;
834 #ifdef HAVE_cc0
835 if (dest == cc0_rtx)
837 next = NEXT_INSN (insn);
838 if (next == 0
839 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
840 return 0;
842 result = find_single_use_1 (dest, &PATTERN (next));
843 if (result && ploc)
844 *ploc = next;
845 return result;
847 #endif
849 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
850 return 0;
852 for (next = next_nonnote_insn (insn);
853 next != 0 && GET_CODE (next) != CODE_LABEL;
854 next = next_nonnote_insn (next))
855 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
857 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
858 if (XEXP (link, 0) == insn)
859 break;
861 if (link)
863 result = find_single_use_1 (dest, &PATTERN (next));
864 if (ploc)
865 *ploc = next;
866 return result;
870 return 0;
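/* Illustrative sketch, not part of the original file: locate the single
   use of a value and rewrite it in place, in the style of combine.  DEST,
   INSN and REPLACEMENT are hypothetical.  */
#if 0
  rtx use_insn;
  rtx *usep = find_single_use (dest, insn, &use_insn);

  if (usep != 0 && validate_change (use_insn, usep, replacement, 0))
    /* USE_INSN now uses REPLACEMENT and still matches a pattern, so the
       computation in INSN may be deletable.  */
    delete_insn (insn);
#endif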
873 /* Return 1 if OP is a valid general operand for machine mode MODE.
874 This is either a register reference, a memory reference,
875 or a constant. In the case of a memory reference, the address
876 is checked for general validity for the target machine.
878 Register and memory references must have mode MODE in order to be valid,
879 but some constants have no machine mode and are valid for any mode.
881 If MODE is VOIDmode, OP is checked for validity for whatever mode
882 it has.
884 The main use of this function is as a predicate in match_operand
885 expressions in the machine description.
887 For an explanation of this function's behavior for registers of
888 class NO_REGS, see the comment for `register_operand'. */
891 general_operand (op, mode)
892 register rtx op;
893 enum machine_mode mode;
895 register enum rtx_code code = GET_CODE (op);
896 int mode_altering_drug = 0;
898 if (mode == VOIDmode)
899 mode = GET_MODE (op);
901 /* Don't accept CONST_INT or anything similar
902 if the caller wants something floating. */
903 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
904 && GET_MODE_CLASS (mode) != MODE_INT
905 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
906 return 0;
908 if (CONSTANT_P (op))
909 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
910 #ifdef LEGITIMATE_PIC_OPERAND_P
911 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
912 #endif
913 && LEGITIMATE_CONSTANT_P (op));
915 /* Except for certain constants with VOIDmode, already checked for,
916 OP's mode must match MODE if MODE specifies a mode. */
918 if (GET_MODE (op) != mode)
919 return 0;
921 if (code == SUBREG)
923 #ifdef INSN_SCHEDULING
924 /* On machines that have insn scheduling, we want all memory
 925 references to be explicit, so outlaw paradoxical SUBREGs. */
926 if (GET_CODE (SUBREG_REG (op)) == MEM
927 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
928 return 0;
929 #endif
931 op = SUBREG_REG (op);
932 code = GET_CODE (op);
933 #if 0
934 /* No longer needed, since (SUBREG (MEM...))
935 will load the MEM into a reload reg in the MEM's own mode. */
936 mode_altering_drug = 1;
937 #endif
940 if (code == REG)
941 /* A register whose class is NO_REGS is not a general operand. */
942 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
943 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
945 if (code == MEM)
947 register rtx y = XEXP (op, 0);
948 if (! volatile_ok && MEM_VOLATILE_P (op))
949 return 0;
950 if (GET_CODE (y) == ADDRESSOF)
951 return 1;
952 /* Use the mem's mode, since it will be reloaded thus. */
953 mode = GET_MODE (op);
954 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
957 /* Pretend this is an operand for now; we'll run force_operand
958 on its replacement in fixup_var_refs_1. */
959 if (code == ADDRESSOF)
960 return 1;
962 return 0;
964 win:
965 if (mode_altering_drug)
966 return ! mode_dependent_address_p (XEXP (op, 0));
967 return 1;
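/* Illustrative note, not part of the original file: in a machine
   description this predicate is typically named by a match_operand, for
   example in a hypothetical move pattern

     (define_insn ""
       [(set (match_operand:SI 0 "general_operand" "=g")
             (match_operand:SI 1 "general_operand" "g"))]
       "" "...")

   and the generated recognizer then calls general_operand on each of
   those operands with mode SImode.  */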
970 /* Return 1 if OP is a valid memory address for a memory reference
971 of mode MODE.
973 The main use of this function is as a predicate in match_operand
974 expressions in the machine description. */
977 address_operand (op, mode)
978 register rtx op;
979 enum machine_mode mode;
981 return memory_address_p (mode, op);
984 /* Return 1 if OP is a register reference of mode MODE.
985 If MODE is VOIDmode, accept a register in any mode.
987 The main use of this function is as a predicate in match_operand
988 expressions in the machine description.
990 As a special exception, registers whose class is NO_REGS are
991 not accepted by `register_operand'. The reason for this change
992 is to allow the representation of special architecture artifacts
993 (such as a condition code register) without extending the rtl
994 definitions. Since registers of class NO_REGS cannot be used
995 as registers in any case where register classes are examined,
996 it is most consistent to keep this function from accepting them. */
999 register_operand (op, mode)
1000 register rtx op;
1001 enum machine_mode mode;
1003 if (GET_MODE (op) != mode && mode != VOIDmode)
1004 return 0;
1006 if (GET_CODE (op) == SUBREG)
1008 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1009 because it is guaranteed to be reloaded into one.
1010 Just make sure the MEM is valid in itself.
1011 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1012 but currently it does result from (SUBREG (REG)...) where the
1013 reg went on the stack.) */
1014 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1015 return general_operand (op, mode);
1017 #ifdef CLASS_CANNOT_CHANGE_SIZE
1018 if (GET_CODE (SUBREG_REG (op)) == REG
1019 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1020 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
1021 REGNO (SUBREG_REG (op)))
1022 && (GET_MODE_SIZE (mode)
1023 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1024 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1025 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1026 return 0;
1027 #endif
1029 op = SUBREG_REG (op);
1032 /* We don't consider registers whose class is NO_REGS
1033 to be a register operand. */
1034 return (GET_CODE (op) == REG
1035 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1036 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1039 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1040 or a hard register. */
1043 scratch_operand (op, mode)
1044 register rtx op;
1045 enum machine_mode mode;
1047 return (GET_MODE (op) == mode
1048 && (GET_CODE (op) == SCRATCH
1049 || (GET_CODE (op) == REG
1050 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
1053 /* Return 1 if OP is a valid immediate operand for mode MODE.
1055 The main use of this function is as a predicate in match_operand
1056 expressions in the machine description. */
1059 immediate_operand (op, mode)
1060 register rtx op;
1061 enum machine_mode mode;
1063 /* Don't accept CONST_INT or anything similar
1064 if the caller wants something floating. */
1065 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1066 && GET_MODE_CLASS (mode) != MODE_INT
1067 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1068 return 0;
1070 return (CONSTANT_P (op)
1071 && (GET_MODE (op) == mode || mode == VOIDmode
1072 || GET_MODE (op) == VOIDmode)
1073 #ifdef LEGITIMATE_PIC_OPERAND_P
1074 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1075 #endif
1076 && LEGITIMATE_CONSTANT_P (op));
1079 /* Returns 1 if OP is an operand that is a CONST_INT. */
1082 const_int_operand (op, mode)
1083 register rtx op;
1084 enum machine_mode mode ATTRIBUTE_UNUSED;
1086 return GET_CODE (op) == CONST_INT;
1089 /* Returns 1 if OP is an operand that is a constant integer or constant
1090 floating-point number. */
1093 const_double_operand (op, mode)
1094 register rtx op;
1095 enum machine_mode mode;
1097 /* Don't accept CONST_INT or anything similar
1098 if the caller wants something floating. */
1099 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1100 && GET_MODE_CLASS (mode) != MODE_INT
1101 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1102 return 0;
1104 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1105 && (mode == VOIDmode || GET_MODE (op) == mode
1106 || GET_MODE (op) == VOIDmode));
1109 /* Return 1 if OP is a general operand that is not an immediate operand. */
1112 nonimmediate_operand (op, mode)
1113 register rtx op;
1114 enum machine_mode mode;
1116 return (general_operand (op, mode) && ! CONSTANT_P (op));
1119 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1122 nonmemory_operand (op, mode)
1123 register rtx op;
1124 enum machine_mode mode;
1126 if (CONSTANT_P (op))
1128 /* Don't accept CONST_INT or anything similar
1129 if the caller wants something floating. */
1130 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1131 && GET_MODE_CLASS (mode) != MODE_INT
1132 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1133 return 0;
1135 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1136 #ifdef LEGITIMATE_PIC_OPERAND_P
1137 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1138 #endif
1139 && LEGITIMATE_CONSTANT_P (op));
1142 if (GET_MODE (op) != mode && mode != VOIDmode)
1143 return 0;
1145 if (GET_CODE (op) == SUBREG)
1147 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1148 because it is guaranteed to be reloaded into one.
1149 Just make sure the MEM is valid in itself.
1150 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1151 but currently it does result from (SUBREG (REG)...) where the
1152 reg went on the stack.) */
1153 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1154 return general_operand (op, mode);
1155 op = SUBREG_REG (op);
1158 /* We don't consider registers whose class is NO_REGS
1159 to be a register operand. */
1160 return (GET_CODE (op) == REG
1161 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1162 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1165 /* Return 1 if OP is a valid operand that stands for pushing a
1166 value of mode MODE onto the stack.
1168 The main use of this function is as a predicate in match_operand
1169 expressions in the machine description. */
1172 push_operand (op, mode)
1173 rtx op;
1174 enum machine_mode mode;
1176 if (GET_CODE (op) != MEM)
1177 return 0;
1179 if (GET_MODE (op) != mode)
1180 return 0;
1182 op = XEXP (op, 0);
1184 if (GET_CODE (op) != STACK_PUSH_CODE)
1185 return 0;
1187 return XEXP (op, 0) == stack_pointer_rtx;
1190 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1193 memory_address_p (mode, addr)
1194 enum machine_mode mode;
1195 register rtx addr;
1197 if (GET_CODE (addr) == ADDRESSOF)
1198 return 1;
1200 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1201 return 0;
1203 win:
1204 return 1;
1207 /* Return 1 if OP is a valid memory reference with mode MODE,
1208 including a valid address.
1210 The main use of this function is as a predicate in match_operand
1211 expressions in the machine description. */
1214 memory_operand (op, mode)
1215 register rtx op;
1216 enum machine_mode mode;
1218 rtx inner;
1220 if (! reload_completed)
1221 /* Note that no SUBREG is a memory operand before end of reload pass,
1222 because (SUBREG (MEM...)) forces reloading into a register. */
1223 return GET_CODE (op) == MEM && general_operand (op, mode);
1225 if (mode != VOIDmode && GET_MODE (op) != mode)
1226 return 0;
1228 inner = op;
1229 if (GET_CODE (inner) == SUBREG)
1230 inner = SUBREG_REG (inner);
1232 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1235 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1236 that is, a memory reference whose address is a general_operand. */
1239 indirect_operand (op, mode)
1240 register rtx op;
1241 enum machine_mode mode;
1243 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1244 if (! reload_completed
1245 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1247 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1248 rtx inner = SUBREG_REG (op);
1250 if (BYTES_BIG_ENDIAN)
1251 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1252 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1254 if (mode != VOIDmode && GET_MODE (op) != mode)
1255 return 0;
1257 /* The only way that we can have a general_operand as the resulting
1258 address is if OFFSET is zero and the address already is an operand
1259 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1260 operand. */
1262 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1263 || (GET_CODE (XEXP (inner, 0)) == PLUS
1264 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1265 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1266 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1269 return (GET_CODE (op) == MEM
1270 && memory_operand (op, mode)
1271 && general_operand (XEXP (op, 0), Pmode));
1274 /* Return 1 if this is a comparison operator. This allows the use of
1275 MATCH_OPERATOR to recognize all the branch insns. */
1278 comparison_operator (op, mode)
1279 register rtx op;
1280 enum machine_mode mode;
1282 return ((mode == VOIDmode || GET_MODE (op) == mode)
1283 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1286 /* If BODY is an insn body that uses ASM_OPERANDS,
1287 return the number of operands (both input and output) in the insn.
1288 Otherwise return -1. */
1291 asm_noperands (body)
1292 rtx body;
1294 if (GET_CODE (body) == ASM_OPERANDS)
1295 /* No output operands: return number of input operands. */
1296 return ASM_OPERANDS_INPUT_LENGTH (body);
1297 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1298 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1299 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1300 else if (GET_CODE (body) == PARALLEL
1301 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1302 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1304 /* Multiple output operands, or 1 output plus some clobbers:
1305 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1306 int i;
1307 int n_sets;
1309 /* Count backwards through CLOBBERs to determine number of SETs. */
1310 for (i = XVECLEN (body, 0); i > 0; i--)
1312 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1313 break;
1314 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1315 return -1;
1318 /* N_SETS is now number of output operands. */
1319 n_sets = i;
1321 /* Verify that all the SETs we have
1322 came from a single original asm_operands insn
1323 (so that invalid combinations are blocked). */
1324 for (i = 0; i < n_sets; i++)
1326 rtx elt = XVECEXP (body, 0, i);
1327 if (GET_CODE (elt) != SET)
1328 return -1;
1329 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1330 return -1;
1331 /* If these ASM_OPERANDS rtx's came from different original insns
1332 then they aren't allowed together. */
1333 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1334 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1335 return -1;
1337 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1338 + n_sets);
1340 else if (GET_CODE (body) == PARALLEL
1341 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1343 /* 0 outputs, but some clobbers:
1344 body is [(asm_operands ...) (clobber (reg ...))...]. */
1345 int i;
1347 /* Make sure all the other parallel things really are clobbers. */
1348 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1349 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1350 return -1;
1352 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1354 else
1355 return -1;
1358 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1359 copy its operands (both input and output) into the vector OPERANDS,
1360 the locations of the operands within the insn into the vector OPERAND_LOCS,
1361 and the constraints for the operands into CONSTRAINTS.
1362 Write the modes of the operands into MODES.
1363 Return the assembler-template.
1365 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1366 we don't store that info. */
1368 char *
1369 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1370 rtx body;
1371 rtx *operands;
1372 rtx **operand_locs;
1373 char **constraints;
1374 enum machine_mode *modes;
1376 register int i;
1377 int noperands;
1378 char *template = 0;
1380 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1382 rtx asmop = SET_SRC (body);
1383 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1385 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1387 for (i = 1; i < noperands; i++)
1389 if (operand_locs)
1390 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1391 if (operands)
1392 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1393 if (constraints)
1394 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1395 if (modes)
1396 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1399 /* The output is in the SET.
1400 Its constraint is in the ASM_OPERANDS itself. */
1401 if (operands)
1402 operands[0] = SET_DEST (body);
1403 if (operand_locs)
1404 operand_locs[0] = &SET_DEST (body);
1405 if (constraints)
1406 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1407 if (modes)
1408 modes[0] = GET_MODE (SET_DEST (body));
1409 template = ASM_OPERANDS_TEMPLATE (asmop);
1411 else if (GET_CODE (body) == ASM_OPERANDS)
1413 rtx asmop = body;
1414 /* No output operands: BODY is (asm_operands ....). */
1416 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1418 /* The input operands are found in the 1st element vector. */
1419 /* Constraints for inputs are in the 2nd element vector. */
1420 for (i = 0; i < noperands; i++)
1422 if (operand_locs)
1423 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1424 if (operands)
1425 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1426 if (constraints)
1427 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1428 if (modes)
1429 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1431 template = ASM_OPERANDS_TEMPLATE (asmop);
1433 else if (GET_CODE (body) == PARALLEL
1434 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1436 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1437 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1438 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1439 int nout = 0; /* Does not include CLOBBERs. */
1441 /* At least one output, plus some CLOBBERs. */
1443 /* The outputs are in the SETs.
1444 Their constraints are in the ASM_OPERANDS itself. */
1445 for (i = 0; i < nparallel; i++)
1447 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1448 break; /* Past last SET */
1450 if (operands)
1451 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1452 if (operand_locs)
1453 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1454 if (constraints)
1455 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1456 if (modes)
1457 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1458 nout++;
1461 for (i = 0; i < nin; i++)
1463 if (operand_locs)
1464 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1465 if (operands)
1466 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1467 if (constraints)
1468 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1469 if (modes)
1470 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1473 template = ASM_OPERANDS_TEMPLATE (asmop);
1475 else if (GET_CODE (body) == PARALLEL
1476 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1478 /* No outputs, but some CLOBBERs. */
1480 rtx asmop = XVECEXP (body, 0, 0);
1481 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1483 for (i = 0; i < nin; i++)
1485 if (operand_locs)
1486 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1487 if (operands)
1488 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1489 if (constraints)
1490 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1491 if (modes)
1492 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1495 template = ASM_OPERANDS_TEMPLATE (asmop);
1498 return template;
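/* Illustrative sketch, not part of the original file: asm_noperands and
   decode_asm_operands are normally used together, as in check_asm_operands
   near the top of this file.  BODY is the PATTERN of a hypothetical asm
   insn.  */
#if 0
  int n = asm_noperands (body);

  if (n > 0)
    {
      rtx *ops = (rtx *) alloca (n * sizeof (rtx));
      char **cons = (char **) alloca (n * sizeof (char *));

      decode_asm_operands (body, ops, NULL_PTR, cons, NULL_PTR);
      /* ops[0..n-1] now hold the operands and cons[i] the constraint
         string for operand i.  */
    }
#endif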
1501 /* Given an rtx *P, if it is a sum containing an integer constant term,
1502 return the location (type rtx *) of the pointer to that constant term.
1503 Otherwise, return a null pointer. */
1505 static rtx *
1506 find_constant_term_loc (p)
1507 rtx *p;
1509 register rtx *tem;
1510 register enum rtx_code code = GET_CODE (*p);
1512 /* If *P IS such a constant term, P is its location. */
1514 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1515 || code == CONST)
1516 return p;
1518 /* Otherwise, if not a sum, it has no constant term. */
1520 if (GET_CODE (*p) != PLUS)
1521 return 0;
1523 /* If one of the summands is constant, return its location. */
1525 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1526 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1527 return p;
1529 /* Otherwise, check each summand for containing a constant term. */
1531 if (XEXP (*p, 0) != 0)
1533 tem = find_constant_term_loc (&XEXP (*p, 0));
1534 if (tem != 0)
1535 return tem;
1538 if (XEXP (*p, 1) != 0)
1540 tem = find_constant_term_loc (&XEXP (*p, 1));
1541 if (tem != 0)
1542 return tem;
1545 return 0;
1548 /* Return 1 if OP is a memory reference
1549 whose address contains no side effects
1550 and remains valid after the addition
1551 of a positive integer less than the
1552 size of the object being referenced.
1554 We assume that the original address is valid and do not check it.
1556 This uses strict_memory_address_p as a subroutine, so
1557 don't use it before reload. */
1560 offsettable_memref_p (op)
1561 rtx op;
1563 return ((GET_CODE (op) == MEM)
1564 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1567 /* Similar, but don't require a strictly valid mem ref:
1568 consider pseudo-regs valid as index or base regs. */
1571 offsettable_nonstrict_memref_p (op)
1572 rtx op;
1574 return ((GET_CODE (op) == MEM)
1575 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1578 /* Return 1 if Y is a memory address which contains no side effects
1579 and would remain valid after the addition of a positive integer
1580 less than the size of that mode.
1582 We assume that the original address is valid and do not check it.
1583 We do check that it is valid for narrower modes.
1585 If STRICTP is nonzero, we require a strictly valid address,
1586 for the sake of use in reload.c. */
1589 offsettable_address_p (strictp, mode, y)
1590 int strictp;
1591 enum machine_mode mode;
1592 register rtx y;
1594 register enum rtx_code ycode = GET_CODE (y);
1595 register rtx z;
1596 rtx y1 = y;
1597 rtx *y2;
1598 int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);
1600 if (CONSTANT_ADDRESS_P (y))
1601 return 1;
1603 /* Adjusting an offsettable address involves changing to a narrower mode.
1604 Make sure that's OK. */
1606 if (mode_dependent_address_p (y))
1607 return 0;
1609 /* If the expression contains a constant term,
1610 see if it remains valid when max possible offset is added. */
1612 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1614 int good;
1616 y1 = *y2;
1617 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1618 /* Use QImode because an odd displacement may be automatically invalid
1619 for any wider mode. But it should be valid for a single byte. */
1620 good = (*addressp) (QImode, y);
1622 /* In any case, restore old contents of memory. */
1623 *y2 = y1;
1624 return good;
1627 if (ycode == PRE_DEC || ycode == PRE_INC
1628 || ycode == POST_DEC || ycode == POST_INC)
1629 return 0;
1631 /* The offset added here is chosen as the maximum offset that
1632 any instruction could need to add when operating on something
1633 of the specified mode. We assume that if Y and Y+c are
1634 valid addresses then so is Y+d for all 0<d<c. */
1636 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1638 /* Use QImode because an odd displacement may be automatically invalid
1639 for any wider mode. But it should be valid for a single byte. */
1640 return (*addressp) (QImode, z);
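/* Worked example, not part of the original file: for a 4-byte SImode
   reference to (plus (reg X) (const_int 12)), the test above asks whether
   (plus (reg X) (const_int 15)) -- the address of the last byte of the
   object -- is still a valid QImode address.  */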
1643 /* Return 1 if ADDR is an address-expression whose effect depends
1644 on the mode of the memory reference it is used in.
1646 Autoincrement addressing is a typical example of mode-dependence
1647 because the amount of the increment depends on the mode. */
1650 mode_dependent_address_p (addr)
1651 rtx addr;
1653 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1654 return 0;
1655 win:
1656 return 1;
1659 /* Return 1 if OP is a general operand
1660 other than a memory ref with a mode dependent address. */
1663 mode_independent_operand (op, mode)
1664 enum machine_mode mode;
1665 rtx op;
1667 rtx addr;
1669 if (! general_operand (op, mode))
1670 return 0;
1672 if (GET_CODE (op) != MEM)
1673 return 1;
1675 addr = XEXP (op, 0);
1676 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1677 return 1;
1678 lose:
1679 return 0;
1682 /* Given an operand OP that is a valid memory reference
1683 which satisfies offsettable_memref_p,
1684 return a new memory reference whose address has been adjusted by OFFSET.
 1685 OFFSET should be positive and less than the size of the object referenced. */
1689 adj_offsettable_operand (op, offset)
1690 rtx op;
1691 int offset;
1693 register enum rtx_code code = GET_CODE (op);
1695 if (code == MEM)
1697 register rtx y = XEXP (op, 0);
1698 register rtx new;
1700 if (CONSTANT_ADDRESS_P (y))
1702 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1703 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1704 return new;
1707 if (GET_CODE (y) == PLUS)
1709 rtx z = y;
1710 register rtx *const_loc;
1712 op = copy_rtx (op);
1713 z = XEXP (op, 0);
1714 const_loc = find_constant_term_loc (&z);
1715 if (const_loc)
1717 *const_loc = plus_constant_for_output (*const_loc, offset);
1718 return op;
1722 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1723 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1724 return new;
1726 abort ();
1729 /* Analyze INSN and compute the variables recog_n_operands, recog_n_dups,
1730 recog_n_alternatives, recog_operand, recog_operand_loc, recog_constraints,
1731 recog_operand_mode, recog_dup_loc and recog_dup_num.
1732 If REGISTER_CONSTRAINTS is not defined, also compute
1733 recog_operand_address_p. */
1734 void
1735 extract_insn (insn)
1736 rtx insn;
1738 int i;
1739 int icode;
1740 int noperands;
1741 rtx body = PATTERN (insn);
1743 recog_n_operands = 0;
1744 recog_n_alternatives = 0;
1745 recog_n_dups = 0;
1747 switch (GET_CODE (body))
1749 case USE:
1750 case CLOBBER:
1751 case ASM_INPUT:
1752 case ADDR_VEC:
1753 case ADDR_DIFF_VEC:
1754 return;
1756 case SET:
1757 case PARALLEL:
1758 case ASM_OPERANDS:
1759 recog_n_operands = noperands = asm_noperands (body);
1760 if (noperands >= 0)
1762 /* This insn is an `asm' with operands. */
1764 /* expand_asm_operands makes sure there aren't too many operands. */
1765 if (noperands > MAX_RECOG_OPERANDS)
1766 abort ();
1768 /* Now get the operand values and constraints out of the insn. */
1769 decode_asm_operands (body, recog_operand, recog_operand_loc,
1770 recog_constraints, recog_operand_mode);
1771 if (noperands > 0)
1773 char *p = recog_constraints[0];
1774 recog_n_alternatives = 1;
1775 while (*p)
1776 recog_n_alternatives += (*p++ == ',');
1778 #ifndef REGISTER_CONSTRAINTS
1779 bzero (recog_operand_address_p, sizeof recog_operand_address_p);
1780 #endif
1781 break;
1784 /* FALLTHROUGH */
1786 default:
1787 /* Ordinary insn: recognize it, get the operands via insn_extract
1788 and get the constraints. */
1790 icode = recog_memoized (insn);
1791 if (icode < 0)
1792 fatal_insn_not_found (insn);
1794 recog_n_operands = noperands = insn_n_operands[icode];
1795 recog_n_alternatives = insn_n_alternatives[icode];
1796 recog_n_dups = insn_n_dups[icode];
1798 insn_extract (insn);
1800 for (i = 0; i < noperands; i++)
1802 #ifdef REGISTER_CONSTRAINTS
1803 recog_constraints[i] = insn_operand_constraint[icode][i];
1804 #else
1805 recog_operand_address_p[i] = insn_operand_address_p[icode][i];
1806 #endif
1807 recog_operand_mode[i] = insn_operand_mode[icode][i];
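/* Illustrative sketch, not part of the original file: a pass that wants to
   look at an insn's operands without decoding the pattern itself can fill
   the recog_* arrays and walk them.  INSN and NEW_MEM are hypothetical.  */
#if 0
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_n_operands; i++)
    if (GET_CODE (recog_operand[i]) == MEM)
      /* recog_operand_loc[i] points into PATTERN (insn), so it can be
         handed straight to validate_change.  */
      validate_change (insn, recog_operand_loc[i], new_mem, 1);
#endif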
1812 #ifdef REGISTER_CONSTRAINTS
1814 /* Check the operands of an insn (found in recog_operands)
1815 against the insn's operand constraints (found via INSN_CODE_NUM)
1816 and return 1 if they are valid.
1818 WHICH_ALTERNATIVE is set to a number which indicates which
1819 alternative of constraints was matched: 0 for the first alternative,
1820 1 for the next, etc.
 1822 In addition, when two operands are required to match
1823 and it happens that the output operand is (reg) while the
1824 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
1825 make the output operand look like the input.
1826 This is because the output operand is the one the template will print.
1828 This is used in final, just before printing the assembler code and by
1829 the routines that determine an insn's attribute.
1831 If STRICT is a positive non-zero value, it means that we have been
1832 called after reload has been completed. In that case, we must
1833 do all checks strictly. If it is zero, it means that we have been called
1834 before reload has completed. In that case, we first try to see if we can
1835 find an alternative that matches strictly. If not, we try again, this
1836 time assuming that reload will fix up the insn. This provides a "best
1837 guess" for the alternative and is used to compute attributes of insns prior
1838 to reload. A negative value of STRICT is used for this internal call. */
1840 struct funny_match
1842 int this, other;
1846 constrain_operands (insn_code_num, strict)
1847 int insn_code_num;
1848 int strict;
1850 char *constraints[MAX_RECOG_OPERANDS];
1851 int matching_operands[MAX_RECOG_OPERANDS];
1852 enum op_type {OP_IN, OP_OUT, OP_INOUT} op_types[MAX_RECOG_OPERANDS];
1853 int earlyclobber[MAX_RECOG_OPERANDS];
1854 register int c;
1855 int noperands = insn_n_operands[insn_code_num];
1857 struct funny_match funny_match[MAX_RECOG_OPERANDS];
1858 int funny_match_index;
1859 int nalternatives = insn_n_alternatives[insn_code_num];
1861 if (noperands == 0 || nalternatives == 0)
1862 return 1;
1864 for (c = 0; c < noperands; c++)
1866 constraints[c] = insn_operand_constraint[insn_code_num][c];
1867 matching_operands[c] = -1;
1868 op_types[c] = OP_IN;
1871 which_alternative = 0;
1873 while (which_alternative < nalternatives)
1875 register int opno;
1876 int lose = 0;
1877 funny_match_index = 0;
1879 for (opno = 0; opno < noperands; opno++)
1881 register rtx op = recog_operand[opno];
1882 enum machine_mode mode = GET_MODE (op);
1883 register char *p = constraints[opno];
1884 int offset = 0;
1885 int win = 0;
1886 int val;
1888 earlyclobber[opno] = 0;
1890 /* A unary operator may be accepted by the predicate, but it
1891 is irrelevant for matching constraints. */
1892 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
1893 op = XEXP (op, 0);
1895 if (GET_CODE (op) == SUBREG)
1897 if (GET_CODE (SUBREG_REG (op)) == REG
1898 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
1899 offset = SUBREG_WORD (op);
1900 op = SUBREG_REG (op);
1903 /* An empty constraint or empty alternative
1904 allows anything which matched the pattern. */
1905 if (*p == 0 || *p == ',')
1906 win = 1;
1908 while (*p && (c = *p++) != ',')
1909 switch (c)
1911 case '?':
1912 case '!':
1913 case '*':
1914 case '%':
1915 break;
1917 case '#':
1918 /* Ignore rest of this alternative as far as
1919 constraint checking is concerned. */
1920 while (*p && *p != ',')
1921 p++;
1922 break;
1924 case '=':
1925 op_types[opno] = OP_OUT;
1926 break;
1928 case '+':
1929 op_types[opno] = OP_INOUT;
1930 break;
1932 case '&':
1933 earlyclobber[opno] = 1;
1934 break;
1936 case '0':
1937 case '1':
1938 case '2':
1939 case '3':
1940 case '4':
1941 /* This operand must be the same as a previous one.
1942 This kind of constraint is used for instructions such
1943 as add when they take only two operands.
1945 Note that the lower-numbered operand is passed first.
1947 If we are not testing strictly, assume that this constraint
1948 will be satisfied. */
1949 if (strict < 0)
1950 val = 1;
1951 else
1952 val = operands_match_p (recog_operand[c - '0'],
1953 recog_operand[opno]);
1955 matching_operands[opno] = c - '0';
1956 matching_operands[c - '0'] = opno;
1958 if (val != 0)
1959 win = 1;
1960 /* If output is *x and input is *--x,
1961 arrange later to change the output to *--x as well,
1962 since the output op is the one that will be printed. */
1963 if (val == 2 && strict > 0)
1965 funny_match[funny_match_index].this = opno;
1966 funny_match[funny_match_index++].other = c - '0';
1968 break;
1970 case 'p':
1971 /* p is used for address_operands. When we are called by
1972 gen_reload, no one will have checked that the address is
1973 strictly valid, i.e., that all pseudos requiring hard regs
1974 have gotten them. */
1975 if (strict <= 0
1976 || (strict_memory_address_p
1977 (insn_operand_mode[insn_code_num][opno], op)))
1978 win = 1;
1979 break;
1981 /* No need to check general_operand again;
1982 it was done in insn-recog.c. */
1983 case 'g':
1984 /* Anything goes unless it is a REG and really has a hard reg
1985 but the hard reg is not in the class GENERAL_REGS. */
1986 if (strict < 0
1987 || GENERAL_REGS == ALL_REGS
1988 || GET_CODE (op) != REG
1989 || (reload_in_progress
1990 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1991 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
1992 win = 1;
1993 break;
1995 case 'r':
1996 if (strict < 0
1997 || (strict == 0
1998 && GET_CODE (op) == REG
1999 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2000 || (strict == 0 && GET_CODE (op) == SCRATCH)
2001 || (GET_CODE (op) == REG
2002 && ((GENERAL_REGS == ALL_REGS
2003 && REGNO (op) < FIRST_PSEUDO_REGISTER)
2004 || reg_fits_class_p (op, GENERAL_REGS,
2005 offset, mode))))
2006 win = 1;
2007 break;
2009 case 'X':
2010 /* This is used for a MATCH_SCRATCH in the cases when
2011 we don't actually need anything. So anything goes
2012 any time. */
2013 win = 1;
2014 break;
2016 case 'm':
2017 if (GET_CODE (op) == MEM
2018 /* Before reload, accept what reload can turn into mem. */
2019 || (strict < 0 && CONSTANT_P (op))
2020 /* During reload, accept a pseudo */
2021 || (reload_in_progress && GET_CODE (op) == REG
2022 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2023 win = 1;
2024 break;
2026 case '<':
2027 if (GET_CODE (op) == MEM
2028 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2029 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2030 win = 1;
2031 break;
2033 case '>':
2034 if (GET_CODE (op) == MEM
2035 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2036 || GET_CODE (XEXP (op, 0)) == POST_INC))
2037 win = 1;
2038 break;
2040 case 'E':
2041 #ifndef REAL_ARITHMETIC
2042 /* Match any CONST_DOUBLE, but only if
2043 we can examine the bits of it reliably. */
2044 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2045 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2046 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2047 break;
2048 #endif
2049 if (GET_CODE (op) == CONST_DOUBLE)
2050 win = 1;
2051 break;
2053 case 'F':
2054 if (GET_CODE (op) == CONST_DOUBLE)
2055 win = 1;
2056 break;
2058 case 'G':
2059 case 'H':
2060 if (GET_CODE (op) == CONST_DOUBLE
2061 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2062 win = 1;
2063 break;
2065 case 's':
2066 if (GET_CODE (op) == CONST_INT
2067 || (GET_CODE (op) == CONST_DOUBLE
2068 && GET_MODE (op) == VOIDmode))
2069 break;
2070 case 'i':
2071 if (CONSTANT_P (op))
2072 win = 1;
2073 break;
2075 case 'n':
2076 if (GET_CODE (op) == CONST_INT
2077 || (GET_CODE (op) == CONST_DOUBLE
2078 && GET_MODE (op) == VOIDmode))
2079 win = 1;
2080 break;
2082 case 'I':
2083 case 'J':
2084 case 'K':
2085 case 'L':
2086 case 'M':
2087 case 'N':
2088 case 'O':
2089 case 'P':
2090 if (GET_CODE (op) == CONST_INT
2091 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2092 win = 1;
2093 break;
2095 #ifdef EXTRA_CONSTRAINT
2096 case 'Q':
2097 case 'R':
2098 case 'S':
2099 case 'T':
2100 case 'U':
2101 if (EXTRA_CONSTRAINT (op, c))
2102 win = 1;
2103 break;
2104 #endif
2106 case 'V':
2107 if (GET_CODE (op) == MEM
2108 && ((strict > 0 && ! offsettable_memref_p (op))
2109 || (strict < 0
2110 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2111 || (reload_in_progress
2112 && !(GET_CODE (op) == REG
2113 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2114 win = 1;
2115 break;
2117 case 'o':
2118 if ((strict > 0 && offsettable_memref_p (op))
2119 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2120 /* Before reload, accept what reload can handle. */
2121 || (strict < 0
2122 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2123 /* During reload, accept a pseudo */
2124 || (reload_in_progress && GET_CODE (op) == REG
2125 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2126 win = 1;
2127 break;
2129 default:
2130 if (strict < 0
2131 || (strict == 0
2132 && GET_CODE (op) == REG
2133 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2134 || (strict == 0 && GET_CODE (op) == SCRATCH)
2135 || (GET_CODE (op) == REG
2136 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
2137 offset, mode)))
2138 win = 1;
2141 constraints[opno] = p;
2142 /* If this operand did not win somehow,
2143 this alternative loses. */
2144 if (! win)
2145 lose = 1;
2147 /* This alternative won; the operands are ok.
2148 Change whichever operands this alternative says to change. */
2149 if (! lose)
2151 int opno, eopno;
2153 /* See if any earlyclobber operand conflicts with some other
2154 operand. */
2156 if (strict > 0)
2157 for (eopno = 0; eopno < noperands; eopno++)
2158 /* Ignore earlyclobber operands now in memory,
2159 because we would often report failure when we have
2160 two memory operands, one of which was formerly a REG. */
2161 if (earlyclobber[eopno]
2162 && GET_CODE (recog_operand[eopno]) == REG)
2163 for (opno = 0; opno < noperands; opno++)
2164 if ((GET_CODE (recog_operand[opno]) == MEM
2165 || op_types[opno] != OP_OUT)
2166 && opno != eopno
2167 /* Ignore things like match_operator operands. */
2168 && *insn_operand_constraint[insn_code_num][opno] != 0
2169 && ! (matching_operands[opno] == eopno
2170 && operands_match_p (recog_operand[opno],
2171 recog_operand[eopno]))
2172 && ! safe_from_earlyclobber (recog_operand[opno],
2173 recog_operand[eopno]))
2174 lose = 1;
2176 if (! lose)
2178 while (--funny_match_index >= 0)
2180 recog_operand[funny_match[funny_match_index].other]
2181 = recog_operand[funny_match[funny_match_index].this];
2184 return 1;
2188 which_alternative++;
2191 /* If we are about to reject this, but we are not to test strictly,
2192 try a very loose test. Only return failure if it fails also. */
2193 if (strict == 0)
2194 return constrain_operands (insn_code_num, -1);
2195 else
2196 return 0;
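/* Illustrative sketch, not part of the original file: after reload,
   recognition alone is not enough; callers such as final.c also check the
   constraints and then look at which_alternative.  INSN is hypothetical.  */
#if 0
  int code = recog_memoized (insn);

  if (code < 0)
    fatal_insn_not_found (insn);
  insn_extract (insn);
  if (! constrain_operands (code, 1))
    abort ();
  /* which_alternative now records the constraint alternative that
     matched.  */
#endif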
2199 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2200 is a hard reg in class CLASS when its regno is offset by OFFSET
2201 and changed to mode MODE.
2202 If REG occupies multiple hard regs, all of them must be in CLASS. */
2205 reg_fits_class_p (operand, class, offset, mode)
2206 rtx operand;
2207 register enum reg_class class;
2208 int offset;
2209 enum machine_mode mode;
2211 register int regno = REGNO (operand);
2212 if (regno < FIRST_PSEUDO_REGISTER
2213 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2214 regno + offset))
2216 register int sr;
2217 regno += offset;
2218 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2219 sr > 0; sr--)
2220 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2221 regno + sr))
2222 break;
2223 return sr == 0;
2226 return 0;
2229 #endif /* REGISTER_CONSTRAINTS */