/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 88, 91, 92, 93, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
#include "config.h"
#include "rtl.h"
#include <stdio.h>
#include "insn-config.h"
#include "insn-attr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "recog.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Import from final.c: */
extern rtx alter_subreg ();

int strict_memory_address_p ();
int memory_address_p ();
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}
/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
  return INSN_CODE (insn);
}
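
/* Illustrative sketch only (kept under "#if 0", not part of the pass
   logic): a hypothetical caller that uses recog_memoized to test whether
   an insn still matches some pattern after its rtl has been edited.  */
#if 0
static int
example_insn_still_recognized (insn)
     rtx insn;
{
  /* Force rerecognition, then ask for the (newly cached) insn code.  */
  INSN_CODE (insn) = -1;
  return recog_memoized (insn) >= 0;
}
#endif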
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands = asm_noperands (x);
  rtx *operands;
  int i;

  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  decode_asm_operands (x, operands, NULL_PTR, NULL_PTR, NULL_PTR);

  for (i = 0; i < noperands; i++)
    if (!general_operand (operands[i], VOIDmode))
      return 0;

  return 1;
}
/* Static data for the next two routines.

   The maximum number of changes supported is defined as the maximum
   number of operands times 5.  This allows for repeated substitutions
   inside a complex indexed address, or, alternatively, changes in up
   to 5 insns.  */

#define MAX_CHANGE_LOCS (MAX_RECOG_OPERANDS * 5)

static rtx change_objects[MAX_CHANGE_LOCS];
static int change_old_codes[MAX_CHANGE_LOCS];
static rtx *change_locs[MAX_CHANGE_LOCS];
static rtx change_olds[MAX_CHANGE_LOCS];

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (num_changes >= MAX_CHANGE_LOCS
      || (in_group == 0 && num_changes != 0))
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  change_objects[num_changes] = object;
  change_locs[num_changes] = loc;
  change_olds[num_changes] = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      change_old_codes[num_changes] = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
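
/* Illustrative sketch (under "#if 0"): queueing two replacements as one
   group so that either both take effect or neither does.  LOC0 and LOC1
   are hypothetical locations inside PATTERN (insn).  */
#if 0
static int
example_replace_two_operands (insn, loc0, loc1, new0, new1)
     rtx insn, *loc0, *loc1, new0, new1;
{
  validate_change (insn, loc0, new0, 1);   /* queued, not yet checked */
  validate_change (insn, loc1, new1, 1);   /* queued, not yet checked */
  return apply_change_group ();            /* all-or-nothing */
}
#endif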
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  We do not allow modifying an ASM_OPERANDS after reload
     has completed because verifying the constraints is too difficult.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = change_objects[i];

      if (object == 0)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if ((recog_memoized (object) < 0
                && (asm_noperands (PATTERN (object)) < 0
                    || ! check_asm_operands (PATTERN (object))
                    || reload_completed))
               || (reload_completed
                   && (insn_extract (object),
                       ! constrain_operands (INSN_CODE (object), 1))))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat = gen_rtx (PARALLEL, VOIDmode,
                                    gen_rtvec (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
    }

  if (i == num_changes)
    {
      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order from that
     in which they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *change_locs[i] = change_olds[i];
      if (change_objects[i] && GET_CODE (change_objects[i]) != MEM)
        INSN_CODE (change_objects[i]) = change_old_codes[i];
    }
  num_changes = num;
}
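
/* Illustrative sketch (under "#if 0"): using num_validated_changes as a
   checkpoint so that a tentative extra change can be retracted without
   disturbing changes queued earlier in the same group.  The predicate
   some_extra_test is hypothetical.  */
#if 0
static void
example_try_extra_change (insn, loc, new)
     rtx insn, *loc, new;
{
  int checkpoint = num_validated_changes ();

  validate_change (insn, loc, new, 1);
  if (! some_extra_test (insn))         /* hypothetical predicate */
    cancel_changes (checkpoint);        /* keep earlier queued changes */
}
#endif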
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  register int i, j;
  register char *fmt;
  register rtx x = *loc;
  enum rtx_code code = GET_CODE (x);

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* For commutative or comparison operations, try replacing each argument
     separately and seeing if we made any changes.  If so, put a constant
     argument last.  */
  if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      int prev_changes = num_changes;

      validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
      validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
      if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
        {
          validate_change (object, loc,
                           gen_rtx (GET_RTX_CLASS (code) == 'c' ? code
                                    : swap_condition (code),
                                    GET_MODE (x), XEXP (x, 1), XEXP (x, 0)),
                           1);
          x = *loc;
          code = GET_CODE (x);
        }
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         plus_constant to try to simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc,
                         plus_constant (XEXP (x, 0), INTVAL (XEXP (x, 1))), 1);
      return;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* In these cases, the operation to be performed depends on the mode
         of the operand.  If we are replacing the operand with a VOIDmode
         constant, we lose the information.  So try to simplify the operation
         in that case.  If it fails, substitute in something that we know
         won't be recognized.  */
      if (GET_MODE (to) == VOIDmode
          && (XEXP (x, 0) == from
              || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
                  && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
                  && REGNO (XEXP (x, 0)) == REGNO (from))))
        {
          rtx new = simplify_unary_operation (code, GET_MODE (x), to,
                                              GET_MODE (from));
          if (new == 0)
            new = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);

          validate_change (object, loc, new, 1);
          return;
        }
      break;

    case SUBREG:
      /* If we have a SUBREG of a register that we are replacing and we are
         replacing it with a MEM, make a new MEM and try replacing the
         SUBREG with it.  Don't do this if the MEM has a mode-dependent address
         or if we would be widening it.  */

      if (SUBREG_REG (x) == from
          && GET_CODE (from) == REG
          && GET_CODE (to) == MEM
          && ! mode_dependent_address_p (XEXP (to, 0))
          && ! MEM_VOLATILE_P (to)
          && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
        {
          int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
          enum machine_mode mode = GET_MODE (x);
          rtx new;

          if (BYTES_BIG_ENDIAN)
            offset += (MIN (UNITS_PER_WORD,
                            GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
                       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));

          new = gen_rtx (MEM, mode, plus_constant (XEXP (to, 0), offset));
          MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
          MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
          validate_change (object, loc, new, 1);
          return;
        }
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && ! mode_dependent_address_p (XEXP (to, 0))
          && ! MEM_VOLATILE_P (to))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (to);
          int width = INTVAL (XEXP (x, 1));
          int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
          if (code == ZERO_EXTRACT)
            wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
#endif
#ifdef HAVE_extv
          if (code == SIGN_EXTRACT)
            wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
#endif

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
                          - offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = gen_rtx (MEM, wanted_mode,
                                plus_constant (XEXP (to, 0), offset));
              RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
              MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
              MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }
}
/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
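
/* Illustrative sketch (under "#if 0"): replacing every use of a pseudo
   register in INSN by a constant, keeping the edits only if the insn is
   still recognized afterwards.  */
#if 0
static int
example_substitute_constant (insn, reg, value)
     rtx insn, reg;
     HOST_WIDE_INT value;
{
  /* Returns nonzero and keeps the replacements only if INSN remains
     valid; otherwise all changes are cancelled.  */
  return validate_replace_rtx (reg, GEN_INT (value), insn);
}
#endif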
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
          must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
          || GET_CODE (next) == BARRIER)
        return 1;
      if (GET_CODE (next) == NOTE)
        continue;
      if (inequality_comparisons_p (PATTERN (next)))
        return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
        return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
        return 1;
    }
  return 1;
}
#endif
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need only check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
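
/* Illustrative sketch (under "#if 0"): the typical way a combine-like
   pass might consult find_single_use, deciding whether a computed value
   has exactly one consumer (the step that would absorb the computation
   into that consumer is omitted here).  */
#if 0
static int
example_dest_used_once (insn, dest)
     rtx insn, dest;
{
  rtx use_insn;
  rtx *use_loc = find_single_use (dest, insn, &use_insn);

  /* USE_LOC now points at the single use inside USE_INSN, or is zero.  */
  return use_loc != 0;
}
#endif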
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  register enum rtx_code code = GET_CODE (op);
  int mode_altering_drug = 0;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
        return 0;
#endif

      op = SUBREG_REG (op);
      code = GET_CODE (op);
#if 0
      /* No longer needed, since (SUBREG (MEM...))
         will load the MEM into a reload reg in the MEM's own mode.  */
      mode_altering_drug = 1;
#endif
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;
      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }
  return 0;

 win:
  if (mode_altering_drug)
    return ! mode_dependent_address_p (XEXP (op, 0));
  return 1;
}
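
/* Illustrative sketch (under "#if 0"): predicates such as general_operand
   are normally referenced from match_operand in the machine description,
   but they can also be called directly, e.g. to ask whether an rtx may be
   used as an SImode operand without further reloading.  */
#if 0
static int
example_fits_simode_operand (x)
     rtx x;
{
  return general_operand (x, SImode);
}
#endif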
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_SIZE
      if (GET_CODE (SUBREG_REG (op)) == REG
          && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
          && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
                                REGNO (SUBREG_REG (op)))
          && (GET_MODE_SIZE (mode)
              != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op)))))
        return 0;
#endif

      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return (GET_MODE (op) == mode
          && (GET_CODE (op) == SCRATCH
              || (GET_CODE (op) == REG
                  && REGNO (op) < FIRST_PSEUDO_REGISTER)));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return GET_CODE (op) == CONST_INT;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_PUSH_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
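
/* Illustrative sketch (under "#if 0"): the shape of rtx that push_operand
   is meant to accept, namely (mem:M (STACK_PUSH_CODE (reg sp))), built
   here explicitly for a given mode.  */
#if 0
static rtx
example_make_push_operand (mode)
     enum machine_mode mode;
{
  return gen_rtx (MEM, mode,
                  gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx));
}
#endif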
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode;
     register rtx addr;
{
  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
      rtx inner = SUBREG_REG (op);

      if (BYTES_BIG_ENDIAN)
        offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
                   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  if (GET_CODE (body) == ASM_OPERANDS)
    /* No output operands: return number of input operands.  */
    return ASM_OPERANDS_INPUT_LENGTH (body);
  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
    return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      /* Multiple output operands, or 1 output plus some clobbers:
         body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
      int i;
      int n_sets;

      /* Count backwards through CLOBBERs to determine number of SETs.  */
      for (i = XVECLEN (body, 0); i > 0; i--)
        {
          if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
            break;
          if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
            return -1;
        }

      /* N_SETS is now number of output operands.  */
      n_sets = i;

      /* Verify that all the SETs we have
         came from a single original asm_operands insn
         (so that invalid combinations are blocked).  */
      for (i = 0; i < n_sets; i++)
        {
          rtx elt = XVECEXP (body, 0, i);
          if (GET_CODE (elt) != SET)
            return -1;
          if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
            return -1;
          /* If these ASM_OPERANDS rtx's came from different original insns
             then they aren't allowed together.  */
          if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
              != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
            return -1;
        }
      return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
              + n_sets);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* 0 outputs, but some clobbers:
         body is [(asm_operands ...) (clobber (reg ...))...].  */
      int i;

      /* Make sure all the other parallel things really are clobbers.  */
      for (i = XVECLEN (body, 0) - 1; i > 0; i--)
        if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
          return -1;

      return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
    }
  else
    return -1;
}
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     char **constraints;
     enum machine_mode *modes;
{
  register int i;
  int noperands;
  char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
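
/* Illustrative sketch (under "#if 0"): the usual calling convention for
   decode_asm_operands, sizing the vectors from asm_noperands first; this
   mirrors what check_asm_operands above does, with one more vector.  */
#if 0
static void
example_decode_asm (body)
     rtx body;
{
  int noperands = asm_noperands (body);
  rtx *operands;
  char **constraints;
  char *template;

  if (noperands <= 0)
    return;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (char **) alloca (noperands * sizeof (char *));
  template = decode_asm_operands (body, operands, NULL_PTR,
                                  constraints, NULL_PTR);
  /* TEMPLATE now holds the assembler string; OPERANDS and CONSTRAINTS
     hold the operands and their constraint strings.  */
}
#endif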
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

static rtx *
find_constant_term_loc (p)
     rtx *p;
{
  register rtx *tem;
  register enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}

/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     register rtx y;
{
  register enum rtx_code ycode = GET_CODE (y);
  register rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (ycode == PRE_DEC || ycode == PRE_INC
      || ycode == POST_DEC || ycode == POST_INC)
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  */

  z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (addr)
     rtx addr;
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
 win:
  return 1;
}

/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
 lose:
  return 0;
}
/* Given an operand OP that is a valid memory reference
   which satisfies offsettable_memref_p,
   return a new memory reference whose address has been adjusted by OFFSET.
   OFFSET should be positive and less than the size of the object referenced.  */

rtx
adj_offsettable_operand (op, offset)
     rtx op;
     int offset;
{
  register enum rtx_code code = GET_CODE (op);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      register rtx new;

      if (CONSTANT_ADDRESS_P (y))
        {
          new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset));
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
          return new;
        }

      if (GET_CODE (y) == PLUS)
        {
          rtx z = y;
          register rtx *const_loc;

          op = copy_rtx (op);
          z = XEXP (op, 0);
          const_loc = find_constant_term_loc (&z);
          if (const_loc)
            {
              *const_loc = plus_constant_for_output (*const_loc, offset);
              return op;
            }
        }

      new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset));
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
      return new;
    }
  abort ();
}
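
/* Illustrative sketch (under "#if 0"): how a caller is expected to pair
   offsettable_memref_p with adj_offsettable_operand, e.g. to form a
   reference one word past a multiword memory operand after reload.  */
#if 0
static rtx
example_second_word (op)
     rtx op;
{
  if (offsettable_memref_p (op))
    return adj_offsettable_operand (op, UNITS_PER_WORD);
  return 0;
}
#endif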
#ifdef REGISTER_CONSTRAINTS

/* Check the operands of an insn (found in recog_operands)
   against the insn's operand constraints (found via INSN_CODE_NUM)
   and return 1 if they are valid.

   WHICH_ALTERNATIVE is set to a number which indicates which
   alternative of constraints was matched: 0 for the first alternative,
   1 for the next, etc.

   In addition, when two operands are required to match
   and it happens that the output operand is (reg) while the
   input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
   make the output operand look like the input.
   This is because the output operand is the one the template will print.

   This is used in final, just before printing the assembler code and by
   the routines that determine an insn's attribute.

   If STRICT is a positive non-zero value, it means that we have been
   called after reload has been completed.  In that case, we must
   do all checks strictly.  If it is zero, it means that we have been called
   before reload has completed.  In that case, we first try to see if we can
   find an alternative that matches strictly.  If not, we try again, this
   time assuming that reload will fix up the insn.  This provides a "best
   guess" for the alternative and is used to compute attributes of insns prior
   to reload.  A negative value of STRICT is used for this internal call.  */

struct funny_match
{
  int this, other;
};

int
constrain_operands (insn_code_num, strict)
     int insn_code_num;
     int strict;
{
  char *constraints[MAX_RECOG_OPERANDS];
  int matching_operands[MAX_RECOG_OPERANDS];
  enum op_type {OP_IN, OP_OUT, OP_INOUT} op_types[MAX_RECOG_OPERANDS];
  int earlyclobber[MAX_RECOG_OPERANDS];
  register int c;
  int noperands = insn_n_operands[insn_code_num];

  struct funny_match funny_match[MAX_RECOG_OPERANDS];
  int funny_match_index;
  int nalternatives = insn_n_alternatives[insn_code_num];

  if (noperands == 0 || nalternatives == 0)
    return 1;

  for (c = 0; c < noperands; c++)
    {
      constraints[c] = insn_operand_constraint[insn_code_num][c];
      matching_operands[c] = -1;
      op_types[c] = OP_IN;
    }

  which_alternative = 0;

  while (which_alternative < nalternatives)
    {
      register int opno;
      int lose = 0;
      funny_match_index = 0;

      for (opno = 0; opno < noperands; opno++)
        {
          register rtx op = recog_operand[opno];
          enum machine_mode mode = GET_MODE (op);
          register char *p = constraints[opno];
          int offset = 0;
          int win = 0;
          int val;

          earlyclobber[opno] = 0;

          if (GET_CODE (op) == SUBREG)
            {
              if (GET_CODE (SUBREG_REG (op)) == REG
                  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
                offset = SUBREG_WORD (op);
              op = SUBREG_REG (op);
            }

          /* An empty constraint or empty alternative
             allows anything which matched the pattern.  */
          if (*p == 0 || *p == ',')
            win = 1;

          while (*p && (c = *p++) != ',')
            switch (c)
              {
              case '?':
              case '!':
              case '*':
              case '%':
                break;

              case '#':
                /* Ignore rest of this alternative as far as
                   constraint checking is concerned.  */
                while (*p && *p != ',')
                  p++;
                break;

              case '=':
                op_types[opno] = OP_OUT;
                break;

              case '+':
                op_types[opno] = OP_INOUT;
                break;

              case '&':
                earlyclobber[opno] = 1;
                break;

              case '0':
              case '1':
              case '2':
              case '3':
              case '4':
                /* This operand must be the same as a previous one.
                   This kind of constraint is used for instructions such
                   as add when they take only two operands.

                   Note that the lower-numbered operand is passed first.

                   If we are not testing strictly, assume that this constraint
                   will be satisfied.  */
                if (strict < 0)
                  val = 1;
                else
                  val = operands_match_p (recog_operand[c - '0'],
                                          recog_operand[opno]);

                matching_operands[opno] = c - '0';
                matching_operands[c - '0'] = opno;

                if (val != 0)
                  win = 1;
                /* If output is *x and input is *--x,
                   arrange later to change the output to *--x as well,
                   since the output op is the one that will be printed.  */
                if (val == 2 && strict > 0)
                  {
                    funny_match[funny_match_index].this = opno;
                    funny_match[funny_match_index++].other = c - '0';
                  }
                break;

              case 'p':
                /* p is used for address_operands.  When we are called by
                   gen_reload, no one will have checked that the address is
                   strictly valid, i.e., that all pseudos requiring hard regs
                   have gotten them.  */
                if (strict <= 0
                    || (strict_memory_address_p
                        (insn_operand_mode[insn_code_num][opno], op)))
                  win = 1;
                break;

                /* No need to check general_operand again;
                   it was done in insn-recog.c.  */
              case 'g':
                /* Anything goes unless it is a REG and really has a hard reg
                   but the hard reg is not in the class GENERAL_REGS.  */
                if (strict < 0
                    || GENERAL_REGS == ALL_REGS
                    || GET_CODE (op) != REG
                    || (reload_in_progress
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
                  win = 1;
                break;

              case 'r':
                if (strict < 0
                    || (strict == 0
                        && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || (strict == 0 && GET_CODE (op) == SCRATCH)
                    || (GET_CODE (op) == REG
                        && ((GENERAL_REGS == ALL_REGS
                             && REGNO (op) < FIRST_PSEUDO_REGISTER)
                            || reg_fits_class_p (op, GENERAL_REGS,
                                                 offset, mode))))
                  win = 1;
                break;

              case 'X':
                /* This is used for a MATCH_SCRATCH in the cases when we
                   don't actually need anything.  So anything goes any time.  */
                win = 1;
                break;

              case 'm':
                if (GET_CODE (op) == MEM
                    /* Before reload, accept what reload can turn into mem.  */
                    || (strict < 0 && CONSTANT_P (op))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              case '<':
                if (GET_CODE (op) == MEM
                    && (GET_CODE (XEXP (op, 0)) == PRE_DEC
                        || GET_CODE (XEXP (op, 0)) == POST_DEC))
                  win = 1;
                break;

              case '>':
                if (GET_CODE (op) == MEM
                    && (GET_CODE (XEXP (op, 0)) == PRE_INC
                        || GET_CODE (XEXP (op, 0)) == POST_INC))
                  win = 1;
                break;

              case 'E':
#ifndef REAL_ARITHMETIC
                /* Match any CONST_DOUBLE, but only if
                   we can examine the bits of it reliably.  */
                if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
                     || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
                    && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
                  break;
#endif
                if (GET_CODE (op) == CONST_DOUBLE)
                  win = 1;
                break;

              case 'F':
                if (GET_CODE (op) == CONST_DOUBLE)
                  win = 1;
                break;

              case 'G':
              case 'H':
                if (GET_CODE (op) == CONST_DOUBLE
                    && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
                  win = 1;
                break;

              case 's':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  break;
                /* Fall through into 'i' for non-integer constants.  */
              case 'i':
                if (CONSTANT_P (op))
                  win = 1;
                break;

              case 'n':
                if (GET_CODE (op) == CONST_INT
                    || (GET_CODE (op) == CONST_DOUBLE
                        && GET_MODE (op) == VOIDmode))
                  win = 1;
                break;

              case 'I':
              case 'J':
              case 'K':
              case 'L':
              case 'M':
              case 'N':
              case 'O':
              case 'P':
                if (GET_CODE (op) == CONST_INT
                    && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
                  win = 1;
                break;

#ifdef EXTRA_CONSTRAINT
              case 'Q':
              case 'R':
              case 'S':
              case 'T':
              case 'U':
                if (EXTRA_CONSTRAINT (op, c))
                  win = 1;
                break;
#endif

              case 'V':
                if (GET_CODE (op) == MEM
                    && ! offsettable_memref_p (op))
                  win = 1;
                break;

              case 'o':
                if ((strict > 0 && offsettable_memref_p (op))
                    || (strict == 0 && offsettable_nonstrict_memref_p (op))
                    /* Before reload, accept what reload can handle.  */
                    || (strict < 0
                        && (CONSTANT_P (op) || GET_CODE (op) == MEM))
                    /* During reload, accept a pseudo  */
                    || (reload_in_progress && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER))
                  win = 1;
                break;

              default:
                if (strict < 0
                    || (strict == 0
                        && GET_CODE (op) == REG
                        && REGNO (op) >= FIRST_PSEUDO_REGISTER)
                    || (strict == 0 && GET_CODE (op) == SCRATCH)
                    || (GET_CODE (op) == REG
                        && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
                                             offset, mode)))
                  win = 1;
              }

          constraints[opno] = p;
          /* If this operand did not win somehow,
             this alternative loses.  */
          if (! win)
            lose = 1;
        }
      /* This alternative won; the operands are ok.
         Change whichever operands this alternative says to change.  */
      if (! lose)
        {
          int opno, eopno;

          /* See if any earlyclobber operand conflicts with some other
             operand.  */

          if (strict > 0)
            for (eopno = 0; eopno < noperands; eopno++)
              /* Ignore earlyclobber operands now in memory,
                 because we would often report failure when we have
                 two memory operands, one of which was formerly a REG.  */
              if (earlyclobber[eopno]
                  && GET_CODE (recog_operand[eopno]) == REG)
                for (opno = 0; opno < noperands; opno++)
                  if ((GET_CODE (recog_operand[opno]) == MEM
                       || op_types[opno] != OP_OUT)
                      && opno != eopno
                      /* Ignore things like match_operator operands.  */
                      && *constraints[opno] != 0
                      && ! (matching_operands[opno] == eopno
                            && rtx_equal_p (recog_operand[opno],
                                            recog_operand[eopno]))
                      && ! safe_from_earlyclobber (recog_operand[opno],
                                                   recog_operand[eopno]))
                    lose = 1;

          if (! lose)
            {
              while (--funny_match_index >= 0)
                {
                  recog_operand[funny_match[funny_match_index].other]
                    = recog_operand[funny_match[funny_match_index].this];
                }

              return 1;
            }
        }

      which_alternative++;
    }

  /* If we are about to reject this, but we are not to test strictly,
     try a very loose test.  Only return failure if it fails also.  */
  if (strict == 0)
    return constrain_operands (insn_code_num, -1);
  else
    return 0;
}
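
/* Illustrative sketch (under "#if 0"): the sequence a caller such as
   final.c uses after reload to check an insn strictly against the
   constraints of its matched alternative (compare apply_change_group
   above, which does the same inline).  */
#if 0
static int
example_insn_satisfies_constraints (insn)
     rtx insn;
{
  if (recog_memoized (insn) < 0)
    return 0;

  insn_extract (insn);          /* fill recog_operand[] */
  return constrain_operands (INSN_CODE (insn), 1);
}
#endif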
/* Return 1 iff OPERAND (assumed to be a REG rtx)
   is a hard reg in class CLASS when its regno is offset by OFFSET
   and changed to mode MODE.
   If REG occupies multiple hard regs, all of them must be in CLASS.  */

int
reg_fits_class_p (operand, class, offset, mode)
     rtx operand;
     register enum reg_class class;
     int offset;
     enum machine_mode mode;
{
  register int regno = REGNO (operand);
  if (regno < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
                            regno + offset))
    {
      register int sr;
      regno += offset;
      for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
           sr > 0; sr--)
        if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
                                 regno + sr))
          break;
      return sr == 0;
    }

  return 0;
}

#endif /* REGISTER_CONSTRAINTS */