1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 88, 91, 92, 93, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 #include "config.h"
22 #include "rtl.h"
23 #include <stdio.h>
24 #include "insn-config.h"
25 #include "insn-attr.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "recog.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "flags.h"
32 #include "real.h"
34 #ifndef STACK_PUSH_CODE
35 #ifdef STACK_GROWS_DOWNWARD
36 #define STACK_PUSH_CODE PRE_DEC
37 #else
38 #define STACK_PUSH_CODE PRE_INC
39 #endif
40 #endif
42 /* Import from final.c: */
43 extern rtx alter_subreg ();
45 int strict_memory_address_p ();
46 int memory_address_p ();
48 /* Nonzero means allow operands to be volatile.
49 This should be 0 if you are generating rtl, such as if you are calling
50 the functions in optabs.c and expmed.c (most of the time).
51 This should be 1 if all valid insns need to be recognized,
52 such as in regclass.c and final.c and reload.c.
54 init_recog and init_recog_no_volatile are responsible for setting this. */
56 int volatile_ok;
58 /* On return from `constrain_operands', indicate which alternative
59 was satisfied. */
61 int which_alternative;
63 /* Nonzero after end of reload pass.
64 Set to 1 or 0 by toplev.c.
65 Controls the significance of (SUBREG (MEM)). */
67 int reload_completed;
69 /* Initialize data used by the function `recog'.
70 This must be called once in the compilation of a function
71 before any insn recognition may be done in the function. */
73 void
74 init_recog_no_volatile ()
76 volatile_ok = 0;
79 void
80 init_recog ()
82 volatile_ok = 1;
85 /* Try recognizing the instruction INSN,
86 and return the code number that results.
87 Remember the code so that repeated calls do not
88 need to spend the time for actual rerecognition.
90 This function is the normal interface to instruction recognition.
91 The automatically-generated function `recog' is normally called
92 through this one. (The only exception is in combine.c.) */
94 int
95 recog_memoized (insn)
96 rtx insn;
98 if (INSN_CODE (insn) < 0)
99 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
100 return INSN_CODE (insn);
103 /* Check that X is an insn-body for an `asm' with operands
104 and that the operands mentioned in it are legitimate. */
107 check_asm_operands (x)
108 rtx x;
110 int noperands = asm_noperands (x);
111 rtx *operands;
112 int i;
114 if (noperands < 0)
115 return 0;
116 if (noperands == 0)
117 return 1;
119 operands = (rtx *) alloca (noperands * sizeof (rtx));
120 decode_asm_operands (x, operands, NULL_PTR, NULL_PTR, NULL_PTR);
122 for (i = 0; i < noperands; i++)
123 if (!general_operand (operands[i], VOIDmode))
124 return 0;
126 return 1;
129 /* Static data for the next two routines.
131 The maximum number of changes supported is defined as the maximum
132 number of operands times 5. This allows for repeated substitutions
133 inside a complex indexed address, or, alternatively, changes in up
134 to 5 insns. */
136 #define MAX_CHANGE_LOCS (MAX_RECOG_OPERANDS * 5)
138 static rtx change_objects[MAX_CHANGE_LOCS];
139 static int change_old_codes[MAX_CHANGE_LOCS];
140 static rtx *change_locs[MAX_CHANGE_LOCS];
141 static rtx change_olds[MAX_CHANGE_LOCS];
143 static int num_changes = 0;
145 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
146 at which NEW will be placed. If OBJECT is zero, no validation is done,
147 the change is simply made.
149 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
150 will be called with the address and mode as parameters. If OBJECT is
151 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
152 the change in place.
154 IN_GROUP is non-zero if this is part of a group of changes that must be
155 performed as a group. In that case, the changes will be stored. The
156 function `apply_change_group' will validate and apply the changes.
158 If IN_GROUP is zero, this is a single change. Try to recognize the insn
159 or validate the memory reference with the change applied. If the result
160 is not valid for the machine, suppress the change and return zero.
161 Otherwise, perform the change and return 1. */
164 validate_change (object, loc, new, in_group)
165 rtx object;
166 rtx *loc;
167 rtx new;
168 int in_group;
170 rtx old = *loc;
172 if (old == new || rtx_equal_p (old, new))
173 return 1;
175 if (num_changes >= MAX_CHANGE_LOCS
176 || (in_group == 0 && num_changes != 0))
177 abort ();
179 *loc = new;
181 /* Save the information describing this change. */
182 change_objects[num_changes] = object;
183 change_locs[num_changes] = loc;
184 change_olds[num_changes] = old;
186 if (object && GET_CODE (object) != MEM)
188 /* Set INSN_CODE to force rerecognition of insn. Save old code in
189 case invalid. */
190 change_old_codes[num_changes] = INSN_CODE (object);
191 INSN_CODE (object) = -1;
194 num_changes++;
196 /* If we are making a group of changes, return 1. Otherwise, validate the
197 change group we made. */
199 if (in_group)
200 return 1;
201 else
202 return apply_change_group ();
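/* An illustrative, hypothetical sketch of the single-change use of
   `validate_change': with IN_GROUP == 0 the replacement is validated
   immediately and undone on failure.  It assumes PATTERN (INSN) is a
   single SET; the function name is not part of this file.  */
#if 0
static int
try_set_src_to_zero (insn)
     rtx insn;
{
  /* Returns nonzero only if INSN is still recognizable with its
     SET_SRC replaced by (const_int 0); otherwise INSN is unchanged.  */
  return validate_change (insn, &SET_SRC (PATTERN (insn)), const0_rtx, 0);
}
#endif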
205 /* Apply a group of changes previously issued with `validate_change'.
206 Return 1 if all changes are valid, zero otherwise. */
209 apply_change_group ()
211 int i;
213 /* The changes have been applied and all INSN_CODEs have been reset to force
214 rerecognition.
216 The changes are valid if we aren't given an object, or if we are
217 given a MEM and it still is a valid address, or if this is an insn
218 and it is recognized. In the latter case, if reload has completed,
219 we also require that the operands meet the constraints for
220 the insn. We do not allow modifying an ASM_OPERANDS after reload
221 has completed because verifying the constraints is too difficult. */
223 for (i = 0; i < num_changes; i++)
225 rtx object = change_objects[i];
227 if (object == 0)
228 continue;
230 if (GET_CODE (object) == MEM)
232 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
233 break;
235 else if ((recog_memoized (object) < 0
236 && (asm_noperands (PATTERN (object)) < 0
237 || ! check_asm_operands (PATTERN (object))
238 || reload_completed))
239 || (reload_completed
240 && (insn_extract (object),
241 ! constrain_operands (INSN_CODE (object), 1))))
243 rtx pat = PATTERN (object);
245 /* Perhaps we couldn't recognize the insn because there were
246 extra CLOBBERs at the end. If so, try to re-recognize
247 without the last CLOBBER (later iterations will cause each of
248 them to be eliminated, in turn). But don't do this if we
249 have an ASM_OPERAND. */
250 if (GET_CODE (pat) == PARALLEL
251 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
252 && asm_noperands (PATTERN (object)) < 0)
254 rtx newpat;
256 if (XVECLEN (pat, 0) == 2)
257 newpat = XVECEXP (pat, 0, 0);
258 else
260 int j;
262 newpat = gen_rtx (PARALLEL, VOIDmode,
263 gen_rtvec (XVECLEN (pat, 0) - 1));
264 for (j = 0; j < XVECLEN (newpat, 0); j++)
265 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
268 /* Add a new change to this group to replace the pattern
269 with this new pattern. Then consider this change
270 as having succeeded. The change we added will
271 cause the entire call to fail if things remain invalid.
273 Note that this can lose if a later change than the one
274 we are processing specified &XVECEXP (PATTERN (object), 0, X)
275 but this shouldn't occur. */
277 validate_change (object, &PATTERN (object), newpat, 1);
279 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
280 /* If this insn is a CLOBBER or USE, it is always valid, but is
281 never recognized. */
282 continue;
283 else
284 break;
288 if (i == num_changes)
290 num_changes = 0;
291 return 1;
293 else
295 cancel_changes (0);
296 return 0;
300 /* Return the number of changes so far in the current group. */
303 num_validated_changes ()
305 return num_changes;
308 /* Retract the changes numbered NUM and up. */
310 void
311 cancel_changes (num)
312 int num;
314 int i;
316 /* Back out all the changes. Do this in the opposite order in which
317 they were made. */
318 for (i = num_changes - 1; i >= num; i--)
320 *change_locs[i] = change_olds[i];
321 if (change_objects[i] && GET_CODE (change_objects[i]) != MEM)
322 INSN_CODE (change_objects[i]) = change_old_codes[i];
324 num_changes = num;
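/* An illustrative, hypothetical sketch of a grouped change: queue several
   replacements with IN_GROUP == 1, use `num_validated_changes' as a
   checkpoint, and either abandon the queued changes with `cancel_changes'
   or commit them all with `apply_change_group' (which itself undoes
   everything if the result is invalid).  The function and its arguments
   are not part of this file.  */
#if 0
static int
try_replace_two (insn, loc1, loc2, new1, new2)
     rtx insn;
     rtx *loc1, *loc2;
     rtx new1, new2;
{
  int checkpoint = num_validated_changes ();

  validate_change (insn, loc1, new1, 1);

  /* If the second replacement is hopeless, back out what has been
     queued since the checkpoint instead of bothering the recognizer.  */
  if (! general_operand (new2, GET_MODE (*loc2)))
    {
      cancel_changes (checkpoint);
      return 0;
    }

  validate_change (insn, loc2, new2, 1);
  return apply_change_group ();
}
#endif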
327 /* Replace every occurrence of FROM in X with TO. Mark each change with
328 validate_change passing OBJECT. */
330 static void
331 validate_replace_rtx_1 (loc, from, to, object)
332 rtx *loc;
333 rtx from, to, object;
335 register int i, j;
336 register char *fmt;
337 register rtx x = *loc;
338 enum rtx_code code = GET_CODE (x);
340 /* X matches FROM if it is the same rtx or they are both referring to the
341 same register in the same mode. Avoid calling rtx_equal_p unless the
342 operands look similar. */
344 if (x == from
345 || (GET_CODE (x) == REG && GET_CODE (from) == REG
346 && GET_MODE (x) == GET_MODE (from)
347 && REGNO (x) == REGNO (from))
348 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
349 && rtx_equal_p (x, from)))
351 validate_change (object, loc, to, 1);
352 return;
355 /* For commutative or comparison operations, try replacing each argument
356 separately and seeing if we made any changes. If so, put a constant
357 argument last. */
358 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
360 int prev_changes = num_changes;
362 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
363 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
364 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
366 validate_change (object, loc,
367 gen_rtx (GET_RTX_CLASS (code) == 'c' ? code
368 : swap_condition (code),
369 GET_MODE (x), XEXP (x, 1), XEXP (x, 0)),
371 x = *loc;
372 code = GET_CODE (x);
376 switch (code)
378 case PLUS:
379 /* If we have a PLUS whose second operand is now a CONST_INT, use
380 plus_constant to try to simplify it. */
381 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
382 validate_change (object, loc,
383 plus_constant (XEXP (x, 0), INTVAL (XEXP (x, 1))), 1);
384 return;
386 case ZERO_EXTEND:
387 case SIGN_EXTEND:
388 /* In these cases, the operation to be performed depends on the mode
389 of the operand. If we are replacing the operand with a VOIDmode
390 constant, we lose the information. So try to simplify the operation
391 in that case. If it fails, substitute in something that we know
392 won't be recognized. */
393 if (GET_MODE (to) == VOIDmode
394 && (XEXP (x, 0) == from
395 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
396 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
397 && REGNO (XEXP (x, 0)) == REGNO (from))))
399 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
400 GET_MODE (from));
401 if (new == 0)
402 new = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
404 validate_change (object, loc, new, 1);
405 return;
407 break;
409 case SUBREG:
410 /* If we have a SUBREG of a register that we are replacing and we are
411 replacing it with a MEM, make a new MEM and try replacing the
412 SUBREG with it. Don't do this if the MEM has a mode-dependent address
413 or if we would be widening it. */
415 if (SUBREG_REG (x) == from
416 && GET_CODE (from) == REG
417 && GET_CODE (to) == MEM
418 && ! mode_dependent_address_p (XEXP (to, 0))
419 && ! MEM_VOLATILE_P (to)
420 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
422 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
423 enum machine_mode mode = GET_MODE (x);
424 rtx new;
426 #if BYTES_BIG_ENDIAN
427 offset += (MIN (UNITS_PER_WORD,
428 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
429 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
430 #endif
432 new = gen_rtx (MEM, mode, plus_constant (XEXP (to, 0), offset));
433 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
434 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
435 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
436 validate_change (object, loc, new, 1);
437 return;
439 break;
441 case ZERO_EXTRACT:
442 case SIGN_EXTRACT:
443 /* If we are replacing a register with memory, try to change the memory
444 to be the mode required for memory in extract operations (this isn't
445 likely to be an insertion operation; if it was, nothing bad will
446 happen, we might just fail in some cases). */
448 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
449 && GET_CODE (XEXP (x, 1)) == CONST_INT
450 && GET_CODE (XEXP (x, 2)) == CONST_INT
451 && ! mode_dependent_address_p (XEXP (to, 0))
452 && ! MEM_VOLATILE_P (to))
454 enum machine_mode wanted_mode = VOIDmode;
455 enum machine_mode is_mode = GET_MODE (to);
456 int width = INTVAL (XEXP (x, 1));
457 int pos = INTVAL (XEXP (x, 2));
459 #ifdef HAVE_extzv
460 if (code == ZERO_EXTRACT)
461 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
462 #endif
463 #ifdef HAVE_extv
464 if (code == SIGN_EXTRACT)
465 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
466 #endif
468 /* If we have a narrower mode, we can do something. */
469 if (wanted_mode != VOIDmode
470 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
472 int offset = pos / BITS_PER_UNIT;
473 rtx newmem;
475 /* If the bytes and bits are counted differently, we
476 must adjust the offset. */
477 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
478 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
479 - offset);
480 #endif
482 pos %= GET_MODE_BITSIZE (wanted_mode);
484 newmem = gen_rtx (MEM, wanted_mode,
485 plus_constant (XEXP (to, 0), offset));
486 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
487 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
488 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);
490 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
491 validate_change (object, &XEXP (x, 0), newmem, 1);
495 break;
498 fmt = GET_RTX_FORMAT (code);
499 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
501 if (fmt[i] == 'e')
502 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
503 else if (fmt[i] == 'E')
504 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
505 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
509 /* Try replacing every occurrence of FROM in INSN with TO. After all
510 changes have been made, validate by seeing if INSN is still valid. */
513 validate_replace_rtx (from, to, insn)
514 rtx from, to, insn;
516 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
517 return apply_change_group ();
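/* An illustrative, hypothetical sketch: replace every use of a register
   in INSN with some equivalent value, keeping the new pattern only if
   the insn is still recognizable.  The names are not part of this file.  */
#if 0
static int
substitute_equiv (insn, reg, equiv)
     rtx insn, reg, equiv;
{
  /* Nonzero means INSN now uses EQUIV everywhere REG appeared and still
     matches an insn pattern; zero means every substitution was undone.  */
  return validate_replace_rtx (reg, equiv, insn);
}
#endif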
520 #ifdef HAVE_cc0
521 /* Return 1 if the insn using CC0 set by INSN does not contain
522 any ordered tests applied to the condition codes.
523 EQ and NE tests do not count. */
526 next_insn_tests_no_inequality (insn)
527 rtx insn;
529 register rtx next = next_cc0_user (insn);
531 /* If there is no next insn, we have to take the conservative choice. */
532 if (next == 0)
533 return 0;
535 return ((GET_CODE (next) == JUMP_INSN
536 || GET_CODE (next) == INSN
537 || GET_CODE (next) == CALL_INSN)
538 && ! inequality_comparisons_p (PATTERN (next)));
541 #if 0 /* This is useless since the insn that sets the cc's
542 must be followed immediately by the use of them. */
543 /* Return 1 if the CC value set up by INSN is not used. */
546 next_insns_test_no_inequality (insn)
547 rtx insn;
549 register rtx next = NEXT_INSN (insn);
551 for (; next != 0; next = NEXT_INSN (next))
553 if (GET_CODE (next) == CODE_LABEL
554 || GET_CODE (next) == BARRIER)
555 return 1;
556 if (GET_CODE (next) == NOTE)
557 continue;
558 if (inequality_comparisons_p (PATTERN (next)))
559 return 0;
560 if (sets_cc0_p (PATTERN (next)) == 1)
561 return 1;
562 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
563 return 1;
565 return 1;
567 #endif
568 #endif
570 /* This is used by find_single_use to locate an rtx that contains exactly one
571 use of DEST, which is typically either a REG or CC0. It returns a
572 pointer to the innermost rtx expression containing DEST. Appearances of
573 DEST that are being used to totally replace it are not counted. */
575 static rtx *
576 find_single_use_1 (dest, loc)
577 rtx dest;
578 rtx *loc;
580 rtx x = *loc;
581 enum rtx_code code = GET_CODE (x);
582 rtx *result = 0;
583 rtx *this_result;
584 int i;
585 char *fmt;
587 switch (code)
589 case CONST_INT:
590 case CONST:
591 case LABEL_REF:
592 case SYMBOL_REF:
593 case CONST_DOUBLE:
594 case CLOBBER:
595 return 0;
597 case SET:
598 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
599 of a REG that occupies all of the REG, the insn uses DEST if
600 it is mentioned in the destination or the source. Otherwise, we
601 need only check the source. */
602 if (GET_CODE (SET_DEST (x)) != CC0
603 && GET_CODE (SET_DEST (x)) != PC
604 && GET_CODE (SET_DEST (x)) != REG
605 && ! (GET_CODE (SET_DEST (x)) == SUBREG
606 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
607 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
608 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
609 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
610 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
611 break;
613 return find_single_use_1 (dest, &SET_SRC (x));
615 case MEM:
616 case SUBREG:
617 return find_single_use_1 (dest, &XEXP (x, 0));
620 /* If it wasn't one of the common cases above, check each expression and
621 vector of this code. Look for a unique usage of DEST. */
623 fmt = GET_RTX_FORMAT (code);
624 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
626 if (fmt[i] == 'e')
628 if (dest == XEXP (x, i)
629 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
630 && REGNO (dest) == REGNO (XEXP (x, i))))
631 this_result = loc;
632 else
633 this_result = find_single_use_1 (dest, &XEXP (x, i));
635 if (result == 0)
636 result = this_result;
637 else if (this_result)
638 /* Duplicate usage. */
639 return 0;
641 else if (fmt[i] == 'E')
643 int j;
645 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
647 if (XVECEXP (x, i, j) == dest
648 || (GET_CODE (dest) == REG
649 && GET_CODE (XVECEXP (x, i, j)) == REG
650 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
651 this_result = loc;
652 else
653 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
655 if (result == 0)
656 result = this_result;
657 else if (this_result)
658 return 0;
663 return result;
666 /* See if DEST, produced in INSN, is used only a single time in the
667 sequel. If so, return a pointer to the innermost rtx expression in which
668 it is used.
670 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
672 This routine will usually return zero either before flow is called (because
673 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
674 note can't be trusted).
676 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
677 care about REG_DEAD notes or LOG_LINKS.
679 Otherwise, we find the single use by finding an insn that has a
680 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
681 only referenced once in that insn, we know that it must be the first
682 and last insn referencing DEST. */
684 rtx *
685 find_single_use (dest, insn, ploc)
686 rtx dest;
687 rtx insn;
688 rtx *ploc;
690 rtx next;
691 rtx *result;
692 rtx link;
694 #ifdef HAVE_cc0
695 if (dest == cc0_rtx)
697 next = NEXT_INSN (insn);
698 if (next == 0
699 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
700 return 0;
702 result = find_single_use_1 (dest, &PATTERN (next));
703 if (result && ploc)
704 *ploc = next;
705 return result;
707 #endif
709 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
710 return 0;
712 for (next = next_nonnote_insn (insn);
713 next != 0 && GET_CODE (next) != CODE_LABEL;
714 next = next_nonnote_insn (next))
715 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
717 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
718 if (XEXP (link, 0) == insn)
719 break;
721 if (link)
723 result = find_single_use_1 (dest, &PATTERN (next));
724 if (ploc)
725 *ploc = next;
726 return result;
730 return 0;
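/* An illustrative, hypothetical sketch: find the single insn that uses
   the register DEST set by INSN and try to substitute a known value at
   that one use.  The function name and its arguments are assumptions.  */
#if 0
static int
propagate_to_single_use (insn, dest, value)
     rtx insn, dest, value;
{
  rtx use_insn;
  rtx *usep = find_single_use (dest, insn, &use_insn);

  if (usep == 0)
    return 0;

  /* Replace just that one use; keep it only if USE_INSN still matches
     some insn pattern.  */
  return validate_change (use_insn, usep, value, 0);
}
#endif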
733 /* Return 1 if OP is a valid general operand for machine mode MODE.
734 This is either a register reference, a memory reference,
735 or a constant. In the case of a memory reference, the address
736 is checked for general validity for the target machine.
738 Register and memory references must have mode MODE in order to be valid,
739 but some constants have no machine mode and are valid for any mode.
741 If MODE is VOIDmode, OP is checked for validity for whatever mode
742 it has.
744 The main use of this function is as a predicate in match_operand
745 expressions in the machine description.
747 For an explanation of this function's behavior for registers of
748 class NO_REGS, see the comment for `register_operand'. */
751 general_operand (op, mode)
752 register rtx op;
753 enum machine_mode mode;
755 register enum rtx_code code = GET_CODE (op);
756 int mode_altering_drug = 0;
758 if (mode == VOIDmode)
759 mode = GET_MODE (op);
761 /* Don't accept CONST_INT or anything similar
762 if the caller wants something floating. */
763 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
764 && GET_MODE_CLASS (mode) != MODE_INT
765 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
766 return 0;
768 if (CONSTANT_P (op))
769 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
770 #ifdef LEGITIMATE_PIC_OPERAND_P
771 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
772 #endif
773 && LEGITIMATE_CONSTANT_P (op));
775 /* Except for certain constants with VOIDmode, already checked for,
776 OP's mode must match MODE if MODE specifies a mode. */
778 if (GET_MODE (op) != mode)
779 return 0;
781 if (code == SUBREG)
783 #ifdef INSN_SCHEDULING
784 /* On machines that have insn scheduling, we want all memory
785 references to be explicit, so outlaw paradoxical SUBREGs. */
786 if (GET_CODE (SUBREG_REG (op)) == MEM
787 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
788 return 0;
789 #endif
791 op = SUBREG_REG (op);
792 code = GET_CODE (op);
793 #if 0
794 /* No longer needed, since (SUBREG (MEM...))
795 will load the MEM into a reload reg in the MEM's own mode. */
796 mode_altering_drug = 1;
797 #endif
800 if (code == REG)
801 /* A register whose class is NO_REGS is not a general operand. */
802 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
803 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
805 if (code == MEM)
807 register rtx y = XEXP (op, 0);
808 if (! volatile_ok && MEM_VOLATILE_P (op))
809 return 0;
810 /* Use the mem's mode, since it will be reloaded thus. */
811 mode = GET_MODE (op);
812 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
814 return 0;
816 win:
817 if (mode_altering_drug)
818 return ! mode_dependent_address_p (XEXP (op, 0));
819 return 1;
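/* An illustrative, hypothetical sketch of calling the predicate directly:
   before reload, a MEM whose address is a newly made pseudo register is a
   general operand on typical targets.  SImode is an arbitrary choice.  */
#if 0
static int
example_general_operand ()
{
  rtx reg = gen_reg_rtx (SImode);
  rtx mem = gen_rtx (MEM, SImode, reg);

  return general_operand (mem, SImode);
}
#endif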
822 /* Return 1 if OP is a valid memory address for a memory reference
823 of mode MODE.
825 The main use of this function is as a predicate in match_operand
826 expressions in the machine description. */
829 address_operand (op, mode)
830 register rtx op;
831 enum machine_mode mode;
833 return memory_address_p (mode, op);
836 /* Return 1 if OP is a register reference of mode MODE.
837 If MODE is VOIDmode, accept a register in any mode.
839 The main use of this function is as a predicate in match_operand
840 expressions in the machine description.
842 As a special exception, registers whose class is NO_REGS are
843 not accepted by `register_operand'. The reason for this change
844 is to allow the representation of special architecture artifacts
845 (such as a condition code register) without extending the rtl
846 definitions. Since registers of class NO_REGS cannot be used
847 as registers in any case where register classes are examined,
848 it is most consistent to keep this function from accepting them. */
851 register_operand (op, mode)
852 register rtx op;
853 enum machine_mode mode;
855 if (GET_MODE (op) != mode && mode != VOIDmode)
856 return 0;
858 if (GET_CODE (op) == SUBREG)
860 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
861 because it is guaranteed to be reloaded into one.
862 Just make sure the MEM is valid in itself.
863 (Ideally, (SUBREG (MEM)...) should not exist after reload,
864 but currently it does result from (SUBREG (REG)...) where the
865 reg went on the stack.) */
866 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
867 return general_operand (op, mode);
868 op = SUBREG_REG (op);
871 /* We don't consider registers whose class is NO_REGS
872 to be a register operand. */
873 return (GET_CODE (op) == REG
874 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
875 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
878 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
879 or a hard register. */
882 scratch_operand (op, mode)
883 register rtx op;
884 enum machine_mode mode;
886 return (GET_MODE (op) == mode
887 && (GET_CODE (op) == SCRATCH
888 || (GET_CODE (op) == REG
889 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
892 /* Return 1 if OP is a valid immediate operand for mode MODE.
894 The main use of this function is as a predicate in match_operand
895 expressions in the machine description. */
898 immediate_operand (op, mode)
899 register rtx op;
900 enum machine_mode mode;
902 /* Don't accept CONST_INT or anything similar
903 if the caller wants something floating. */
904 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
905 && GET_MODE_CLASS (mode) != MODE_INT
906 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
907 return 0;
909 return (CONSTANT_P (op)
910 && (GET_MODE (op) == mode || mode == VOIDmode
911 || GET_MODE (op) == VOIDmode)
912 #ifdef LEGITIMATE_PIC_OPERAND_P
913 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
914 #endif
915 && LEGITIMATE_CONSTANT_P (op));
918 /* Returns 1 if OP is an operand that is a CONST_INT. */
921 const_int_operand (op, mode)
922 register rtx op;
923 enum machine_mode mode;
925 return GET_CODE (op) == CONST_INT;
928 /* Returns 1 if OP is an operand that is a constant integer or constant
929 floating-point number. */
932 const_double_operand (op, mode)
933 register rtx op;
934 enum machine_mode mode;
936 /* Don't accept CONST_INT or anything similar
937 if the caller wants something floating. */
938 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
939 && GET_MODE_CLASS (mode) != MODE_INT
940 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
941 return 0;
943 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
944 && (mode == VOIDmode || GET_MODE (op) == mode
945 || GET_MODE (op) == VOIDmode));
948 /* Return 1 if OP is a general operand that is not an immediate operand. */
951 nonimmediate_operand (op, mode)
952 register rtx op;
953 enum machine_mode mode;
955 return (general_operand (op, mode) && ! CONSTANT_P (op));
958 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
961 nonmemory_operand (op, mode)
962 register rtx op;
963 enum machine_mode mode;
965 if (CONSTANT_P (op))
967 /* Don't accept CONST_INT or anything similar
968 if the caller wants something floating. */
969 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
970 && GET_MODE_CLASS (mode) != MODE_INT
971 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
972 return 0;
974 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
975 #ifdef LEGITIMATE_PIC_OPERAND_P
976 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
977 #endif
978 && LEGITIMATE_CONSTANT_P (op));
981 if (GET_MODE (op) != mode && mode != VOIDmode)
982 return 0;
984 if (GET_CODE (op) == SUBREG)
986 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
987 because it is guaranteed to be reloaded into one.
988 Just make sure the MEM is valid in itself.
989 (Ideally, (SUBREG (MEM)...) should not exist after reload,
990 but currently it does result from (SUBREG (REG)...) where the
991 reg went on the stack.) */
992 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
993 return general_operand (op, mode);
994 op = SUBREG_REG (op);
997 /* We don't consider registers whose class is NO_REGS
998 to be a register operand. */
999 return (GET_CODE (op) == REG
1000 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1001 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1004 /* Return 1 if OP is a valid operand that stands for pushing a
1005 value of mode MODE onto the stack.
1007 The main use of this function is as a predicate in match_operand
1008 expressions in the machine description. */
1011 push_operand (op, mode)
1012 rtx op;
1013 enum machine_mode mode;
1015 if (GET_CODE (op) != MEM)
1016 return 0;
1018 if (GET_MODE (op) != mode)
1019 return 0;
1021 op = XEXP (op, 0);
1023 if (GET_CODE (op) != STACK_PUSH_CODE)
1024 return 0;
1026 return XEXP (op, 0) == stack_pointer_rtx;
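/* An illustrative, hypothetical sketch of an rtx that satisfies
   `push_operand': a MEM whose address pre-decrements (or pre-increments,
   depending on STACK_PUSH_CODE) the stack pointer.  SImode is an
   arbitrary choice.  */
#if 0
static int
example_push_operand ()
{
  rtx addr = gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  rtx mem = gen_rtx (MEM, SImode, addr);

  return push_operand (mem, SImode);
}
#endif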
1029 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1032 memory_address_p (mode, addr)
1033 enum machine_mode mode;
1034 register rtx addr;
1036 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1037 return 0;
1039 win:
1040 return 1;
1043 /* Return 1 if OP is a valid memory reference with mode MODE,
1044 including a valid address.
1046 The main use of this function is as a predicate in match_operand
1047 expressions in the machine description. */
1050 memory_operand (op, mode)
1051 register rtx op;
1052 enum machine_mode mode;
1054 rtx inner;
1056 if (! reload_completed)
1057 /* Note that no SUBREG is a memory operand before end of reload pass,
1058 because (SUBREG (MEM...)) forces reloading into a register. */
1059 return GET_CODE (op) == MEM && general_operand (op, mode);
1061 if (mode != VOIDmode && GET_MODE (op) != mode)
1062 return 0;
1064 inner = op;
1065 if (GET_CODE (inner) == SUBREG)
1066 inner = SUBREG_REG (inner);
1068 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1071 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1072 that is, a memory reference whose address is a general_operand. */
1075 indirect_operand (op, mode)
1076 register rtx op;
1077 enum machine_mode mode;
1079 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1080 if (! reload_completed
1081 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1083 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1084 rtx inner = SUBREG_REG (op);
1086 #if BYTES_BIG_ENDIAN
1087 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1088 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1089 #endif
1091 if (mode != VOIDmode && GET_MODE (op) != mode)
1092 return 0;
1094 /* The only way that we can have a general_operand as the resulting
1095 address is if OFFSET is zero and the address already is an operand
1096 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1097 operand. */
1099 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1100 || (GET_CODE (XEXP (inner, 0)) == PLUS
1101 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1102 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1103 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1106 return (GET_CODE (op) == MEM
1107 && memory_operand (op, mode)
1108 && general_operand (XEXP (op, 0), Pmode));
1111 /* Return 1 if this is a comparison operator. This allows the use of
1112 MATCH_OPERATOR to recognize all the branch insns. */
1115 comparison_operator (op, mode)
1116 register rtx op;
1117 enum machine_mode mode;
1119 return ((mode == VOIDmode || GET_MODE (op) == mode)
1120 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1123 /* If BODY is an insn body that uses ASM_OPERANDS,
1124 return the number of operands (both input and output) in the insn.
1125 Otherwise return -1. */
1128 asm_noperands (body)
1129 rtx body;
1131 if (GET_CODE (body) == ASM_OPERANDS)
1132 /* No output operands: return number of input operands. */
1133 return ASM_OPERANDS_INPUT_LENGTH (body);
1134 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1135 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1136 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1137 else if (GET_CODE (body) == PARALLEL
1138 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1139 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1141 /* Multiple output operands, or 1 output plus some clobbers:
1142 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1143 int i;
1144 int n_sets;
1146 /* Count backwards through CLOBBERs to determine number of SETs. */
1147 for (i = XVECLEN (body, 0); i > 0; i--)
1149 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1150 break;
1151 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1152 return -1;
1155 /* N_SETS is now number of output operands. */
1156 n_sets = i;
1158 /* Verify that all the SETs we have
1159 came from a single original asm_operands insn
1160 (so that invalid combinations are blocked). */
1161 for (i = 0; i < n_sets; i++)
1163 rtx elt = XVECEXP (body, 0, i);
1164 if (GET_CODE (elt) != SET)
1165 return -1;
1166 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1167 return -1;
1168 /* If these ASM_OPERANDS rtx's came from different original insns
1169 then they aren't allowed together. */
1170 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1171 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1172 return -1;
1174 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1175 + n_sets);
1177 else if (GET_CODE (body) == PARALLEL
1178 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1180 /* 0 outputs, but some clobbers:
1181 body is [(asm_operands ...) (clobber (reg ...))...]. */
1182 int i;
1184 /* Make sure all the other parallel things really are clobbers. */
1185 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1186 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1187 return -1;
1189 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1191 else
1192 return -1;
1195 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1196 copy its operands (both input and output) into the vector OPERANDS,
1197 the locations of the operands within the insn into the vector OPERAND_LOCS,
1198 and the constraints for the operands into CONSTRAINTS.
1199 Write the modes of the operands into MODES.
1200 Return the assembler-template.
1202 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1203 we don't store that info. */
1205 char *
1206 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1207 rtx body;
1208 rtx *operands;
1209 rtx **operand_locs;
1210 char **constraints;
1211 enum machine_mode *modes;
1213 register int i;
1214 int noperands;
1215 char *template = 0;
1217 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1219 rtx asmop = SET_SRC (body);
1220 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1222 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1224 for (i = 1; i < noperands; i++)
1226 if (operand_locs)
1227 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1228 if (operands)
1229 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1230 if (constraints)
1231 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1232 if (modes)
1233 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1236 /* The output is in the SET.
1237 Its constraint is in the ASM_OPERANDS itself. */
1238 if (operands)
1239 operands[0] = SET_DEST (body);
1240 if (operand_locs)
1241 operand_locs[0] = &SET_DEST (body);
1242 if (constraints)
1243 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1244 if (modes)
1245 modes[0] = GET_MODE (SET_DEST (body));
1246 template = ASM_OPERANDS_TEMPLATE (asmop);
1248 else if (GET_CODE (body) == ASM_OPERANDS)
1250 rtx asmop = body;
1251 /* No output operands: BODY is (asm_operands ....). */
1253 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1255 /* The input operands are found in the 1st element vector. */
1256 /* Constraints for inputs are in the 2nd element vector. */
1257 for (i = 0; i < noperands; i++)
1259 if (operand_locs)
1260 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1261 if (operands)
1262 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1263 if (constraints)
1264 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1265 if (modes)
1266 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1268 template = ASM_OPERANDS_TEMPLATE (asmop);
1270 else if (GET_CODE (body) == PARALLEL
1271 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1273 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1274 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1275 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1276 int nout = 0; /* Does not include CLOBBERs. */
1278 /* At least one output, plus some CLOBBERs. */
1280 /* The outputs are in the SETs.
1281 Their constraints are in the ASM_OPERANDS itself. */
1282 for (i = 0; i < nparallel; i++)
1284 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1285 break; /* Past last SET */
1287 if (operands)
1288 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1289 if (operand_locs)
1290 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1291 if (constraints)
1292 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1293 if (modes)
1294 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1295 nout++;
1298 for (i = 0; i < nin; i++)
1300 if (operand_locs)
1301 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1302 if (operands)
1303 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1304 if (constraints)
1305 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1306 if (modes)
1307 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1310 template = ASM_OPERANDS_TEMPLATE (asmop);
1312 else if (GET_CODE (body) == PARALLEL
1313 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1315 /* No outputs, but some CLOBBERs. */
1317 rtx asmop = XVECEXP (body, 0, 0);
1318 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1320 for (i = 0; i < nin; i++)
1322 if (operand_locs)
1323 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1324 if (operands)
1325 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1326 if (constraints)
1327 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1328 if (modes)
1329 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1332 template = ASM_OPERANDS_TEMPLATE (asmop);
1335 return template;
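/* An illustrative sketch, along the same lines as check_asm_operands
   above: pull the operands and their constraint strings out of an asm
   body.  The function name and the use of stderr are assumptions.  */
#if 0
static void
dump_asm_constraints (body)
     rtx body;
{
  int noperands = asm_noperands (body);
  rtx *operands;
  char **constraints;
  int i;

  if (noperands <= 0)
    return;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (char **) alloca (noperands * sizeof (char *));
  decode_asm_operands (body, operands, NULL_PTR, constraints, NULL_PTR);

  for (i = 0; i < noperands; i++)
    fprintf (stderr, "operand %d has constraint \"%s\"\n", i, constraints[i]);
}
#endif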
1338 /* Given an rtx *P, if it is a sum containing an integer constant term,
1339 return the location (type rtx *) of the pointer to that constant term.
1340 Otherwise, return a null pointer. */
1342 static rtx *
1343 find_constant_term_loc (p)
1344 rtx *p;
1346 register rtx *tem;
1347 register enum rtx_code code = GET_CODE (*p);
1349 /* If *P IS such a constant term, P is its location. */
1351 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1352 || code == CONST)
1353 return p;
1355 /* Otherwise, if not a sum, it has no constant term. */
1357 if (GET_CODE (*p) != PLUS)
1358 return 0;
1360 /* If one of the summands is constant, return its location. */
1362 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1363 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1364 return p;
1366 /* Otherwise, check each summand for containing a constant term. */
1368 if (XEXP (*p, 0) != 0)
1370 tem = find_constant_term_loc (&XEXP (*p, 0));
1371 if (tem != 0)
1372 return tem;
1375 if (XEXP (*p, 1) != 0)
1377 tem = find_constant_term_loc (&XEXP (*p, 1));
1378 if (tem != 0)
1379 return tem;
1382 return 0;
1385 /* Return 1 if OP is a memory reference
1386 whose address contains no side effects
1387 and remains valid after the addition
1388 of a positive integer less than the
1389 size of the object being referenced.
1391 We assume that the original address is valid and do not check it.
1393 This uses strict_memory_address_p as a subroutine, so
1394 don't use it before reload. */
1397 offsettable_memref_p (op)
1398 rtx op;
1400 return ((GET_CODE (op) == MEM)
1401 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1404 /* Similar, but don't require a strictly valid mem ref:
1405 consider pseudo-regs valid as index or base regs. */
1408 offsettable_nonstrict_memref_p (op)
1409 rtx op;
1411 return ((GET_CODE (op) == MEM)
1412 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1415 /* Return 1 if Y is a memory address which contains no side effects
1416 and would remain valid after the addition of a positive integer
1417 less than the size of that mode.
1419 We assume that the original address is valid and do not check it.
1420 We do check that it is valid for narrower modes.
1422 If STRICTP is nonzero, we require a strictly valid address,
1423 for the sake of use in reload.c. */
1426 offsettable_address_p (strictp, mode, y)
1427 int strictp;
1428 enum machine_mode mode;
1429 register rtx y;
1431 register enum rtx_code ycode = GET_CODE (y);
1432 register rtx z;
1433 rtx y1 = y;
1434 rtx *y2;
1435 int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);
1437 if (CONSTANT_ADDRESS_P (y))
1438 return 1;
1440 /* Adjusting an offsettable address involves changing to a narrower mode.
1441 Make sure that's OK. */
1443 if (mode_dependent_address_p (y))
1444 return 0;
1446 /* If the expression contains a constant term,
1447 see if it remains valid when max possible offset is added. */
1449 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1451 int good;
1453 y1 = *y2;
1454 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1455 /* Use QImode because an odd displacement may be automatically invalid
1456 for any wider mode. But it should be valid for a single byte. */
1457 good = (*addressp) (QImode, y);
1459 /* In any case, restore old contents of memory. */
1460 *y2 = y1;
1461 return good;
1464 if (ycode == PRE_DEC || ycode == PRE_INC
1465 || ycode == POST_DEC || ycode == POST_INC)
1466 return 0;
1468 /* The offset added here is chosen as the maximum offset that
1469 any instruction could need to add when operating on something
1470 of the specified mode. We assume that if Y and Y+c are
1471 valid addresses then so is Y+d for all 0<d<c. */
1473 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1475 /* Use QImode because an odd displacement may be automatically invalid
1476 for any wider mode. But it should be valid for a single byte. */
1477 return (*addressp) (QImode, z);
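/* An illustrative, hypothetical sketch: check whether BASE plus a small
   displacement could still be offset by up to 3 more bytes when used as
   an SImode address.  The nonstrict form (STRICTP == 0) accepts pseudo
   registers as bases, so it may be used before reload.  */
#if 0
static int
base_plus_8_is_offsettable (base)
     rtx base;
{
  rtx addr = plus_constant (base, 8);

  return offsettable_address_p (0, SImode, addr);
}
#endif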
1480 /* Return 1 if ADDR is an address-expression whose effect depends
1481 on the mode of the memory reference it is used in.
1483 Autoincrement addressing is a typical example of mode-dependence
1484 because the amount of the increment depends on the mode. */
1487 mode_dependent_address_p (addr)
1488 rtx addr;
1490 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1491 return 0;
1492 win:
1493 return 1;
1496 /* Return 1 if OP is a general operand
1497 other than a memory ref with a mode dependent address. */
1500 mode_independent_operand (op, mode)
1501 enum machine_mode mode;
1502 rtx op;
1504 rtx addr;
1506 if (! general_operand (op, mode))
1507 return 0;
1509 if (GET_CODE (op) != MEM)
1510 return 1;
1512 addr = XEXP (op, 0);
1513 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1514 return 1;
1515 lose:
1516 return 0;
1519 /* Given an operand OP that is a valid memory reference
1520 which satisfies offsettable_memref_p,
1521 return a new memory reference whose address has been adjusted by OFFSET.
1522 OFFSET should be positive and less than the size of the object referenced. */
1526 adj_offsettable_operand (op, offset)
1527 rtx op;
1528 int offset;
1530 register enum rtx_code code = GET_CODE (op);
1532 if (code == MEM)
1534 register rtx y = XEXP (op, 0);
1535 register rtx new;
1537 if (CONSTANT_ADDRESS_P (y))
1539 new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset));
1540 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1541 return new;
1544 if (GET_CODE (y) == PLUS)
1546 rtx z = y;
1547 register rtx *const_loc;
1549 op = copy_rtx (op);
1550 z = XEXP (op, 0);
1551 const_loc = find_constant_term_loc (&z);
1552 if (const_loc)
1554 *const_loc = plus_constant_for_output (*const_loc, offset);
1555 return op;
1559 new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset));
1560 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1561 return new;
1563 abort ();
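/* An illustrative, hypothetical sketch: given a double-word memory operand
   OP that satisfies offsettable_memref_p, build a reference to its upper
   word.  The 4-byte word size is an assumption; note that the new MEM
   keeps OP's mode, so a caller wanting a single-word reference would
   still have to change its mode (e.g. with PUT_MODE).  */
#if 0
static rtx
high_word_of_mem (op)
     rtx op;
{
  return adj_offsettable_operand (op, 4);
}
#endif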
1566 #ifdef REGISTER_CONSTRAINTS
1568 /* Check the operands of an insn (found in recog_operands)
1569 against the insn's operand constraints (found via INSN_CODE_NUM)
1570 and return 1 if they are valid.
1572 WHICH_ALTERNATIVE is set to a number which indicates which
1573 alternative of constraints was matched: 0 for the first alternative,
1574 1 for the next, etc.
1576 In addition, when two operands are required to match
1577 and it happens that the output operand is (reg) while the
1578 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
1579 make the output operand look like the input.
1580 This is because the output operand is the one the template will print.
1582 This is used in final, just before printing the assembler code and by
1583 the routines that determine an insn's attribute.
1585 If STRICT is a positive non-zero value, it means that we have been
1586 called after reload has been completed. In that case, we must
1587 do all checks strictly. If it is zero, it means that we have been called
1588 before reload has completed. In that case, we first try to see if we can
1589 find an alternative that matches strictly. If not, we try again, this
1590 time assuming that reload will fix up the insn. This provides a "best
1591 guess" for the alternative and is used to compute attributes of insns prior
1592 to reload. A negative value of STRICT is used for this internal call. */
1594 struct funny_match
1596 int this, other;
1600 constrain_operands (insn_code_num, strict)
1601 int insn_code_num;
1602 int strict;
1604 char *constraints[MAX_RECOG_OPERANDS];
1605 int matching_operands[MAX_RECOG_OPERANDS];
1606 enum op_type {OP_IN, OP_OUT, OP_INOUT} op_types[MAX_RECOG_OPERANDS];
1607 int earlyclobber[MAX_RECOG_OPERANDS];
1608 register int c;
1609 int noperands = insn_n_operands[insn_code_num];
1611 struct funny_match funny_match[MAX_RECOG_OPERANDS];
1612 int funny_match_index;
1613 int nalternatives = insn_n_alternatives[insn_code_num];
1615 if (noperands == 0 || nalternatives == 0)
1616 return 1;
1618 for (c = 0; c < noperands; c++)
1620 constraints[c] = insn_operand_constraint[insn_code_num][c];
1621 matching_operands[c] = -1;
1622 op_types[c] = OP_IN;
1625 which_alternative = 0;
1627 while (which_alternative < nalternatives)
1629 register int opno;
1630 int lose = 0;
1631 funny_match_index = 0;
1633 for (opno = 0; opno < noperands; opno++)
1635 register rtx op = recog_operand[opno];
1636 enum machine_mode mode = GET_MODE (op);
1637 register char *p = constraints[opno];
1638 int offset = 0;
1639 int win = 0;
1640 int val;
1642 earlyclobber[opno] = 0;
1644 if (GET_CODE (op) == SUBREG)
1646 if (GET_CODE (SUBREG_REG (op)) == REG
1647 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
1648 offset = SUBREG_WORD (op);
1649 op = SUBREG_REG (op);
1652 /* An empty constraint or empty alternative
1653 allows anything which matched the pattern. */
1654 if (*p == 0 || *p == ',')
1655 win = 1;
1657 while (*p && (c = *p++) != ',')
1658 switch (c)
1660 case '?':
1661 case '!':
1662 case '*':
1663 case '%':
1664 break;
1666 case '#':
1667 /* Ignore rest of this alternative as far as
1668 constraint checking is concerned. */
1669 while (*p && *p != ',')
1670 p++;
1671 break;
1673 case '=':
1674 op_types[opno] = OP_OUT;
1675 break;
1677 case '+':
1678 op_types[opno] = OP_INOUT;
1679 break;
1681 case '&':
1682 earlyclobber[opno] = 1;
1683 break;
1685 case '0':
1686 case '1':
1687 case '2':
1688 case '3':
1689 case '4':
1690 /* This operand must be the same as a previous one.
1691 This kind of constraint is used for instructions such
1692 as add when they take only two operands.
1694 Note that the lower-numbered operand is passed first.
1696 If we are not testing strictly, assume that this constraint
1697 will be satisfied. */
1698 if (strict < 0)
1699 val = 1;
1700 else
1701 val = operands_match_p (recog_operand[c - '0'],
1702 recog_operand[opno]);
1704 matching_operands[opno] = c - '0';
1705 matching_operands[c - '0'] = opno;
1707 if (val != 0)
1708 win = 1;
1709 /* If output is *x and input is *--x,
1710 arrange later to change the output to *--x as well,
1711 since the output op is the one that will be printed. */
1712 if (val == 2 && strict > 0)
1714 funny_match[funny_match_index].this = opno;
1715 funny_match[funny_match_index++].other = c - '0';
1717 break;
1719 case 'p':
1720 /* p is used for address_operands. When we are called by
1721 gen_input_reload, no one will have checked that the
1722 address is strictly valid, i.e., that all pseudos
1723 requiring hard regs have gotten them. */
1724 if (strict <= 0
1725 || (strict_memory_address_p
1726 (insn_operand_mode[insn_code_num][opno], op)))
1727 win = 1;
1728 break;
1730 /* No need to check general_operand again;
1731 it was done in insn-recog.c. */
1732 case 'g':
1733 /* Anything goes unless it is a REG and really has a hard reg
1734 but the hard reg is not in the class GENERAL_REGS. */
1735 if (strict < 0
1736 || GENERAL_REGS == ALL_REGS
1737 || GET_CODE (op) != REG
1738 || (reload_in_progress
1739 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1740 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
1741 win = 1;
1742 break;
1744 case 'r':
1745 if (strict < 0
1746 || (strict == 0
1747 && GET_CODE (op) == REG
1748 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1749 || (strict == 0 && GET_CODE (op) == SCRATCH)
1750 || (GET_CODE (op) == REG
1751 && ((GENERAL_REGS == ALL_REGS
1752 && REGNO (op) < FIRST_PSEUDO_REGISTER)
1753 || reg_fits_class_p (op, GENERAL_REGS,
1754 offset, mode))))
1755 win = 1;
1756 break;
1758 case 'X':
1759 /* This is used for a MATCH_SCRATCH in the cases when we
1760 don't actually need anything. So anything goes any time. */
1761 win = 1;
1762 break;
1764 case 'm':
1765 if (GET_CODE (op) == MEM
1766 /* Before reload, accept what reload can turn into mem. */
1767 || (strict < 0 && CONSTANT_P (op))
1768 /* During reload, accept a pseudo */
1769 || (reload_in_progress && GET_CODE (op) == REG
1770 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1771 win = 1;
1772 break;
1774 case '<':
1775 if (GET_CODE (op) == MEM
1776 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
1777 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1778 win = 1;
1779 break;
1781 case '>':
1782 if (GET_CODE (op) == MEM
1783 && (GET_CODE (XEXP (op, 0)) == PRE_INC
1784 || GET_CODE (XEXP (op, 0)) == POST_INC))
1785 win = 1;
1786 break;
1788 case 'E':
1789 /* Match any CONST_DOUBLE, but only if
1790 we can examine the bits of it reliably. */
1791 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1792 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1793 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1794 break;
1795 if (GET_CODE (op) == CONST_DOUBLE)
1796 win = 1;
1797 break;
1799 case 'F':
1800 if (GET_CODE (op) == CONST_DOUBLE)
1801 win = 1;
1802 break;
1804 case 'G':
1805 case 'H':
1806 if (GET_CODE (op) == CONST_DOUBLE
1807 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
1808 win = 1;
1809 break;
1811 case 's':
1812 if (GET_CODE (op) == CONST_INT
1813 || (GET_CODE (op) == CONST_DOUBLE
1814 && GET_MODE (op) == VOIDmode))
1815 break;
1816 case 'i':
1817 if (CONSTANT_P (op))
1818 win = 1;
1819 break;
1821 case 'n':
1822 if (GET_CODE (op) == CONST_INT
1823 || (GET_CODE (op) == CONST_DOUBLE
1824 && GET_MODE (op) == VOIDmode))
1825 win = 1;
1826 break;
1828 case 'I':
1829 case 'J':
1830 case 'K':
1831 case 'L':
1832 case 'M':
1833 case 'N':
1834 case 'O':
1835 case 'P':
1836 if (GET_CODE (op) == CONST_INT
1837 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
1838 win = 1;
1839 break;
1841 #ifdef EXTRA_CONSTRAINT
1842 case 'Q':
1843 case 'R':
1844 case 'S':
1845 case 'T':
1846 case 'U':
1847 if (EXTRA_CONSTRAINT (op, c))
1848 win = 1;
1849 break;
1850 #endif
1852 case 'V':
1853 if (GET_CODE (op) == MEM
1854 && ! offsettable_memref_p (op))
1855 win = 1;
1856 break;
1858 case 'o':
1859 if ((strict > 0 && offsettable_memref_p (op))
1860 || (strict == 0 && offsettable_nonstrict_memref_p (op))
1861 /* Before reload, accept what reload can handle. */
1862 || (strict < 0
1863 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
1864 /* During reload, accept a pseudo */
1865 || (reload_in_progress && GET_CODE (op) == REG
1866 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1867 win = 1;
1868 break;
1870 default:
1871 if (strict < 0
1872 || (strict == 0
1873 && GET_CODE (op) == REG
1874 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1875 || (strict == 0 && GET_CODE (op) == SCRATCH)
1876 || (GET_CODE (op) == REG
1877 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
1878 offset, mode)))
1879 win = 1;
1882 constraints[opno] = p;
1883 /* If this operand did not win somehow,
1884 this alternative loses. */
1885 if (! win)
1886 lose = 1;
1888 /* This alternative won; the operands are ok.
1889 Change whichever operands this alternative says to change. */
1890 if (! lose)
1892 int opno, eopno;
1894 /* See if any earlyclobber operand conflicts with some other
1895 operand. */
1897 if (strict > 0)
1898 for (eopno = 0; eopno < noperands; eopno++)
1899 /* Ignore earlyclobber operands now in memory,
1900 because we would often report failure when we have
1901 two memory operands, one of which was formerly a REG. */
1902 if (earlyclobber[eopno]
1903 && GET_CODE (recog_operand[eopno]) == REG)
1904 for (opno = 0; opno < noperands; opno++)
1905 if ((GET_CODE (recog_operand[opno]) == MEM
1906 || op_types[opno] != OP_OUT)
1907 && opno != eopno
1908 /* Ignore things like match_operator operands. */
1909 && *constraints[opno] != 0
1910 && ! (matching_operands[opno] == eopno
1911 && rtx_equal_p (recog_operand[opno],
1912 recog_operand[eopno]))
1913 && ! safe_from_earlyclobber (recog_operand[opno],
1914 recog_operand[eopno]))
1915 lose = 1;
1917 if (! lose)
1919 while (--funny_match_index >= 0)
1921 recog_operand[funny_match[funny_match_index].other]
1922 = recog_operand[funny_match[funny_match_index].this];
1925 return 1;
1929 which_alternative++;
1932 /* If we are about to reject this, but we are not to test strictly,
1933 try a very loose test. Only return failure if it fails also. */
1934 if (strict == 0)
1935 return constrain_operands (insn_code_num, -1);
1936 else
1937 return 0;
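/* An illustrative sketch of the usual calling sequence after reload, as
   in final.c and in apply_change_group above: recognize the insn, extract
   its operands into recog_operand, then check them strictly against the
   constraints.  The wrapper function itself is hypothetical.  */
#if 0
static int
insn_satisfies_constraints (insn)
     rtx insn;
{
  int icode = recog_memoized (insn);

  if (icode < 0)
    return 0;

  insn_extract (insn);
  return constrain_operands (icode, 1);
}
#endif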
1940 /* Return 1 iff OPERAND (assumed to be a REG rtx)
1941 is a hard reg in class CLASS when its regno is offsetted by OFFSET
1942 and changed to mode MODE.
1943 If REG occupies multiple hard regs, all of them must be in CLASS. */
1946 reg_fits_class_p (operand, class, offset, mode)
1947 rtx operand;
1948 register enum reg_class class;
1949 int offset;
1950 enum machine_mode mode;
1952 register int regno = REGNO (operand);
1953 if (regno < FIRST_PSEUDO_REGISTER
1954 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1955 regno + offset))
1957 register int sr;
1958 regno += offset;
1959 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
1960 sr > 0; sr--)
1961 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1962 regno + sr))
1963 break;
1964 return sr == 0;
1967 return 0;
1970 #endif /* REGISTER_CONSTRAINTS */