/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1	PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1		PARAMS ((rtx, rtx *));
static void validate_replace_src_1	PARAMS ((rtx *, void *));
static rtx split_insn			PARAMS ((rtx));
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}
/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
	c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
	return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes =
	(change_t*) xrealloc (changes,
			      sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
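
/* Editorial usage sketch (not part of the original file; INSN, NEW_SRC
   and NEW_DEST are hypothetical): a pass queues several tentative edits
   with IN_GROUP nonzero and then commits or rolls them all back
   atomically:

       validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
       validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
       if (! apply_change_group ())
	 return;   -- every queued edit was undone; INSN is unchanged

   With IN_GROUP == 0, validate_change itself calls apply_change_group,
   so the single edit is validated immediately.  */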
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */

int
num_changes_pending ()
{
  return num_changes;
}
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (GET_CODE (object) == MEM)
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
	if (changes[i].object
	    && INSN_P (changes[i].object)
	    && (bb = BLOCK_FOR_INSN (changes[i].object)))
	  bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
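
/* Editorial note: together with num_validated_changes, cancel_changes
   supports partial rollback.  A hypothetical caller can record the
   group's high-water mark and retract only its own edits:

       int n = num_validated_changes ();
       ...queue more changes with validate_change (..., 1)...
       if (need_to_back_out)
	 cancel_changes (n);
*/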
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourselves recursively to perform the replacements.  */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
		       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
				       : swap_condition (code),
				       GET_MODE (x), XEXP (x, 1),
				       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new)
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
	validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Same as validate_replace_src_group, but validate by seeing if
   INSN is still valid.  */

int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group ();
}
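
/* Editorial usage sketch (hypothetical names): to rewrite INSN so that
   pseudo FROM_REG becomes TO_REG everywhere, keeping the edit only if
   INSN still matches some pattern:

       if (validate_replace_rtx (from_reg, to_reg, insn))
	 ...   -- INSN now uses TO_REG and is still recognized

   validate_replace_src does the same while leaving SET_DESTs untouched;
   the _group variants defer validation to a later apply_change_group
   call.  */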
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		return 0;
	    }
	}
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (sub) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && GET_CODE (sub) == MEM)
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      if (GET_CODE (y) == ADDRESSOF)
	return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
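
/* Editorial example: in a machine description, general_operand is the
   usual workhorse predicate.  A pattern such as (illustrative only)

       (define_insn "..."
	 [(set (match_operand:SI 0 "general_operand" "=g")
	       (match_operand:SI 1 "general_operand" "g"))]
	 ...)

   causes the generated recognizer to call general_operand with each
   operand rtx and SImode when matching the insn.  */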
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (sub) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (sub) == REG
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && (TEST_HARD_REG_BIT
	      (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
	       REGNO (sub)))
	  && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (sub))
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (GET_CODE (op) == REG
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (GET_CODE (op) == CONST_INT
	  && mode != VOIDmode
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
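
/* Editorial example: with a downward-growing stack and no push rounding,
   push_operand accepts the usual push MEM (modes illustrative)

       (mem:SI (pre_dec:SI (reg:SI sp)))

   while a PUSH_ROUNDING target that pads an HImode push to 4 bytes gets
   the PRE_MODIFY form

       (mem:HI (pre_modify:SI (reg:SI sp)
			      (plus:SI (reg:SI sp) (const_int -4))))  */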
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
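
/* Editorial summary of the shapes accepted above (operand counts are
   whatever the ASM_OPERANDS carries):

       (asm_operands ...)                                 -- no outputs
       (set OUTPUT (asm_operands ...))                    -- one output
       (parallel [(set OUT (asm_operands ...)) ...
		  (clobber ...) ...])                     -- outputs + clobbers
       (parallel [(asm_operands ...) (clobber ...) ...])  -- clobbers only

   Anything else yields -1.  */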
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
	{
	case '=':
	case '+':
	case '*':
	case '%':
	case '?':
	case '!':
	case '#':
	case '&':
	case ',':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  while (ISDIGIT (*constraint))
	    constraint++;
	  result = -1;
	  break;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    return 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    return 1;
	  break;

	case '>':
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    return 1;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE
	      || (GET_CODE (op) == CONST_VECTOR
		  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
	    return 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	    return 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	    return 1;
	  break;

	case 's':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* FALLTHRU */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    return 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    return 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	    return 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	    return 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	    return 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	    return 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	    return 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	    return 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	    return 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	    return 1;
	  break;

	case 'X':
	  return 1;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    return 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		return 1;
	    }
#ifdef EXTRA_CONSTRAINT
	  if (EXTRA_CONSTRAINT (op, c))
	    return 1;
	  if (EXTRA_MEMORY_CONSTRAINT (c))
	    {
	      /* Every memory operand can be reloaded to fit.  */
	      if (memory_operand (op, VOIDmode))
		return 1;
	    }
	  if (EXTRA_ADDRESS_CONSTRAINT (c))
	    {
	      /* Every address operand can be reloaded to fit.  */
	      if (address_operand (op, VOIDmode))
		return 1;
	    }
#endif
	  break;
	}
    }

  return result;
}
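
/* Editorial example: asm_operand_ok (GEN_INT (42), "i") returns 1, since
   a CONST_INT satisfies "i"; a matching constraint such as "0" yields -1
   (inconclusive), because only constrain_operands can check matches.  */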
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (p)
     rtx *p;
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     rtx y;
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == 'a')
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (addr)
     rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}

/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
  return 0;
}
2071 /* Like extract_insn, but save the insn extracted and don't extract it again
2072 when called again for the same insn, expecting that recog_data still
2073 contains the valid information. This is used primarily by the gen_attr
2074 infrastructure, which often extracts the same insn again and again. */
2075 void
2076 extract_insn_cached (insn)
2077 rtx insn;
2079 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2080 return;
2081 extract_insn (insn);
2082 recog_data.insn = insn;
2084 /* Do cached extract_insn and constrain_operands, and complain about
2085 failures. Used by insn_attrtab. */
2086 void
2087 extract_constrain_insn_cached (insn)
2088 rtx insn;
2090 extract_insn_cached (insn);
2091 if (which_alternative == -1
2092 && !constrain_operands (reload_completed))
2093 fatal_insn_not_found (insn);
2095 /* Do cached constrain_operands and complain about failures. */
2097 constrain_operands_cached (strict)
2098 int strict;
2100 if (which_alternative == -1)
2101 return constrain_operands (strict);
2102 else
2103 return 1;
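/* An illustrative sketch, not part of the original file: the calling
   pattern the generated attribute code follows.  The function name is
   hypothetical; the recog entry points are the real ones above.  */
#if 0
static int
example_matched_alternative (insn)
     rtx insn;
{
  /* Fills recog_data and runs constrain_operands once; repeated calls
     for the same insn reuse the cached results.  */
  extract_constrain_insn_cached (insn);
  return which_alternative;
}
#endif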
2106 /* Analyze INSN and fill in recog_data. */
2108 void
2109 extract_insn (insn)
2110 rtx insn;
2112 int i;
2113 int icode;
2114 int noperands;
2115 rtx body = PATTERN (insn);
2117 recog_data.insn = NULL;
2118 recog_data.n_operands = 0;
2119 recog_data.n_alternatives = 0;
2120 recog_data.n_dups = 0;
2121 which_alternative = -1;
2123 switch (GET_CODE (body))
2125 case USE:
2126 case CLOBBER:
2127 case ASM_INPUT:
2128 case ADDR_VEC:
2129 case ADDR_DIFF_VEC:
2130 return;
2132 case SET:
2133 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2134 goto asm_insn;
2135 else
2136 goto normal_insn;
2137 case PARALLEL:
2138 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2139 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2140 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2141 goto asm_insn;
2142 else
2143 goto normal_insn;
2144 case ASM_OPERANDS:
2145 asm_insn:
2146 recog_data.n_operands = noperands = asm_noperands (body);
2147 if (noperands >= 0)
2149 /* This insn is an `asm' with operands. */
2151 /* expand_asm_operands makes sure there aren't too many operands. */
2152 if (noperands > MAX_RECOG_OPERANDS)
2153 abort ();
2155 /* Now get the operand values and constraints out of the insn. */
2156 decode_asm_operands (body, recog_data.operand,
2157 recog_data.operand_loc,
2158 recog_data.constraints,
2159 recog_data.operand_mode);
2160 if (noperands > 0)
2162 const char *p = recog_data.constraints[0];
2163 recog_data.n_alternatives = 1;
2164 while (*p)
2165 recog_data.n_alternatives += (*p++ == ',');
2167 break;
2169 fatal_insn_not_found (insn);
2171 default:
2172 normal_insn:
2173 /* Ordinary insn: recognize it, get the operands via insn_extract
2174 and get the constraints. */
2176 icode = recog_memoized (insn);
2177 if (icode < 0)
2178 fatal_insn_not_found (insn);
2180 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2181 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2182 recog_data.n_dups = insn_data[icode].n_dups;
2184 insn_extract (insn);
2186 for (i = 0; i < noperands; i++)
2188 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2189 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2190 /* VOIDmode match_operands get their mode from the real operand. */
2191 if (recog_data.operand_mode[i] == VOIDmode)
2192 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2195 for (i = 0; i < noperands; i++)
2196 recog_data.operand_type[i]
2197 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2198 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2199 : OP_IN);
2201 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2202 abort ();
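/* An illustrative sketch, not part of the original file: after
   extract_insn, a pass can walk the operands through recog_data.
   The function name is hypothetical.  */
#if 0
static void
example_dump_operands (insn, f)
     rtx insn;
     FILE *f;
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    fprintf (f, "operand %d: %s, constraint `%s'\n", i,
	     GET_RTX_NAME (GET_CODE (recog_data.operand[i])),
	     recog_data.constraints[i]);
}
#endif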
2205 /* After calling extract_insn, you can use this function to extract some
2206 information from the constraint strings into a more usable form.
2207 The collected data is stored in recog_op_alt. */
2208 void
2209 preprocess_constraints ()
2211 int i;
2213 memset (recog_op_alt, 0, sizeof recog_op_alt);
2214 for (i = 0; i < recog_data.n_operands; i++)
2216 int j;
2217 struct operand_alternative *op_alt;
2218 const char *p = recog_data.constraints[i];
2220 op_alt = recog_op_alt[i];
2222 for (j = 0; j < recog_data.n_alternatives; j++)
2224 op_alt[j].class = NO_REGS;
2225 op_alt[j].constraint = p;
2226 op_alt[j].matches = -1;
2227 op_alt[j].matched = -1;
2229 if (*p == '\0' || *p == ',')
2231 op_alt[j].anything_ok = 1;
2232 continue;
2235 for (;;)
2237 char c = *p++;
2238 if (c == '#')
2240 c = *p++;
2241 while (c != ',' && c != '\0');
2242 if (c == ',' || c == '\0')
2243 break;
2245 switch (c)
2247 case '=': case '+': case '*': case '%':
2248 case 'E': case 'F': case 'G': case 'H':
2249 case 's': case 'i': case 'n':
2250 case 'I': case 'J': case 'K': case 'L':
2251 case 'M': case 'N': case 'O': case 'P':
2252 /* These don't say anything we care about. */
2253 break;
2255 case '?':
2256 op_alt[j].reject += 6;
2257 break;
2258 case '!':
2259 op_alt[j].reject += 600;
2260 break;
2261 case '&':
2262 op_alt[j].earlyclobber = 1;
2263 break;
2265 case '0': case '1': case '2': case '3': case '4':
2266 case '5': case '6': case '7': case '8': case '9':
2268 char *end;
2269 op_alt[j].matches = strtoul (p - 1, &end, 10);
2270 recog_op_alt[op_alt[j].matches][j].matched = i;
2271 p = end;
2273 break;
2275 case 'm':
2276 op_alt[j].memory_ok = 1;
2277 break;
2278 case '<':
2279 op_alt[j].decmem_ok = 1;
2280 break;
2281 case '>':
2282 op_alt[j].incmem_ok = 1;
2283 break;
2284 case 'V':
2285 op_alt[j].nonoffmem_ok = 1;
2286 break;
2287 case 'o':
2288 op_alt[j].offmem_ok = 1;
2289 break;
2290 case 'X':
2291 op_alt[j].anything_ok = 1;
2292 break;
2294 case 'p':
2295 op_alt[j].is_address = 1;
2296 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2297 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2298 break;
2300 case 'g': case 'r':
2301 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2302 break;
2304 default:
2305 if (EXTRA_MEMORY_CONSTRAINT (c))
2307 op_alt[j].memory_ok = 1;
2308 break;
2310 if (EXTRA_ADDRESS_CONSTRAINT (c))
2312 op_alt[j].is_address = 1;
2313 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2314 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2315 break;
2318 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
2319 break;
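/* An illustrative sketch, not part of the original file: how a caller
   might consume the preprocessed data.  For an operand with constraint
   "=r,m", alternative 0 ends up with class GENERAL_REGS and alternative
   1 with memory_ok set.  The function name is hypothetical and assumes
   extract_insn and preprocess_constraints have run.  */
#if 0
static int
example_alt_allows_memory_p (opno, alt)
     int opno, alt;
{
  return (recog_op_alt[opno][alt].memory_ok
	  || recog_op_alt[opno][alt].anything_ok);
}
#endif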
2326 /* Check the operands of an insn against the insn's operand constraints
2327 and return 1 if they are valid.
2328 The information about the insn's operands, constraints, operand modes
2329 etc. is obtained from the global variables set up by extract_insn.
2331 WHICH_ALTERNATIVE is set to a number which indicates which
2332 alternative of constraints was matched: 0 for the first alternative,
2333 1 for the next, etc.
2335 In addition, when two operands are required to match
2336 and it happens that the output operand is (reg) while the
2337 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2338 make the output operand look like the input.
2339 This is because the output operand is the one the template will print.
2341 This is used in final, just before printing the assembler code, and by
2342 the routines that determine an insn's attributes.
2344 If STRICT is a positive value, it means that we have been
2345 called after reload has been completed. In that case, we must
2346 do all checks strictly. If it is zero, it means that we have been called
2347 before reload has completed. In that case, we first try to see if we can
2348 find an alternative that matches strictly. If not, we try again, this
2349 time assuming that reload will fix up the insn. This provides a "best
2350 guess" for the alternative and is used to compute attributes of insns prior
2351 to reload. A negative value of STRICT is used for this internal call. */
2353 struct funny_match
2355 int this, other;
2359 constrain_operands (strict)
2360 int strict;
2362 const char *constraints[MAX_RECOG_OPERANDS];
2363 int matching_operands[MAX_RECOG_OPERANDS];
2364 int earlyclobber[MAX_RECOG_OPERANDS];
2365 int c;
2367 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2368 int funny_match_index;
2370 which_alternative = 0;
2371 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2372 return 1;
2374 for (c = 0; c < recog_data.n_operands; c++)
2376 constraints[c] = recog_data.constraints[c];
2377 matching_operands[c] = -1;
2382 int opno;
2383 int lose = 0;
2384 funny_match_index = 0;
2386 for (opno = 0; opno < recog_data.n_operands; opno++)
2388 rtx op = recog_data.operand[opno];
2389 enum machine_mode mode = GET_MODE (op);
2390 const char *p = constraints[opno];
2391 int offset = 0;
2392 int win = 0;
2393 int val;
2395 earlyclobber[opno] = 0;
2397 /* A unary operator may be accepted by the predicate, but it
2398 is irrelevant for matching constraints. */
2399 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2400 op = XEXP (op, 0);
2402 if (GET_CODE (op) == SUBREG)
2404 if (GET_CODE (SUBREG_REG (op)) == REG
2405 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2406 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2407 GET_MODE (SUBREG_REG (op)),
2408 SUBREG_BYTE (op),
2409 GET_MODE (op));
2410 op = SUBREG_REG (op);
2413 /* An empty constraint or empty alternative
2414 allows anything which matched the pattern. */
2415 if (*p == 0 || *p == ',')
2416 win = 1;
2418 while (*p && (c = *p++) != ',')
2419 switch (c)
2421 case '?': case '!': case '*': case '%':
2422 case '=': case '+':
2423 break;
2425 case '#':
2426 /* Ignore rest of this alternative as far as
2427 constraint checking is concerned. */
2428 while (*p && *p != ',')
2429 p++;
2430 break;
2432 case '&':
2433 earlyclobber[opno] = 1;
2434 break;
2436 case '0': case '1': case '2': case '3': case '4':
2437 case '5': case '6': case '7': case '8': case '9':
2439 /* This operand must be the same as a previous one.
2440 This kind of constraint is used for instructions such
2441 as add when they take only two operands.
2443 Note that the lower-numbered operand is passed first.
2445 If we are not testing strictly, assume that this
2446 constraint will be satisfied. */
2448 char *end;
2449 int match;
2451 match = strtoul (p - 1, &end, 10);
2452 p = end;
2454 if (strict < 0)
2455 val = 1;
2456 else
2458 rtx op1 = recog_data.operand[match];
2459 rtx op2 = recog_data.operand[opno];
2461 /* A unary operator may be accepted by the predicate,
2462 but it is irrelevant for matching constraints. */
2463 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2464 op1 = XEXP (op1, 0);
2465 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2466 op2 = XEXP (op2, 0);
2468 val = operands_match_p (op1, op2);
2471 matching_operands[opno] = match;
2472 matching_operands[match] = opno;
2474 if (val != 0)
2475 win = 1;
2477 /* If output is *x and input is *--x, arrange later
2478 to change the output to *--x as well, since the
2479 output op is the one that will be printed. */
2480 if (val == 2 && strict > 0)
2482 funny_match[funny_match_index].this = opno;
2483 funny_match[funny_match_index++].other = match;
2486 break;
2488 case 'p':
2489 /* p is used for address_operands. When we are called by
2490 gen_reload, no one will have checked that the address is
2491 strictly valid, i.e., that all pseudos requiring hard regs
2492 have gotten them. */
2493 if (strict <= 0
2494 || (strict_memory_address_p (recog_data.operand_mode[opno],
2495 op)))
2496 win = 1;
2497 break;
2499 /* No need to check general_operand again;
2500 it was done in insn-recog.c. */
2501 case 'g':
2502 /* Anything goes unless it is a REG and really has a hard reg
2503 but the hard reg is not in the class GENERAL_REGS. */
2504 if (strict < 0
2505 || GENERAL_REGS == ALL_REGS
2506 || GET_CODE (op) != REG
2507 || (reload_in_progress
2508 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2509 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2510 win = 1;
2511 break;
2513 case 'X':
2514 /* This is used for a MATCH_SCRATCH in the cases when
2515 we don't actually need anything. So anything goes
2516 any time. */
2517 win = 1;
2518 break;
2520 case 'm':
2521 if (GET_CODE (op) == MEM
2522 /* Before reload, accept what reload can turn into mem. */
2523 || (strict < 0 && CONSTANT_P (op))
2524 /* During reload, accept a pseudo */
2525 || (reload_in_progress && GET_CODE (op) == REG
2526 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2527 win = 1;
2528 break;
2530 case '<':
2531 if (GET_CODE (op) == MEM
2532 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2533 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2534 win = 1;
2535 break;
2537 case '>':
2538 if (GET_CODE (op) == MEM
2539 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2540 || GET_CODE (XEXP (op, 0)) == POST_INC))
2541 win = 1;
2542 break;
2544 case 'E':
2545 case 'F':
2546 if (GET_CODE (op) == CONST_DOUBLE
2547 || (GET_CODE (op) == CONST_VECTOR
2548 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2549 win = 1;
2550 break;
2552 case 'G':
2553 case 'H':
2554 if (GET_CODE (op) == CONST_DOUBLE
2555 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2556 win = 1;
2557 break;
2559 case 's':
2560 if (GET_CODE (op) == CONST_INT
2561 || (GET_CODE (op) == CONST_DOUBLE
2562 && GET_MODE (op) == VOIDmode))
2563 break;
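/* Fall through: `s' rejects explicit integer constants above, but
   otherwise accepts exactly what `i' accepts.  */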
2564 case 'i':
2565 if (CONSTANT_P (op))
2566 win = 1;
2567 break;
2569 case 'n':
2570 if (GET_CODE (op) == CONST_INT
2571 || (GET_CODE (op) == CONST_DOUBLE
2572 && GET_MODE (op) == VOIDmode))
2573 win = 1;
2574 break;
2576 case 'I':
2577 case 'J':
2578 case 'K':
2579 case 'L':
2580 case 'M':
2581 case 'N':
2582 case 'O':
2583 case 'P':
2584 if (GET_CODE (op) == CONST_INT
2585 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2586 win = 1;
2587 break;
2589 case 'V':
2590 if (GET_CODE (op) == MEM
2591 && ((strict > 0 && ! offsettable_memref_p (op))
2592 || (strict < 0
2593 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2594 || (reload_in_progress
2595 && !(GET_CODE (op) == REG
2596 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2597 win = 1;
2598 break;
2600 case 'o':
2601 if ((strict > 0 && offsettable_memref_p (op))
2602 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2603 /* Before reload, accept what reload can handle. */
2604 || (strict < 0
2605 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2606 /* During reload, accept a pseudo */
2607 || (reload_in_progress && GET_CODE (op) == REG
2608 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2609 win = 1;
2610 break;
2612 default:
2614 enum reg_class class;
2616 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2617 if (class != NO_REGS)
2619 if (strict < 0
2620 || (strict == 0
2621 && GET_CODE (op) == REG
2622 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2623 || (strict == 0 && GET_CODE (op) == SCRATCH)
2624 || (GET_CODE (op) == REG
2625 && reg_fits_class_p (op, class, offset, mode)))
2626 win = 1;
2628 #ifdef EXTRA_CONSTRAINT
2629 else if (EXTRA_CONSTRAINT (op, c))
2630 win = 1;
2632 if (EXTRA_MEMORY_CONSTRAINT (c))
2634 /* Every memory operand can be reloaded to fit,
2635 so copy the condition from the 'm' case. */
2636 if (GET_CODE (op) == MEM
2637 /* Before reload, accept what reload can turn into mem. */
2638 || (strict < 0 && CONSTANT_P (op))
2639 /* During reload, accept a pseudo */
2640 || (reload_in_progress && GET_CODE (op) == REG
2641 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2642 win = 1;
2644 if (EXTRA_ADDRESS_CONSTRAINT (c))
2646 /* Every address operand can be reloaded to fit,
2647 so copy the condition from the 'p' case. */
2648 if (strict <= 0
2649 || (strict_memory_address_p (recog_data.operand_mode[opno],
2650 op)))
2651 win = 1;
2653 #endif
2654 break;
2658 constraints[opno] = p;
2659 /* If this operand did not win somehow,
2660 this alternative loses. */
2661 if (! win)
2662 lose = 1;
2664 /* This alternative won; the operands are ok.
2665 Change whichever operands this alternative says to change. */
2666 if (! lose)
2668 int opno, eopno;
2670 /* See if any earlyclobber operand conflicts with some other
2671 operand. */
2673 if (strict > 0)
2674 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2675 /* Ignore earlyclobber operands now in memory,
2676 because we would often report failure when we have
2677 two memory operands, one of which was formerly a REG. */
2678 if (earlyclobber[eopno]
2679 && GET_CODE (recog_data.operand[eopno]) == REG)
2680 for (opno = 0; opno < recog_data.n_operands; opno++)
2681 if ((GET_CODE (recog_data.operand[opno]) == MEM
2682 || recog_data.operand_type[opno] != OP_OUT)
2683 && opno != eopno
2684 /* Ignore things like match_operator operands. */
2685 && *recog_data.constraints[opno] != 0
2686 && ! (matching_operands[opno] == eopno
2687 && operands_match_p (recog_data.operand[opno],
2688 recog_data.operand[eopno]))
2689 && ! safe_from_earlyclobber (recog_data.operand[opno],
2690 recog_data.operand[eopno]))
2691 lose = 1;
2693 if (! lose)
2695 while (--funny_match_index >= 0)
2697 recog_data.operand[funny_match[funny_match_index].other]
2698 = recog_data.operand[funny_match[funny_match_index].this];
2701 return 1;
2705 which_alternative++;
2707 while (which_alternative < recog_data.n_alternatives);
2709 which_alternative = -1;
2710 /* If we are about to reject this, but we are not to test strictly,
2711 try a very loose test. Only return failure if it fails also. */
2712 if (strict == 0)
2713 return constrain_operands (-1);
2714 else
2715 return 0;
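/* An illustrative sketch, not part of the original file: the usual
   calling sequence, mirroring extract_constrain_insn_cached above.
   The function name is hypothetical.  */
#if 0
static void
example_check_insn (insn)
     rtx insn;
{
  extract_insn (insn);
  /* Strict checking after reload, loose checking before.  */
  if (! constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
#endif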
2718 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2719 is a hard reg in class CLASS when its regno is offset by OFFSET
2720 and changed to mode MODE.
2721 If REG occupies multiple hard regs, all of them must be in CLASS. */
2724 reg_fits_class_p (operand, class, offset, mode)
2725 rtx operand;
2726 enum reg_class class;
2727 int offset;
2728 enum machine_mode mode;
2730 int regno = REGNO (operand);
2731 if (regno < FIRST_PSEUDO_REGISTER
2732 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2733 regno + offset))
2735 int sr;
2736 regno += offset;
2737 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2738 sr > 0; sr--)
2739 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2740 regno + sr))
2741 break;
2742 return sr == 0;
2745 return 0;
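/* For example, for a value that needs two hard registers
   (HARD_REGNO_NREGS == 2), both REGNO+OFFSET and REGNO+OFFSET+1 must
   lie in CLASS for the function to return 1.  */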
2748 /* Split single instruction. Helper function for split_all_insns.
2749 Return last insn in the sequence if successful, or NULL if unsuccessful. */
2750 static rtx
2751 split_insn (insn)
2752 rtx insn;
2754 rtx set;
2755 if (!INSN_P (insn))
2757 /* Don't split no-op move insns. These should silently
2758 disappear later in final. Splitting such insns would
2759 break the code that handles REG_NO_CONFLICT blocks. */
2761 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2763 /* Nops get in the way while scheduling, so delete them
2764 now if register allocation has already been done. It
2765 is too risky to try to do this before register
2766 allocation, and there are unlikely to be very many
2767 nops then anyway. */
2768 if (reload_completed)
2769 delete_insn_and_edges (insn);
2771 else
2773 /* Split insns here to get max fine-grain parallelism. */
2774 rtx first = PREV_INSN (insn);
2775 rtx last = try_split (PATTERN (insn), insn, 1);
2777 if (last != insn)
2779 /* try_split returns the NOTE that INSN became. */
2780 PUT_CODE (insn, NOTE);
2781 NOTE_SOURCE_FILE (insn) = 0;
2782 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2784 /* ??? Coddle to md files that generate subregs in post-
2785 reload splitters instead of computing the proper
2786 hard register. */
2787 if (reload_completed && first != last)
2789 first = NEXT_INSN (first);
2790 while (1)
2792 if (INSN_P (first))
2793 cleanup_subreg_operands (first);
2794 if (first == last)
2795 break;
2796 first = NEXT_INSN (first);
2799 return last;
2802 return NULL_RTX;
2804 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2806 void
2807 split_all_insns (upd_life)
2808 int upd_life;
2810 sbitmap blocks;
2811 int changed;
2812 basic_block bb;
2814 blocks = sbitmap_alloc (last_basic_block);
2815 sbitmap_zero (blocks);
2816 changed = 0;
2818 FOR_EACH_BB_REVERSE (bb)
2820 rtx insn, next;
2821 bool finish = false;
2823 for (insn = bb->head; !finish ; insn = next)
2825 rtx last;
2827 /* Can't use `next_real_insn' because that might go across
2828 CODE_LABELS and short-out basic blocks. */
2829 next = NEXT_INSN (insn);
2830 finish = (insn == bb->end);
2831 last = split_insn (insn);
2832 if (last)
2834 /* The split sequence may include a barrier, but the
2835 BB boundary we are interested in will be set to the
2836 previous one. */
2838 while (GET_CODE (last) == BARRIER)
2839 last = PREV_INSN (last);
2840 SET_BIT (blocks, bb->index);
2841 changed = 1;
2842 insn = last;
2847 if (changed)
2849 find_many_sub_basic_blocks (blocks);
2852 if (changed && upd_life)
2854 count_or_remove_death_notes (blocks, 1);
2855 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2857 #ifdef ENABLE_CHECKING
2858 verify_flow_info ();
2859 #endif
2861 sbitmap_free (blocks);
2864 /* Same as split_all_insns, but do not expect the CFG to be available.
2865 Used by machine dependent reorg passes. */
2867 void
2868 split_all_insns_noflow ()
2870 rtx next, insn;
2872 for (insn = get_insns (); insn; insn = next)
2874 next = NEXT_INSN (insn);
2875 split_insn (insn);
2877 return;
2880 #ifdef HAVE_peephole2
2881 struct peep2_insn_data
2883 rtx insn;
2884 regset live_before;
2887 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2888 static int peep2_current;
2890 /* A non-insn marker indicating the last insn of the block.
2891 The live_before regset for this element is correct, indicating
2892 global_live_at_end for the block. */
2893 #define PEEP2_EOB pc_rtx
2895 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2896 does not exist. Used by the recognizer to find the next insn to match
2897 in a multi-insn pattern. */
2900 peep2_next_insn (n)
2901 int n;
2903 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2904 abort ();
2906 n += peep2_current;
2907 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2908 n -= MAX_INSNS_PER_PEEP2 + 1;
2910 if (peep2_insn_data[n].insn == PEEP2_EOB)
2911 return NULL_RTX;
2912 return peep2_insn_data[n].insn;
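/* Note that peep2_insn_data is a circular buffer of
   MAX_INSNS_PER_PEEP2 + 1 slots, so the additions above wrap the index
   instead of walking the insn chain; hitting the PEEP2_EOB marker means
   the window ran off the end of the basic block.  */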
2915 /* Return true if REGNO is dead before the Nth non-note insn
2916 after `current'. */
2919 peep2_regno_dead_p (ofs, regno)
2920 int ofs;
2921 int regno;
2923 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2924 abort ();
2926 ofs += peep2_current;
2927 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2928 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2930 if (peep2_insn_data[ofs].insn == NULL_RTX)
2931 abort ();
2933 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2936 /* Similarly for a REG. */
2939 peep2_reg_dead_p (ofs, reg)
2940 int ofs;
2941 rtx reg;
2943 int regno, n;
2945 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2946 abort ();
2948 ofs += peep2_current;
2949 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2950 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2952 if (peep2_insn_data[ofs].insn == NULL_RTX)
2953 abort ();
2955 regno = REGNO (reg);
2956 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2957 while (--n >= 0)
2958 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2959 return 0;
2960 return 1;
2963 /* Try to find a hard register of mode MODE, matching the register class in
2964 CLASS_STR, which is available at the beginning of the insn at peephole
2965 window position FROM and remains available until the end of the insn at
2966 position TO. Positions count non-note insns after `current', as in
2967 peep2_next_insn, and are wrapped into the circular buffer below.
2968 Registers that already have bits set in REG_SET will not be considered.
2970 If an appropriate register is available, it will be returned and the
2971 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2972 returned. */
2975 peep2_find_free_register (from, to, class_str, mode, reg_set)
2976 int from, to;
2977 const char *class_str;
2978 enum machine_mode mode;
2979 HARD_REG_SET *reg_set;
2981 static int search_ofs;
2982 enum reg_class class;
2983 HARD_REG_SET live;
2984 int i;
2986 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2987 abort ();
2989 from += peep2_current;
2990 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2991 from -= MAX_INSNS_PER_PEEP2 + 1;
2992 to += peep2_current;
2993 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2994 to -= MAX_INSNS_PER_PEEP2 + 1;
2996 if (peep2_insn_data[from].insn == NULL_RTX)
2997 abort ();
2998 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3000 while (from != to)
3002 HARD_REG_SET this_live;
3004 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3005 from = 0;
3006 if (peep2_insn_data[from].insn == NULL_RTX)
3007 abort ();
3008 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3009 IOR_HARD_REG_SET (live, this_live);
3012 class = (class_str[0] == 'r' ? GENERAL_REGS
3013 : REG_CLASS_FROM_LETTER (class_str[0]));
3015 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3017 int raw_regno, regno, success, j;
3019 /* Distribute the free registers as much as possible. */
3020 raw_regno = search_ofs + i;
3021 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3022 raw_regno -= FIRST_PSEUDO_REGISTER;
3023 #ifdef REG_ALLOC_ORDER
3024 regno = reg_alloc_order[raw_regno];
3025 #else
3026 regno = raw_regno;
3027 #endif
3029 /* Don't allocate fixed registers. */
3030 if (fixed_regs[regno])
3031 continue;
3032 /* Make sure the register is of the right class. */
3033 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3034 continue;
3035 /* And can support the mode we need. */
3036 if (! HARD_REGNO_MODE_OK (regno, mode))
3037 continue;
3038 /* And that we don't create an extra save/restore. */
3039 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3040 continue;
3041 /* And we don't clobber traceback for noreturn functions. */
3042 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3043 && (! reload_completed || frame_pointer_needed))
3044 continue;
3046 success = 1;
3047 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3049 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3050 || TEST_HARD_REG_BIT (live, regno + j))
3052 success = 0;
3053 break;
3056 if (success)
3058 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3059 SET_HARD_REG_BIT (*reg_set, regno + j);
3061 /* Start the next search with the next register. */
3062 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3063 raw_regno = 0;
3064 search_ofs = raw_regno;
3066 return gen_rtx_REG (mode, regno);
3070 search_ofs = 0;
3071 return NULL_RTX;
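/* An illustrative sketch, not part of the original file: preparation
   statements of a define_peephole2 in a target .md file might allocate
   a scratch register like this.  The window positions and mode are
   hypothetical.  */
#if 0
{
  HARD_REG_SET used;
  rtx scratch;

  CLEAR_HARD_REG_SET (used);
  /* Ask for a SImode general register that is free from the first
     matched insn (position 0) through the last (position 2).  */
  scratch = peep2_find_free_register (0, 2, "r", SImode, &used);
  if (scratch == NULL_RTX)
    FAIL;
}
#endif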
3074 /* Perform the peephole2 optimization pass. */
3076 void
3077 peephole2_optimize (dump_file)
3078 FILE *dump_file ATTRIBUTE_UNUSED;
3080 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3081 rtx insn, prev;
3082 regset live;
3083 int i;
3084 basic_block bb;
3085 #ifdef HAVE_conditional_execution
3086 sbitmap blocks;
3087 bool changed;
3088 #endif
3089 bool do_cleanup_cfg = false;
3090 bool do_rebuild_jump_labels = false;
3092 /* Initialize the regsets we're going to use. */
3093 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3094 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3095 live = INITIALIZE_REG_SET (rs_heads[i]);
3097 #ifdef HAVE_conditional_execution
3098 blocks = sbitmap_alloc (last_basic_block);
3099 sbitmap_zero (blocks);
3100 changed = false;
3101 #else
3102 count_or_remove_death_notes (NULL, 1);
3103 #endif
3105 FOR_EACH_BB_REVERSE (bb)
3107 struct propagate_block_info *pbi;
3109 /* Indicate that all slots except the last hold invalid data. */
3110 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3111 peep2_insn_data[i].insn = NULL_RTX;
3113 /* Indicate that the last slot contains live_after data. */
3114 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3115 peep2_current = MAX_INSNS_PER_PEEP2;
3117 /* Start up propagation. */
3118 COPY_REG_SET (live, bb->global_live_at_end);
3119 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3121 #ifdef HAVE_conditional_execution
3122 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3123 #else
3124 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3125 #endif
3127 for (insn = bb->end; ; insn = prev)
3129 prev = PREV_INSN (insn);
3130 if (INSN_P (insn))
3132 rtx try, before_try, x;
3133 int match_len;
3134 rtx note;
3135 bool was_call = false;
3137 /* Record this insn. */
3138 if (--peep2_current < 0)
3139 peep2_current = MAX_INSNS_PER_PEEP2;
3140 peep2_insn_data[peep2_current].insn = insn;
3141 propagate_one_insn (pbi, insn);
3142 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3144 /* Match the peephole. */
3145 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3146 if (try != NULL)
3148 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3149 in SEQ and copy our CALL_INSN_FUNCTION_USAGE and other
3150 cfg-related call notes. */
3151 for (i = 0; i <= match_len; ++i)
3153 int j;
3154 rtx old_insn, new_insn, note;
3156 j = i + peep2_current;
3157 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3158 j -= MAX_INSNS_PER_PEEP2 + 1;
3159 old_insn = peep2_insn_data[j].insn;
3160 if (GET_CODE (old_insn) != CALL_INSN)
3161 continue;
3162 was_call = true;
3164 new_insn = try;
3165 while (new_insn != NULL_RTX)
3167 if (GET_CODE (new_insn) == CALL_INSN)
3168 break;
3169 new_insn = NEXT_INSN (new_insn);
3172 if (new_insn == NULL_RTX)
3173 abort ();
3175 CALL_INSN_FUNCTION_USAGE (new_insn)
3176 = CALL_INSN_FUNCTION_USAGE (old_insn);
3178 for (note = REG_NOTES (old_insn);
3179 note;
3180 note = XEXP (note, 1))
3181 switch (REG_NOTE_KIND (note))
3183 case REG_NORETURN:
3184 case REG_SETJMP:
3185 case REG_ALWAYS_RETURN:
3186 REG_NOTES (new_insn)
3187 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3188 XEXP (note, 0),
3189 REG_NOTES (new_insn));
3190 default:
3191 /* Discard all other reg notes. */
3192 break;
3195 /* Croak if there is another call in the sequence. */
3196 while (++i <= match_len)
3198 j = i + peep2_current;
3199 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3200 j -= MAX_INSNS_PER_PEEP2 + 1;
3201 old_insn = peep2_insn_data[j].insn;
3202 if (GET_CODE (old_insn) == CALL_INSN)
3203 abort ();
3205 break;
3208 i = match_len + peep2_current;
3209 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3210 i -= MAX_INSNS_PER_PEEP2 + 1;
3212 note = find_reg_note (peep2_insn_data[i].insn,
3213 REG_EH_REGION, NULL_RTX);
3215 /* Replace the old sequence with the new. */
3216 try = emit_insn_after_scope (try, peep2_insn_data[i].insn,
3217 INSN_SCOPE (peep2_insn_data[i].insn));
3218 before_try = PREV_INSN (insn);
3219 delete_insn_chain (insn, peep2_insn_data[i].insn);
3221 /* Re-insert the EH_REGION notes. */
3222 if (note || (was_call && nonlocal_goto_handler_labels))
3224 edge eh_edge;
3226 for (eh_edge = bb->succ; eh_edge
3227 ; eh_edge = eh_edge->succ_next)
3228 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3229 break;
3231 for (x = try ; x != before_try ; x = PREV_INSN (x))
3232 if (GET_CODE (x) == CALL_INSN
3233 || (flag_non_call_exceptions
3234 && may_trap_p (PATTERN (x))
3235 && !find_reg_note (x, REG_EH_REGION, NULL)))
3237 if (note)
3238 REG_NOTES (x)
3239 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3240 XEXP (note, 0),
3241 REG_NOTES (x));
3243 if (x != bb->end && eh_edge)
3245 edge nfte, nehe;
3246 int flags;
3248 nfte = split_block (bb, x);
3249 flags = (eh_edge->flags
3250 & (EDGE_EH | EDGE_ABNORMAL));
3251 if (GET_CODE (x) == CALL_INSN)
3252 flags |= EDGE_ABNORMAL_CALL;
3253 nehe = make_edge (nfte->src, eh_edge->dest,
3254 flags);
3256 nehe->probability = eh_edge->probability;
3257 nfte->probability
3258 = REG_BR_PROB_BASE - nehe->probability;
3260 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3261 #ifdef HAVE_conditional_execution
3262 SET_BIT (blocks, nfte->dest->index);
3263 changed = true;
3264 #endif
3265 bb = nfte->src;
3266 eh_edge = nehe;
3270 /* The new sequence may have turned a possibly trapping insn
3271 into a non-trapping one. Zap any dummy outgoing edges. */
3272 do_cleanup_cfg |= purge_dead_edges (bb);
3275 #ifdef HAVE_conditional_execution
3276 /* With conditional execution, we cannot back up the
3277 live information so easily, since the conditional
3278 death data structures are not so self-contained.
3279 So record that we've made a modification to this
3280 block and update life information at the end. */
3281 SET_BIT (blocks, bb->index);
3282 changed = true;
3284 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3285 peep2_insn_data[i].insn = NULL_RTX;
3286 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3287 #else
3288 /* Back up lifetime information past the end of the
3289 newly created sequence. */
3290 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3291 i = 0;
3292 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3294 /* Update life information for the new sequence. */
3295 x = try;
3298 if (INSN_P (x))
3300 if (--i < 0)
3301 i = MAX_INSNS_PER_PEEP2;
3302 peep2_insn_data[i].insn = x;
3303 propagate_one_insn (pbi, x);
3304 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3306 x = PREV_INSN (x);
3308 while (x != prev);
3310 /* ??? Should verify that LIVE now matches what we
3311 had before the new sequence. */
3313 peep2_current = i;
3314 #endif
3316 /* If we generated a jump instruction, it won't have
3317 JUMP_LABEL set. Recompute after we're done. */
3318 for (x = try; x != before_try; x = PREV_INSN (x))
3319 if (GET_CODE (x) == JUMP_INSN)
3321 do_rebuild_jump_labels = true;
3322 break;
3327 if (insn == bb->head)
3328 break;
3331 free_propagate_block_info (pbi);
3334 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3335 FREE_REG_SET (peep2_insn_data[i].live_before);
3336 FREE_REG_SET (live);
3338 if (do_rebuild_jump_labels)
3339 rebuild_jump_labels (get_insns ());
3341 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3342 we've changed global life since exception handlers are no longer
3343 reachable. */
3344 if (do_cleanup_cfg)
3346 cleanup_cfg (0);
3347 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3349 #ifdef HAVE_conditional_execution
3350 else
3352 count_or_remove_death_notes (blocks, 1);
3353 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3355 sbitmap_free (blocks);
3356 #endif
3358 #endif /* HAVE_peephole2 */
3360 /* Common predicates for use with define_bypass. */
3362 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3363 data, not the address operand(s) of the store. IN_INSN must be
3364 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3365 SETs inside. */
3368 store_data_bypass_p (out_insn, in_insn)
3369 rtx out_insn, in_insn;
3371 rtx out_set, in_set;
3373 in_set = single_set (in_insn);
3374 if (! in_set)
3375 abort ();
3377 if (GET_CODE (SET_DEST (in_set)) != MEM)
3378 return false;
3380 out_set = single_set (out_insn);
3381 if (out_set)
3383 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3384 return false;
3386 else
3388 rtx out_pat;
3389 int i;
3391 out_pat = PATTERN (out_insn);
3392 if (GET_CODE (out_pat) != PARALLEL)
3393 abort ();
3395 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3397 rtx exp = XVECEXP (out_pat, 0, i);
3399 if (GET_CODE (exp) == CLOBBER)
3400 continue;
3402 if (GET_CODE (exp) != SET)
3403 abort ();
3405 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3406 return false;
3410 return true;
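/* An illustrative sketch, not part of the original file: a target .md
   file would name this predicate as the guard of a define_bypass, e.g.

     (define_bypass 1 "hypothetical_store" "hypothetical_load"
		    "store_data_bypass_p")

   where the insn reservation names are made up for the example.  */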
3413 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3414 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3415 or multiple set; IN_INSN should be single_set for truth, but for convenience
3416 of insn categorization may be any JUMP or CALL insn. */
3419 if_test_bypass_p (out_insn, in_insn)
3420 rtx out_insn, in_insn;
3422 rtx out_set, in_set;
3424 in_set = single_set (in_insn);
3425 if (! in_set)
3427 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3428 return false;
3429 abort ();
3432 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3433 return false;
3434 in_set = SET_SRC (in_set);
3436 out_set = single_set (out_insn);
3437 if (out_set)
3439 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3440 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3441 return false;
3443 else
3445 rtx out_pat;
3446 int i;
3448 out_pat = PATTERN (out_insn);
3449 if (GET_CODE (out_pat) != PARALLEL)
3450 abort ();
3452 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3454 rtx exp = XVECEXP (out_pat, 0, i);
3456 if (GET_CODE (exp) == CLOBBER)
3457 continue;
3459 if (GET_CODE (exp) != SET)
3460 abort ();
3462 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3463 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3464 return false;
3468 return true;