/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
static void validate_replace_src_1 PARAMS ((rtx *, void *));
static rtx split_insn PARAMS ((rtx));

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}
/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes =
        (change_t*) xrealloc (changes,
                              sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
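
/* Illustrative sketch (not part of the original source): a pass that wants
   to rewrite both sides of a SET atomically might queue the changes as a
   group and then commit them, e.g.

       validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 1);
       validate_change (insn, &SET_DEST (PATTERN (insn)), new_dest, 1);
       if (! apply_change_group ())
         ;  /* Both changes were rolled back; INSN is unmodified.  */

   where NEW_SRC and NEW_DEST stand for hypothetical replacement rtxes.  */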
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Return number of changes made and not validated yet.  */
int
num_changes_pending ()
{
  return num_changes;
}
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
         already tested, ignore it.  */
      if (object == 0 || object == last_validated)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
              continue;
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
        if (changes[i].object
            && INSN_P (changes[i].object)
            && (bb = BLOCK_FOR_INSN (changes[i].object)))
          bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
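
/* Illustrative sketch (not part of the original source): a caller can
   checkpoint a partially built group with num_validated_changes and
   retract only the speculative tail with cancel_changes, e.g.

       int checkpoint = num_validated_changes ();
       validate_change (insn, loc, speculative_rtx, 1);
       if (substitution_turned_out_unprofitable)
         cancel_changes (checkpoint);

   where SPECULATIVE_RTX and the condition are hypothetical.  */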
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.  */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow substituted expression to have different mode.  This is used by
     regmove to change mode of pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
                       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
                                       : swap_condition (code),
                                       GET_MODE (x), XEXP (x, 1),
                                       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         plus_constant to try to simplify it.
         ??? We may want later to remove this, once simplification is
         separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
        validate_change (object, loc,
                         simplify_gen_binary
                         (PLUS, GET_MODE (x), XEXP (x, 0),
                          simplify_gen_unary (NEG,
                                              GET_MODE (x), XEXP (x, 1),
                                              GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        {
          new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
                                    op0_mode);
          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
          validate_change (object, loc, new, 1);
        }
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
                             SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
        new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
        validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
          && !MEM_VOLATILE_P (XEXP (x, 0)))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
          int pos = INTVAL (XEXP (x, 2));

          if (GET_CODE (x) == ZERO_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extzv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }
          else if (GET_CODE (x) == SIGN_EXTRACT)
            {
              enum machine_mode new_mode
                = mode_for_extraction (EP_extv, 1);
              if (new_mode != MAX_MACHINE_MODE)
                wanted_mode = new_mode;
            }

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset =
                  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
                   offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  */

void
validate_replace_src_group (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
}

/* Same as validate_replace_src_group, but validate by seeing if
   INSN is still valid.  */

int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_src_group (from, to, insn);
  return apply_change_group ();
}
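
/* Illustrative sketch (not part of the original source): a typical use of
   the validate_replace_* entry points is forward propagation of a copy,
   e.g. replacing uses of REG_A with REG_B in the sources of USE_INSN:

       if (validate_replace_src (reg_a, reg_b, use_insn))
         ;  /* USE_INSN was rewritten and still recognizes.  */

   where REG_A, REG_B and USE_INSN are hypothetical.  */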
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
          must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
          || GET_CODE (next) == BARRIER)
        return 1;
      if (GET_CODE (next) == NOTE)
        continue;
      if (inequality_comparisons_p (PATTERN (next)))
        return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
        return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
        return 1;
    }
  return 1;
}
#endif
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
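
/* Illustrative sketch (not part of the original source): combine-like code
   can use find_single_use to decide whether a computed value may be
   substituted into its lone consumer, e.g.

       rtx use_insn;
       rtx *use_loc = find_single_use (dest, insn, &use_insn);
       if (use_loc)
         validate_change (use_insn, use_loc, replacement, 0);

   where DEST, INSN and REPLACEMENT are hypothetical.  */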
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (sub) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
         may result in an incorrect reference.  We should simplify all valid
         subregs of MEM anyway.  But allow this after reload because we
         might be called from cleanup_subreg_operands.

         ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
          && GET_CODE (sub) == MEM)
        return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      if (GET_CODE (y) == ADDRESSOF)
        return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
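
/* Illustrative examples (not part of the original source): with MODE ==
   SImode, general_operand accepts things like (reg:SI 65), (const_int 42),
   and (mem:SI ...) whose address is legitimate for the target; it rejects
   a volatile MEM unless volatile_ok is set.  */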
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (sub) == MEM)
        return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (sub) == REG
          && REGNO (sub) < FIRST_PSEUDO_REGISTER
          && (TEST_HARD_REG_BIT
              (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
               REGNO (sub)))
          && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (sub))
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
         create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
          && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
        return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (GET_CODE (op) == REG
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
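
/* Illustrative example (not part of the original source): on a target
   where the stack grows downward and no push rounding applies, a matching
   operand looks like

       (mem:SI (pre_dec:SI (reg:SI sp)))

   i.e. STACK_PUSH_CODE applied to the stack pointer.  */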
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}
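
/* Illustrative example (not part of the original source): for a body of
   the form [(set (reg:SI 60) (asm_operands ... with 2 inputs))
   (clobber (reg:CC flags))], asm_noperands returns 3: one output from the
   SET plus the two ASM_OPERANDS inputs; the CLOBBER is not counted.  */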
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;		/* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
        {
        case '=':
        case '+':
        case '*':
        case '%':
        case '?':
        case '!':
        case '#':
        case '&':
        case ',':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          while (ISDIGIT (*constraint))
            constraint++;
          result = -1;
          break;

        case 'p':
          if (address_operand (op, VOIDmode))
            return 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            return 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            return 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            return 1;
          break;

        case '>':
          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            return 1;
          break;

        case 'E':
        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE
              || (GET_CODE (op) == CONST_VECTOR
                  && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
            return 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
            return 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
            return 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* FALLTHRU */

        case 'i':
          if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              )
            return 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            return 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
            return 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
            return 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
            return 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
            return 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
            return 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
            return 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
            return 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
            return 1;
          break;

        case 'X':
          return 1;

        case 'g':
          if (general_operand (op, VOIDmode))
            return 1;
          break;

        default:
          /* For all other letters, we first check for a register class,
             otherwise it is an EXTRA_CONSTRAINT.  */
          if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
            {
            case 'r':
              if (GET_MODE (op) == BLKmode)
                break;
              if (register_operand (op, VOIDmode))
                return 1;
            }
#ifdef EXTRA_CONSTRAINT
          if (EXTRA_CONSTRAINT (op, c))
            return 1;
#endif
          break;
        }
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (p)
     rtx *p;
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
        return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
        return tem;
    }

  return 0;
}
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
          && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
          && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     rtx y;
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
         for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == 'a')
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
                        plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
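
/* Illustrative example (not part of the original source): for MODE ==
   SImode the check effectively asks whether (plus Y (const_int 3)) is
   still a valid QImode address, so (plus (reg) (const_int 4)) is
   typically offsettable while an autoincrement address is not.  */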
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (addr)
     rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
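
/* Illustrative example (not part of the original source): on a target
   with autoincrement addressing, (post_inc:SI (reg:SI 60)) is mode
   dependent, since the increment amount is the size of the mode being
   accessed.  */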
/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
  return 0;
}
2056 /* Like extract_insn, but save insn extracted and don't extract again, when
2057 called again for the same insn expecting that recog_data still contain the
2058 valid information. This is used primary by gen_attr infrastructure that
2059 often does extract insn again and again. */
2060 void
2061 extract_insn_cached (insn)
2062 rtx insn;
2064 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2065 return;
2066 extract_insn (insn);
2067 recog_data.insn = insn;
2069 /* Do cached extract_insn and constrain_operands, and complain about
2070 failures. Used by insn_attrtab. */
2071 void
2072 extract_constrain_insn_cached (insn)
2073 rtx insn;
2075 extract_insn_cached (insn);
2076 if (which_alternative == -1
2077 && !constrain_operands (reload_completed))
2078 fatal_insn_not_found (insn);
2080 /* Do cached constrain_operands, reusing the previous result if available. */
2081 int
2082 constrain_operands_cached (strict)
2083 int strict;
2085 if (which_alternative == -1)
2086 return constrain_operands (strict);
2087 else
2088 return 1;
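/* Usage sketch (hypothetical, in the style of insn-attrtab.c output):
   generated attribute functions typically do

     extract_constrain_insn_cached (insn);
     switch (which_alternative)
       {
       case 0:
         return 4;
       default:
         return 8;
       }

   so repeated attribute queries against the same insn pay for
   extraction and constraint matching only once.  */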
2091 /* Analyze INSN and fill in recog_data. */
2093 void
2094 extract_insn (insn)
2095 rtx insn;
2097 int i;
2098 int icode;
2099 int noperands;
2100 rtx body = PATTERN (insn);
2102 recog_data.insn = NULL;
2103 recog_data.n_operands = 0;
2104 recog_data.n_alternatives = 0;
2105 recog_data.n_dups = 0;
2106 which_alternative = -1;
2108 switch (GET_CODE (body))
2110 case USE:
2111 case CLOBBER:
2112 case ASM_INPUT:
2113 case ADDR_VEC:
2114 case ADDR_DIFF_VEC:
2115 return;
2117 case SET:
2118 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2119 goto asm_insn;
2120 else
2121 goto normal_insn;
2122 case PARALLEL:
2123 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2124 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2125 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2126 goto asm_insn;
2127 else
2128 goto normal_insn;
2129 case ASM_OPERANDS:
2130 asm_insn:
2131 recog_data.n_operands = noperands = asm_noperands (body);
2132 if (noperands >= 0)
2134 /* This insn is an `asm' with operands. */
2136 /* expand_asm_operands makes sure there aren't too many operands. */
2137 if (noperands > MAX_RECOG_OPERANDS)
2138 abort ();
2140 /* Now get the operand values and constraints out of the insn. */
2141 decode_asm_operands (body, recog_data.operand,
2142 recog_data.operand_loc,
2143 recog_data.constraints,
2144 recog_data.operand_mode);
2145 if (noperands > 0)
2147 const char *p = recog_data.constraints[0];
2148 recog_data.n_alternatives = 1;
2149 while (*p)
2150 recog_data.n_alternatives += (*p++ == ',');
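/* For example, the constraint string "=r,m" describes two
   alternatives: each ',' starts another one.  */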
2152 break;
2154 fatal_insn_not_found (insn);
2156 default:
2157 normal_insn:
2158 /* Ordinary insn: recognize it, get the operands via insn_extract
2159 and get the constraints. */
2161 icode = recog_memoized (insn);
2162 if (icode < 0)
2163 fatal_insn_not_found (insn);
2165 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2166 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2167 recog_data.n_dups = insn_data[icode].n_dups;
2169 insn_extract (insn);
2171 for (i = 0; i < noperands; i++)
2173 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2174 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2175 /* A VOIDmode match_operand gets its mode from its real operand. */
2176 if (recog_data.operand_mode[i] == VOIDmode)
2177 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2180 for (i = 0; i < noperands; i++)
2181 recog_data.operand_type[i]
2182 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2183 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2184 : OP_IN);
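/* For example, a constraint of "=r" yields OP_OUT, "+r" yields
   OP_INOUT, and plain "r" yields OP_IN.  */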
2186 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2187 abort ();
2190 /* After calling extract_insn, you can use this function to extract some
2191 information from the constraint strings into a more usable form.
2192 The collected data is stored in recog_op_alt. */
2193 void
2194 preprocess_constraints ()
2196 int i;
2198 memset (recog_op_alt, 0, sizeof recog_op_alt);
2199 for (i = 0; i < recog_data.n_operands; i++)
2201 int j;
2202 struct operand_alternative *op_alt;
2203 const char *p = recog_data.constraints[i];
2205 op_alt = recog_op_alt[i];
2207 for (j = 0; j < recog_data.n_alternatives; j++)
2209 op_alt[j].class = NO_REGS;
2210 op_alt[j].constraint = p;
2211 op_alt[j].matches = -1;
2212 op_alt[j].matched = -1;
2214 if (*p == '\0' || *p == ',')
2216 op_alt[j].anything_ok = 1;
2217 continue;
2220 for (;;)
2222 char c = *p++;
2223 if (c == '#')
2224 do
2225 c = *p++;
2226 while (c != ',' && c != '\0');
2227 if (c == ',' || c == '\0')
2228 break;
2230 switch (c)
2232 case '=': case '+': case '*': case '%':
2233 case 'E': case 'F': case 'G': case 'H':
2234 case 's': case 'i': case 'n':
2235 case 'I': case 'J': case 'K': case 'L':
2236 case 'M': case 'N': case 'O': case 'P':
2237 /* These don't say anything we care about. */
2238 break;
2240 case '?':
2241 op_alt[j].reject += 6;
2242 break;
2243 case '!':
2244 op_alt[j].reject += 600;
2245 break;
2246 case '&':
2247 op_alt[j].earlyclobber = 1;
2248 break;
2250 case '0': case '1': case '2': case '3': case '4':
2251 case '5': case '6': case '7': case '8': case '9':
2253 char *end;
2254 op_alt[j].matches = strtoul (p - 1, &end, 10);
2255 recog_op_alt[op_alt[j].matches][j].matched = i;
2256 p = end;
2258 break;
2260 case 'm':
2261 op_alt[j].memory_ok = 1;
2262 break;
2263 case '<':
2264 op_alt[j].decmem_ok = 1;
2265 break;
2266 case '>':
2267 op_alt[j].incmem_ok = 1;
2268 break;
2269 case 'V':
2270 op_alt[j].nonoffmem_ok = 1;
2271 break;
2272 case 'o':
2273 op_alt[j].offmem_ok = 1;
2274 break;
2275 case 'X':
2276 op_alt[j].anything_ok = 1;
2277 break;
2279 case 'p':
2280 op_alt[j].is_address = 1;
2281 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2282 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2283 break;
2285 case 'g': case 'r':
2286 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2287 break;
2289 default:
2290 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
2291 break;
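/* Worked example (illustrative): for an operand I whose constraint
   string is "=r,m", the loop above leaves

     recog_op_alt[I][0].class == GENERAL_REGS, .memory_ok == 0
     recog_op_alt[I][1].class == NO_REGS,      .memory_ok == 1

   The '=' contributes nothing here; it only marks the operand as an
   output.  */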
2298 /* Check the operands of an insn against the insn's operand constraints
2299 and return 1 if they are valid.
2300 The information about the insn's operands, constraints, operand modes
2301 etc. is obtained from the global variables set up by extract_insn.
2303 WHICH_ALTERNATIVE is set to a number which indicates which
2304 alternative of constraints was matched: 0 for the first alternative,
2305 1 for the next, etc.
2307 In addition, when two operands match
2308 and it happens that the output operand is (reg) while the
2309 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2310 make the output operand look like the input.
2311 This is because the output operand is the one the template will print.
2313 This is used in final, just before printing the assembler code and by
2314 the routines that determine an insn's attribute.
2316 If STRICT is positive, it means that we have been
2317 called after reload has been completed. In that case, we must
2318 do all checks strictly. If it is zero, it means that we have been called
2319 before reload has completed. In that case, we first try to see if we can
2320 find an alternative that matches strictly. If not, we try again, this
2321 time assuming that reload will fix up the insn. This provides a "best
2322 guess" for the alternative and is used to compute attributes of insns prior
2323 to reload. A negative value of STRICT is used for this internal call. */
2325 struct funny_match
2327 int this, other;
2330 int
2331 constrain_operands (strict)
2332 int strict;
2334 const char *constraints[MAX_RECOG_OPERANDS];
2335 int matching_operands[MAX_RECOG_OPERANDS];
2336 int earlyclobber[MAX_RECOG_OPERANDS];
2337 int c;
2339 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2340 int funny_match_index;
2342 which_alternative = 0;
2343 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2344 return 1;
2346 for (c = 0; c < recog_data.n_operands; c++)
2348 constraints[c] = recog_data.constraints[c];
2349 matching_operands[c] = -1;
2352 do
2354 int opno;
2355 int lose = 0;
2356 funny_match_index = 0;
2358 for (opno = 0; opno < recog_data.n_operands; opno++)
2360 rtx op = recog_data.operand[opno];
2361 enum machine_mode mode = GET_MODE (op);
2362 const char *p = constraints[opno];
2363 int offset = 0;
2364 int win = 0;
2365 int val;
2367 earlyclobber[opno] = 0;
2369 /* A unary operator may be accepted by the predicate, but it
2370 is irrelevant for matching constraints. */
2371 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2372 op = XEXP (op, 0);
2374 if (GET_CODE (op) == SUBREG)
2376 if (GET_CODE (SUBREG_REG (op)) == REG
2377 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2378 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2379 GET_MODE (SUBREG_REG (op)),
2380 SUBREG_BYTE (op),
2381 GET_MODE (op));
2382 op = SUBREG_REG (op);
2385 /* An empty constraint or empty alternative
2386 allows anything which matched the pattern. */
2387 if (*p == 0 || *p == ',')
2388 win = 1;
2390 while (*p && (c = *p++) != ',')
2391 switch (c)
2393 case '?': case '!': case '*': case '%':
2394 case '=': case '+':
2395 break;
2397 case '#':
2398 /* Ignore rest of this alternative as far as
2399 constraint checking is concerned. */
2400 while (*p && *p != ',')
2401 p++;
2402 break;
2404 case '&':
2405 earlyclobber[opno] = 1;
2406 break;
2408 case '0': case '1': case '2': case '3': case '4':
2409 case '5': case '6': case '7': case '8': case '9':
2411 /* This operand must be the same as a previous one.
2412 This kind of constraint is used for instructions such
2413 as add when they take only two operands.
2415 Note that the lower-numbered operand is passed first.
2417 If we are not testing strictly, assume that this
2418 constraint will be satisfied. */
2420 char *end;
2421 int match;
2423 match = strtoul (p - 1, &end, 10);
2424 p = end;
2426 if (strict < 0)
2427 val = 1;
2428 else
2430 rtx op1 = recog_data.operand[match];
2431 rtx op2 = recog_data.operand[opno];
2433 /* A unary operator may be accepted by the predicate,
2434 but it is irrelevant for matching constraints. */
2435 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2436 op1 = XEXP (op1, 0);
2437 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2438 op2 = XEXP (op2, 0);
2440 val = operands_match_p (op1, op2);
2443 matching_operands[opno] = match;
2444 matching_operands[match] = opno;
2446 if (val != 0)
2447 win = 1;
2449 /* If output is *x and input is *--x, arrange later
2450 to change the output to *--x as well, since the
2451 output op is the one that will be printed. */
2452 if (val == 2 && strict > 0)
2454 funny_match[funny_match_index].this = opno;
2455 funny_match[funny_match_index++].other = match;
2458 break;
2460 case 'p':
2461 /* p is used for address_operands. When we are called by
2462 gen_reload, no one will have checked that the address is
2463 strictly valid, i.e., that all pseudos requiring hard regs
2464 have gotten them. */
2465 if (strict <= 0
2466 || (strict_memory_address_p (recog_data.operand_mode[opno],
2467 op)))
2468 win = 1;
2469 break;
2471 /* No need to check general_operand again;
2472 it was done in insn-recog.c. */
2473 case 'g':
2474 /* Anything goes unless it is a REG and really has a hard reg
2475 but the hard reg is not in the class GENERAL_REGS. */
2476 if (strict < 0
2477 || GENERAL_REGS == ALL_REGS
2478 || GET_CODE (op) != REG
2479 || (reload_in_progress
2480 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2481 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2482 win = 1;
2483 break;
2485 case 'X':
2486 /* This is used for a MATCH_SCRATCH in the cases when
2487 we don't actually need anything. So anything goes
2488 any time. */
2489 win = 1;
2490 break;
2492 case 'm':
2493 if (GET_CODE (op) == MEM
2494 /* Before reload, accept what reload can turn into mem. */
2495 || (strict < 0 && CONSTANT_P (op))
2496 /* During reload, accept a pseudo */
2497 || (reload_in_progress && GET_CODE (op) == REG
2498 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2499 win = 1;
2500 break;
2502 case '<':
2503 if (GET_CODE (op) == MEM
2504 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2505 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2506 win = 1;
2507 break;
2509 case '>':
2510 if (GET_CODE (op) == MEM
2511 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2512 || GET_CODE (XEXP (op, 0)) == POST_INC))
2513 win = 1;
2514 break;
2516 case 'E':
2517 case 'F':
2518 if (GET_CODE (op) == CONST_DOUBLE
2519 || (GET_CODE (op) == CONST_VECTOR
2520 && GET_MODE_CLASS (GET_MODE (op)) == MODE_VECTOR_FLOAT))
2521 win = 1;
2522 break;
2524 case 'G':
2525 case 'H':
2526 if (GET_CODE (op) == CONST_DOUBLE
2527 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2528 win = 1;
2529 break;
2531 case 's':
2532 if (GET_CODE (op) == CONST_INT
2533 || (GET_CODE (op) == CONST_DOUBLE
2534 && GET_MODE (op) == VOIDmode))
2535 break;
2536 case 'i':
2537 if (CONSTANT_P (op))
2538 win = 1;
2539 break;
2541 case 'n':
2542 if (GET_CODE (op) == CONST_INT
2543 || (GET_CODE (op) == CONST_DOUBLE
2544 && GET_MODE (op) == VOIDmode))
2545 win = 1;
2546 break;
2548 case 'I':
2549 case 'J':
2550 case 'K':
2551 case 'L':
2552 case 'M':
2553 case 'N':
2554 case 'O':
2555 case 'P':
2556 if (GET_CODE (op) == CONST_INT
2557 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2558 win = 1;
2559 break;
2561 case 'V':
2562 if (GET_CODE (op) == MEM
2563 && ((strict > 0 && ! offsettable_memref_p (op))
2564 || (strict < 0
2565 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2566 || (reload_in_progress
2567 && !(GET_CODE (op) == REG
2568 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2569 win = 1;
2570 break;
2572 case 'o':
2573 if ((strict > 0 && offsettable_memref_p (op))
2574 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2575 /* Before reload, accept what reload can handle. */
2576 || (strict < 0
2577 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2578 /* During reload, accept a pseudo */
2579 || (reload_in_progress && GET_CODE (op) == REG
2580 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2581 win = 1;
2582 break;
2584 default:
2586 enum reg_class class;
2588 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2589 if (class != NO_REGS)
2591 if (strict < 0
2592 || (strict == 0
2593 && GET_CODE (op) == REG
2594 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2595 || (strict == 0 && GET_CODE (op) == SCRATCH)
2596 || (GET_CODE (op) == REG
2597 && reg_fits_class_p (op, class, offset, mode)))
2598 win = 1;
2600 #ifdef EXTRA_CONSTRAINT
2601 else if (EXTRA_CONSTRAINT (op, c))
2602 win = 1;
2603 #endif
2604 break;
2608 constraints[opno] = p;
2609 /* If this operand did not win somehow,
2610 this alternative loses. */
2611 if (! win)
2612 lose = 1;
2614 /* This alternative won; the operands are ok.
2615 Change whichever operands this alternative says to change. */
2616 if (! lose)
2618 int opno, eopno;
2620 /* See if any earlyclobber operand conflicts with some other
2621 operand. */
2623 if (strict > 0)
2624 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2625 /* Ignore earlyclobber operands now in memory,
2626 because we would often report failure when we have
2627 two memory operands, one of which was formerly a REG. */
2628 if (earlyclobber[eopno]
2629 && GET_CODE (recog_data.operand[eopno]) == REG)
2630 for (opno = 0; opno < recog_data.n_operands; opno++)
2631 if ((GET_CODE (recog_data.operand[opno]) == MEM
2632 || recog_data.operand_type[opno] != OP_OUT)
2633 && opno != eopno
2634 /* Ignore things like match_operator operands. */
2635 && *recog_data.constraints[opno] != 0
2636 && ! (matching_operands[opno] == eopno
2637 && operands_match_p (recog_data.operand[opno],
2638 recog_data.operand[eopno]))
2639 && ! safe_from_earlyclobber (recog_data.operand[opno],
2640 recog_data.operand[eopno]))
2641 lose = 1;
2643 if (! lose)
2645 while (--funny_match_index >= 0)
2647 recog_data.operand[funny_match[funny_match_index].other]
2648 = recog_data.operand[funny_match[funny_match_index].this];
2651 return 1;
2655 which_alternative++;
2657 while (which_alternative < recog_data.n_alternatives);
2659 which_alternative = -1;
2660 /* If we are about to reject this, but we are not to test strictly,
2661 try a very loose test. Only return failure if it fails also. */
2662 if (strict == 0)
2663 return constrain_operands (-1);
2664 else
2665 return 0;
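/* Usage sketch (hedged -- actual callers vary): a pass that must know
   which alternative an insn uses does roughly

     extract_insn (insn);
     if (! constrain_operands (reload_completed))
       fatal_insn_not_found (insn);
     ... inspect which_alternative ...

   passing reload_completed so that checking is strict exactly when
   hard registers are final.  */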
2668 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2669 is a hard reg in class CLASS when its regno is offset by OFFSET
2670 and changed to mode MODE.
2671 If REG occupies multiple hard regs, all of them must be in CLASS. */
2673 int
2674 reg_fits_class_p (operand, class, offset, mode)
2675 rtx operand;
2676 enum reg_class class;
2677 int offset;
2678 enum machine_mode mode;
2680 int regno = REGNO (operand);
2681 if (regno < FIRST_PSEUDO_REGISTER
2682 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2683 regno + offset))
2685 int sr;
2686 regno += offset;
2687 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2688 sr > 0; sr--)
2689 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2690 regno + sr))
2691 break;
2692 return sr == 0;
2695 return 0;
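/* Example (illustrative): on a target with 32-bit hard registers, a
   DImode OPERAND numbered 3 with OFFSET 0 occupies hard regs 3 and 4;
   both must be in CLASS for the function to return 1.  */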
2698 /* Split a single instruction. Helper function for split_all_insns.
2699 Return the last insn in the sequence if successful, or NULL if unsuccessful. */
2700 static rtx
2701 split_insn (insn)
2702 rtx insn;
2704 rtx set;
2705 if (!INSN_P (insn))
2707 /* Don't split no-op move insns. These should silently
2708 disappear later in final. Splitting such insns would
2709 break the code that handles REG_NO_CONFLICT blocks. */
2711 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2713 /* Nops get in the way while scheduling, so delete them
2714 now if register allocation has already been done. It
2715 is too risky to try to do this before register
2716 allocation, and there are unlikely to be very many
2717 nops then anyway. */
2718 if (reload_completed)
2719 delete_insn_and_edges (insn);
2721 else
2723 /* Split insns here to get max fine-grain parallelism. */
2724 rtx first = PREV_INSN (insn);
2725 rtx last = try_split (PATTERN (insn), insn, 1);
2727 if (last != insn)
2729 /* try_split returns the NOTE that INSN became. */
2730 PUT_CODE (insn, NOTE);
2731 NOTE_SOURCE_FILE (insn) = 0;
2732 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2734 /* ??? Coddle to md files that generate subregs in post-
2735 reload splitters instead of computing the proper
2736 hard register. */
2737 if (reload_completed && first != last)
2739 first = NEXT_INSN (first);
2740 while (1)
2742 if (INSN_P (first))
2743 cleanup_subreg_operands (first);
2744 if (first == last)
2745 break;
2746 first = NEXT_INSN (first);
2749 return last;
2752 return NULL_RTX;
2754 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2756 void
2757 split_all_insns (upd_life)
2758 int upd_life;
2760 sbitmap blocks;
2761 int changed;
2762 basic_block bb;
2764 blocks = sbitmap_alloc (last_basic_block);
2765 sbitmap_zero (blocks);
2766 changed = 0;
2768 FOR_EACH_BB_REVERSE (bb)
2770 rtx insn, next;
2771 bool finish = false;
2773 for (insn = bb->head; !finish ; insn = next)
2775 rtx last;
2777 /* Can't use `next_real_insn' because that might go across
2778 CODE_LABELS and short-out basic blocks. */
2779 next = NEXT_INSN (insn);
2780 finish = (insn == bb->end);
2781 last = split_insn (insn);
2782 if (last)
2784 /* The split sequence may include a barrier, but the
2785 BB boundary we are interested in will be set to the
2786 previous one. */
2788 while (GET_CODE (last) == BARRIER)
2789 last = PREV_INSN (last);
2790 SET_BIT (blocks, bb->index);
2791 changed = 1;
2792 insn = last;
2797 if (changed)
2799 find_many_sub_basic_blocks (blocks);
2802 if (changed && upd_life)
2804 count_or_remove_death_notes (blocks, 1);
2805 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2807 #ifdef ENABLE_CHECKING
2808 verify_flow_info ();
2809 #endif
2811 sbitmap_free (blocks);
2814 /* Same as split_all_insns, but do not expect the CFG to be available.
2815 Used by machine-dependent reorg passes. */
2817 void
2818 split_all_insns_noflow ()
2820 rtx next, insn;
2822 for (insn = get_insns (); insn; insn = next)
2824 next = NEXT_INSN (insn);
2825 split_insn (insn);
2827 return;
2830 #ifdef HAVE_peephole2
2831 struct peep2_insn_data
2833 rtx insn;
2834 regset live_before;
2837 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2838 static int peep2_current;
2840 /* A non-insn marker indicating the last insn of the block.
2841 The live_before regset for this element is correct, indicating
2842 global_live_at_end for the block. */
2843 #define PEEP2_EOB pc_rtx
2845 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2846 does not exist. Used by the recognizer to find the next insn to match
2847 in a multi-insn pattern. */
2849 rtx
2850 peep2_next_insn (n)
2851 int n;
2853 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2854 abort ();
2856 n += peep2_current;
2857 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2858 n -= MAX_INSNS_PER_PEEP2 + 1;
2860 if (peep2_insn_data[n].insn == PEEP2_EOB)
2861 return NULL_RTX;
2862 return peep2_insn_data[n].insn;
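/* peep2_insn_data[] is used as a circular buffer of
   MAX_INSNS_PER_PEEP2 + 1 slots.  For example, with
   MAX_INSNS_PER_PEEP2 == 5 and peep2_current == 4,
   peep2_next_insn (3) reads slot (4 + 3) % 6 == 1.  The same
   wrap-around arithmetic recurs in the functions below.  */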
2865 /* Return true if REGNO is dead before the Nth non-note insn
2866 after `current'. */
2868 int
2869 peep2_regno_dead_p (ofs, regno)
2870 int ofs;
2871 int regno;
2873 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2874 abort ();
2876 ofs += peep2_current;
2877 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2878 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2880 if (peep2_insn_data[ofs].insn == NULL_RTX)
2881 abort ();
2883 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2886 /* Similarly for a REG. */
2888 int
2889 peep2_reg_dead_p (ofs, reg)
2890 int ofs;
2891 rtx reg;
2893 int regno, n;
2895 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2896 abort ();
2898 ofs += peep2_current;
2899 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2900 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2902 if (peep2_insn_data[ofs].insn == NULL_RTX)
2903 abort ();
2905 regno = REGNO (reg);
2906 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2907 while (--n >= 0)
2908 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2909 return 0;
2910 return 1;
2913 /* Try to find a hard register of mode MODE, matching the register class in
2914 CLASS_STR, which is available at the beginning of the insn at offset FROM
2915 and remains available until the end of the insn at offset TO; both offsets
2916 count non-note insns from `current', as in peep2_next_insn.
2918 Registers that already have bits set in REG_SET will not be considered.
2920 If an appropriate register is available, it will be returned and the
2921 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2922 returned. */
2924 rtx
2925 peep2_find_free_register (from, to, class_str, mode, reg_set)
2926 int from, to;
2927 const char *class_str;
2928 enum machine_mode mode;
2929 HARD_REG_SET *reg_set;
2931 static int search_ofs;
2932 enum reg_class class;
2933 HARD_REG_SET live;
2934 int i;
2936 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2937 abort ();
2939 from += peep2_current;
2940 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2941 from -= MAX_INSNS_PER_PEEP2 + 1;
2942 to += peep2_current;
2943 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2944 to -= MAX_INSNS_PER_PEEP2 + 1;
2946 if (peep2_insn_data[from].insn == NULL_RTX)
2947 abort ();
2948 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2950 while (from != to)
2952 HARD_REG_SET this_live;
2954 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2955 from = 0;
2956 if (peep2_insn_data[from].insn == NULL_RTX)
2957 abort ();
2958 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2959 IOR_HARD_REG_SET (live, this_live);
2962 class = (class_str[0] == 'r' ? GENERAL_REGS
2963 : REG_CLASS_FROM_LETTER (class_str[0]));
2965 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2967 int raw_regno, regno, success, j;
2969 /* Distribute the free registers as much as possible. */
2970 raw_regno = search_ofs + i;
2971 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2972 raw_regno -= FIRST_PSEUDO_REGISTER;
2973 #ifdef REG_ALLOC_ORDER
2974 regno = reg_alloc_order[raw_regno];
2975 #else
2976 regno = raw_regno;
2977 #endif
2979 /* Don't allocate fixed registers. */
2980 if (fixed_regs[regno])
2981 continue;
2982 /* Make sure the register is of the right class. */
2983 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2984 continue;
2985 /* And can support the mode we need. */
2986 if (! HARD_REGNO_MODE_OK (regno, mode))
2987 continue;
2988 /* And that we don't create an extra save/restore. */
2989 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2990 continue;
2991 /* And we don't clobber traceback for noreturn functions. */
2992 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2993 && (! reload_completed || frame_pointer_needed))
2994 continue;
2996 success = 1;
2997 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2999 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3000 || TEST_HARD_REG_BIT (live, regno + j))
3002 success = 0;
3003 break;
3006 if (success)
3008 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3009 SET_HARD_REG_BIT (*reg_set, regno + j);
3011 /* Start the next search with the next register. */
3012 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3013 raw_regno = 0;
3014 search_ofs = raw_regno;
3016 return gen_rtx_REG (mode, regno);
3020 search_ofs = 0;
3021 return NULL_RTX;
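/* Illustrative use (hypothetical pattern, not from any real .md file):
   a define_peephole2 such as

     (define_peephole2
       [(match_scratch:SI 2 "r")
        (set (match_operand:SI 0 "memory_operand" "")
             (match_operand:SI 1 "immediate_operand" ""))]
       "..."
       [...])

   relies on the code generated for match_scratch calling
   peep2_find_free_register to pick an "r" register that stays free
   across the matched insns.  */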
3024 /* Perform the peephole2 optimization pass. */
3026 void
3027 peephole2_optimize (dump_file)
3028 FILE *dump_file ATTRIBUTE_UNUSED;
3030 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3031 rtx insn, prev;
3032 regset live;
3033 int i;
3034 basic_block bb;
3035 #ifdef HAVE_conditional_execution
3036 sbitmap blocks;
3037 bool changed;
3038 #endif
3039 bool do_cleanup_cfg = false;
3040 bool do_rebuild_jump_labels = false;
3042 /* Initialize the regsets we're going to use. */
3043 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3044 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3045 live = INITIALIZE_REG_SET (rs_heads[i]);
3047 #ifdef HAVE_conditional_execution
3048 blocks = sbitmap_alloc (last_basic_block);
3049 sbitmap_zero (blocks);
3050 changed = false;
3051 #else
3052 count_or_remove_death_notes (NULL, 1);
3053 #endif
3055 FOR_EACH_BB_REVERSE (bb)
3057 struct propagate_block_info *pbi;
3059 /* Indicate that all slots except the last hold invalid data. */
3060 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3061 peep2_insn_data[i].insn = NULL_RTX;
3063 /* Indicate that the last slot contains live_after data. */
3064 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3065 peep2_current = MAX_INSNS_PER_PEEP2;
3067 /* Start up propagation. */
3068 COPY_REG_SET (live, bb->global_live_at_end);
3069 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3071 #ifdef HAVE_conditional_execution
3072 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3073 #else
3074 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3075 #endif
3077 for (insn = bb->end; ; insn = prev)
3079 prev = PREV_INSN (insn);
3080 if (INSN_P (insn))
3082 rtx try, before_try, x;
3083 int match_len;
3084 rtx note;
3085 bool was_call = false;
3087 /* Record this insn. */
3088 if (--peep2_current < 0)
3089 peep2_current = MAX_INSNS_PER_PEEP2;
3090 peep2_insn_data[peep2_current].insn = insn;
3091 propagate_one_insn (pbi, insn);
3092 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3094 /* Match the peephole. */
3095 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3096 if (try != NULL)
3098 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3099 in the new sequence and copy our CALL_INSN_FUNCTION_USAGE
3100 and other cfg-related call notes to it. */
3101 for (i = 0; i <= match_len; ++i)
3103 int j;
3104 rtx old_insn, new_insn, note;
3106 j = i + peep2_current;
3107 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3108 j -= MAX_INSNS_PER_PEEP2 + 1;
3109 old_insn = peep2_insn_data[j].insn;
3110 if (GET_CODE (old_insn) != CALL_INSN)
3111 continue;
3112 was_call = true;
3114 new_insn = try;
3115 while (new_insn != NULL_RTX)
3117 if (GET_CODE (new_insn) == CALL_INSN)
3118 break;
3119 new_insn = NEXT_INSN (new_insn);
3122 if (new_insn == NULL_RTX)
3123 abort ();
3125 CALL_INSN_FUNCTION_USAGE (new_insn)
3126 = CALL_INSN_FUNCTION_USAGE (old_insn);
3128 for (note = REG_NOTES (old_insn);
3129 note;
3130 note = XEXP (note, 1))
3131 switch (REG_NOTE_KIND (note))
3133 case REG_NORETURN:
3134 case REG_SETJMP:
3135 case REG_ALWAYS_RETURN:
3136 REG_NOTES (new_insn)
3137 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3138 XEXP (note, 0),
3139 REG_NOTES (new_insn));
3140 default:
3141 /* Discard all other reg notes. */
3142 break;
3145 /* Croak if there is another call in the sequence. */
3146 while (++i <= match_len)
3148 j = i + peep2_current;
3149 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3150 j -= MAX_INSNS_PER_PEEP2 + 1;
3151 old_insn = peep2_insn_data[j].insn;
3152 if (GET_CODE (old_insn) == CALL_INSN)
3153 abort ();
3155 break;
3158 i = match_len + peep2_current;
3159 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3160 i -= MAX_INSNS_PER_PEEP2 + 1;
3162 note = find_reg_note (peep2_insn_data[i].insn,
3163 REG_EH_REGION, NULL_RTX);
3165 /* Replace the old sequence with the new. */
3166 try = emit_insn_after_scope (try, peep2_insn_data[i].insn,
3167 INSN_SCOPE (peep2_insn_data[i].insn));
3168 before_try = PREV_INSN (insn);
3169 delete_insn_chain (insn, peep2_insn_data[i].insn);
3171 /* Re-insert the EH_REGION notes. */
3172 if (note || (was_call && nonlocal_goto_handler_labels))
3174 edge eh_edge;
3176 for (eh_edge = bb->succ; eh_edge
3177 ; eh_edge = eh_edge->succ_next)
3178 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3179 break;
3181 for (x = try ; x != before_try ; x = PREV_INSN (x))
3182 if (GET_CODE (x) == CALL_INSN
3183 || (flag_non_call_exceptions
3184 && may_trap_p (PATTERN (x))
3185 && !find_reg_note (x, REG_EH_REGION, NULL)))
3187 if (note)
3188 REG_NOTES (x)
3189 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3190 XEXP (note, 0),
3191 REG_NOTES (x));
3193 if (x != bb->end && eh_edge)
3195 edge nfte, nehe;
3196 int flags;
3198 nfte = split_block (bb, x);
3199 flags = (eh_edge->flags
3200 & (EDGE_EH | EDGE_ABNORMAL));
3201 if (GET_CODE (x) == CALL_INSN)
3202 flags |= EDGE_ABNORMAL_CALL;
3203 nehe = make_edge (nfte->src, eh_edge->dest,
3204 flags);
3206 nehe->probability = eh_edge->probability;
3207 nfte->probability
3208 = REG_BR_PROB_BASE - nehe->probability;
3210 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3211 #ifdef HAVE_conditional_execution
3212 SET_BIT (blocks, nfte->dest->index);
3213 changed = true;
3214 #endif
3215 bb = nfte->src;
3216 eh_edge = nehe;
3220 /* A possibly trapping insn may have been converted into a
3221 non-trapping one; zap its now-dummy outgoing edges. */
3222 do_cleanup_cfg |= purge_dead_edges (bb);
3225 #ifdef HAVE_conditional_execution
3226 /* With conditional execution, we cannot back up the
3227 live information so easily, since the conditional
3228 death data structures are not so self-contained.
3229 So record that we've made a modification to this
3230 block and update life information at the end. */
3231 SET_BIT (blocks, bb->index);
3232 changed = true;
3234 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3235 peep2_insn_data[i].insn = NULL_RTX;
3236 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3237 #else
3238 /* Back up lifetime information past the end of the
3239 newly created sequence. */
3240 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3241 i = 0;
3242 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3244 /* Update life information for the new sequence. */
3245 x = try;
3246 do
3248 if (INSN_P (x))
3250 if (--i < 0)
3251 i = MAX_INSNS_PER_PEEP2;
3252 peep2_insn_data[i].insn = x;
3253 propagate_one_insn (pbi, x);
3254 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3256 x = PREV_INSN (x);
3258 while (x != prev);
3260 /* ??? Should verify that LIVE now matches what we
3261 had before the new sequence. */
3263 peep2_current = i;
3264 #endif
3266 /* If we generated a jump instruction, it won't have
3267 JUMP_LABEL set. Recompute after we're done. */
3268 for (x = try; x != before_try; x = PREV_INSN (x))
3269 if (GET_CODE (x) == JUMP_INSN)
3271 do_rebuild_jump_labels = true;
3272 break;
3277 if (insn == bb->head)
3278 break;
3281 free_propagate_block_info (pbi);
3284 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3285 FREE_REG_SET (peep2_insn_data[i].live_before);
3286 FREE_REG_SET (live);
3288 if (do_rebuild_jump_labels)
3289 rebuild_jump_labels (get_insns ());
3291 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3292 we've changed global life since exception handlers are no longer
3293 reachable. */
3294 if (do_cleanup_cfg)
3296 cleanup_cfg (0);
3297 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3299 #ifdef HAVE_conditional_execution
3300 else
3302 count_or_remove_death_notes (blocks, 1);
3303 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3305 sbitmap_free (blocks);
3306 #endif
3308 #endif /* HAVE_peephole2 */
3310 /* Common predicates for use with define_bypass. */
3312 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3313 data, not the address operand(s), of the store. IN_INSN must be
3314 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3315 SETs inside. */
3317 int
3318 store_data_bypass_p (out_insn, in_insn)
3319 rtx out_insn, in_insn;
3321 rtx out_set, in_set;
3323 in_set = single_set (in_insn);
3324 if (! in_set)
3325 abort ();
3327 if (GET_CODE (SET_DEST (in_set)) != MEM)
3328 return false;
3330 out_set = single_set (out_insn);
3331 if (out_set)
3333 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3334 return false;
3336 else
3338 rtx out_pat;
3339 int i;
3341 out_pat = PATTERN (out_insn);
3342 if (GET_CODE (out_pat) != PARALLEL)
3343 abort ();
3345 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3347 rtx exp = XVECEXP (out_pat, 0, i);
3349 if (GET_CODE (exp) == CLOBBER)
3350 continue;
3352 if (GET_CODE (exp) != SET)
3353 abort ();
3355 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3356 return false;
3360 return true;
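/* Typical use from a machine description (insn reservation names are
   hypothetical):

     (define_bypass 1 "alu" "store" "store_data_bypass_p")

   i.e. grant the shorter latency only when the store consumes the ALU
   result as its data, not as part of its address.  */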
3363 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3364 condition, and not the THEN or ELSE branch. OUT_INSN may be either a single
3365 or multiple set; IN_INSN should be single_set for truth, but for convenience
3366 of insn categorization may be any JUMP or CALL insn. */
3368 int
3369 if_test_bypass_p (out_insn, in_insn)
3370 rtx out_insn, in_insn;
3372 rtx out_set, in_set;
3374 in_set = single_set (in_insn);
3375 if (! in_set)
3377 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3378 return false;
3379 abort ();
3382 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3383 return false;
3384 in_set = SET_SRC (in_set);
3386 out_set = single_set (out_insn);
3387 if (out_set)
3389 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3390 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3391 return false;
3393 else
3395 rtx out_pat;
3396 int i;
3398 out_pat = PATTERN (out_insn);
3399 if (GET_CODE (out_pat) != PARALLEL)
3400 abort ();
3402 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3404 rtx exp = XVECEXP (out_pat, 0, i);
3406 if (GET_CODE (exp) == CLOBBER)
3407 continue;
3409 if (GET_CODE (exp) != SET)
3410 abort ();
3412 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3413 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3414 return false;
3418 return true;
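/* And correspondingly (again with hypothetical reservation names):

     (define_bypass 1 "compare" "cmove" "if_test_bypass_p")

   grants the shorter latency only when the consumer uses the result
   in the IF_THEN_ELSE condition rather than in either arm.  */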