/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "expr.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif
static void validate_replace_rtx_1 PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1 PARAMS ((rtx, rtx *));
static void validate_replace_src_1 PARAMS ((rtx *, void *));
static rtx split_insn PARAMS ((rtx));
/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;
struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;
/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}
/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, 0);
  return INSN_CODE (insn);
}
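
/* Illustrative sketch, not part of this file: passes normally reach the
   routine above through the `recog_memoized' wrapper declared in recog.h.
   A hypothetical caller testing whether an insn is recognizable might
   look like this.  */
#if 0
static int
insn_is_recognizable (insn)
     rtx insn;
{
  /* The insn code is cached in INSN_CODE, so repeated queries are cheap.  */
  return recog_memoized (insn) >= 0;
}
#endif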
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL, constraints, NULL);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
	c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
	return 0;
    }

  return 1;
}
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;
/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes =
	(change_t*) xrealloc (changes,
			      sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
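
/* Illustrative sketch, not part of this file: a hypothetical pass queueing
   two coupled edits to INSN as one group, so that either both survive or
   neither does.  */
#if 0
static int
swap_set_operands (insn)
     rtx insn;
{
  rtx set = PATTERN (insn);
  rtx src = SET_SRC (set), dest = SET_DEST (set);

  /* Each call installs its change tentatively; nothing is final yet.  */
  validate_change (insn, &SET_DEST (set), src, 1);
  validate_change (insn, &SET_SRC (set), dest, 1);
  /* Validates the whole group; on failure every change is undone.  */
  return apply_change_group ();
}
#endif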
/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : 0);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}
/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;
  rtx last_validated = NULL_RTX;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      /* If there is no object to test or if it is the same as the one we
	 already tested, ignore it.  */
      if (object == 0 || object == last_validated)
	continue;

      if (GET_CODE (object) == MEM)
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	      continue;
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
      last_validated = object;
    }

  if (i == num_changes)
    {
      basic_block bb;

      for (i = 0; i < num_changes; i++)
	if (changes[i].object
	    && INSN_P (changes[i].object)
	    && (bb = BLOCK_FOR_INSN (changes[i].object)))
	  bb->flags |= BB_DIRTY;

      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}
/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
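
/* Illustrative sketch, not part of this file: using a checkpoint from
   num_validated_changes to retract only the changes queued after it, in
   the style in which combine.c drives these entry points.  The cost test
   is a hypothetical placeholder.  */
#if 0
static void
try_commuted_form (insn, x)
     rtx insn, x;
{
  int checkpoint = num_validated_changes ();
  rtx op0 = XEXP (x, 0), op1 = XEXP (x, 1);

  validate_change (insn, &XEXP (x, 0), op1, 1);
  validate_change (insn, &XEXP (x, 1), op0, 1);

  if (! profitable_p (insn))	/* hypothetical cost test */
    /* Retract just the two changes queued above.  */
    cancel_changes (checkpoint);
}
#endif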
/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  int i, j;
  const char *fmt;
  rtx x = *loc;
  enum rtx_code code;
  enum machine_mode op0_mode = VOIDmode;
  int prev_changes = num_changes;
  rtx new;

  if (!x)
    return;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* Call ourself recursively to perform the replacements.  */

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
    }

  /* If we didn't substitute, there is nothing more to do.  */
  if (num_changes == prev_changes)
    return;

  /* Allow a substituted expression to have a different mode.  This is used
     by regmove to change the mode of a pseudo register.  */
  if (fmt[0] == 'e' && GET_MODE (XEXP (x, 0)) != VOIDmode)
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Do changes needed to keep rtx consistent.  Don't do any other
     simplifications, as it is not our job.  */

  if ((GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
      && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
    {
      validate_change (object, loc,
		       gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
				       : swap_condition (code),
				       GET_MODE (x), XEXP (x, 1),
				       XEXP (x, 0)), 1);
      x = *loc;
      code = GET_CODE (x);
    }

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 simplify_gen_binary to try to simplify it.
	 ??? We may want later to remove this, once simplification is
	 separated from this function.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)), 1);
      break;
    case MINUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  || GET_CODE (XEXP (x, 1)) == CONST_DOUBLE)
	validate_change (object, loc,
			 simplify_gen_binary
			 (PLUS, GET_MODE (x), XEXP (x, 0),
			  simplify_gen_unary (NEG,
					      GET_MODE (x), XEXP (x, 1),
					      GET_MODE (x))), 1);
      break;
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
	{
	  new = simplify_gen_unary (code, GET_MODE (x), XEXP (x, 0),
				    op0_mode);
	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new)
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	  validate_change (object, loc, new, 1);
	}
      break;
    case SUBREG:
      /* All subregs possible to simplify should be simplified.  */
      new = simplify_subreg (GET_MODE (x), SUBREG_REG (x), op0_mode,
			     SUBREG_BYTE (x));

      /* Subregs of VOIDmode operands are incorrect.  */
      if (!new && GET_MODE (SUBREG_REG (x)) == VOIDmode)
	new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      if (new)
	validate_change (object, loc, new, 1);
      break;
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (GET_CODE (XEXP (x, 0)) == MEM
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && !mode_dependent_address_p (XEXP (XEXP (x, 0), 0))
	  && !MEM_VOLATILE_P (XEXP (x, 0)))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (XEXP (x, 0));
	  int pos = INTVAL (XEXP (x, 2));

	  if (GET_CODE (x) == ZERO_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extzv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }
	  else if (GET_CODE (x) == SIGN_EXTRACT)
	    {
	      enum machine_mode new_mode
		= mode_for_extraction (EP_extv, 1);
	      if (new_mode != MAX_MACHINE_MODE)
		wanted_mode = new_mode;
	    }

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset =
		  (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode) -
		   offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = adjust_address_nv (XEXP (x, 0), wanted_mode, offset);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }
}
/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
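
/* Illustrative sketch, not part of this file: replacing one pseudo
   register by another throughout an insn, as a register-renaming pass
   might.  OLD_REG and NEW_REG are hypothetical placeholders.  */
#if 0
static int
rename_reg_in_insn (insn, old_reg, new_reg)
     rtx insn, old_reg, new_reg;
{
  /* Returns nonzero and keeps the edits only if the rewritten insn
     still matches some pattern (or is still a valid asm).  */
  return validate_replace_rtx (old_reg, new_reg, insn);
}
#endif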
/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}
/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}
/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  After all changes have been made, validate by seeing if
   INSN is still valid.  */

int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
  return apply_change_group ();
}
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
#endif
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		return 0;
	    }
	}
    }

  return result;
}
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
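
/* Illustrative sketch, not part of this file: a hypothetical combine-style
   transformation that only fires when the value set by INSN has exactly
   one later use, which it then rewrites in place.  */
#if 0
static void
forward_single_use (insn, dest, value)
     rtx insn, dest, value;
{
  rtx use_insn;
  rtx *use = find_single_use (dest, insn, &use_insn);

  if (use != 0
      && validate_change (use_insn, use, value, 0))
    delete_insn (insn);		/* The definition is now dead.  */
}
#endif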
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (sub) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;
#endif
      /* Avoid memories with nonzero SUBREG_BYTE, as offsetting the memory
	 may result in incorrect reference.  We should simplify all valid
	 subregs of MEM anyway.  But allow this after reload because we
	 might be called from cleanup_subreg_operands.

	 ??? This is a kludge.  */
      if (!reload_completed && SUBREG_BYTE (op) != 0
	  && GET_CODE (sub) == MEM)
	return 0;

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      if (GET_CODE (y) == ADDRESSOF)
	return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
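
/* Illustrative sketch, not part of this file: how expander-side code
   typically guards an operand with this predicate, forcing the value
   into a register when it is not directly usable.  force_reg is the
   existing helper from explow.c.  */
#if 0
static rtx
make_general_operand (x, mode)
     rtx x;
     enum machine_mode mode;
{
  if (! general_operand (x, mode))
    x = force_reg (mode, x);	/* Load the value into a fresh pseudo.  */
  return x;
}
#endif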
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}
/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      rtx sub = SUBREG_REG (op);

      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (sub) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (sub) == REG
	  && REGNO (sub) < FIRST_PSEUDO_REGISTER
	  && (TEST_HARD_REG_BIT
	      (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
	       REGNO (sub)))
	  && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (sub))
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (sub)) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      /* FLOAT_MODE subregs can't be paradoxical.  Combine will occasionally
	 create such rtl, and we must reject it.  */
      if (GET_MODE_CLASS (GET_MODE (op)) == MODE_FLOAT
	  && GET_MODE_SIZE (GET_MODE (op)) > GET_MODE_SIZE (GET_MODE (sub)))
	return 0;

      op = sub;
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}
/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (GET_CODE (op) == REG
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}
/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}
/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != CONST_INT)
    return 0;

  if (mode != VOIDmode
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  return 1;
}
/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}
/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}
/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      if (GET_CODE (op) == CONST_INT
	  && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}
/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
	return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
	  || GET_CODE (XEXP (op, 1)) != PLUS
	  || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
	  || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
	  || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
	  )
	return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}
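
/* Illustrative sketch, not part of this file: the canonical rtl that this
   predicate accepts on a STACK_GROWS_DOWNWARD target, built by hand for a
   SImode push with no rounding involved.  */
#if 0
static rtx
make_si_push_dest ()
{
  /* (mem:SI (pre_dec (reg sp))) -- the STACK_PUSH_CODE form.  */
  return gen_rtx_MEM (SImode,
		      gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));
}
#endif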
/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}
/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}
/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}
/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}
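
/* Illustrative sketch, not part of this file: the common idiom for asking
   whether an insn is an `asm' with operands, as used elsewhere in this
   file (e.g. in insn_invalid_p above).  */
#if 0
static int
insn_is_asm_p (insn)
     rtx insn;
{
  /* asm_noperands returns -1 for anything that is not such an asm.  */
  return asm_noperands (PATTERN (insn)) >= 0;
}
#endif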
/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
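
/* Illustrative sketch, not part of this file: retrieving just the
   constraints of an asm body, in the style of check_asm_operands above.
   Out-parameters that are not wanted are simply passed as NULL.  */
#if 0
static void
show_asm_constraints (body)
     rtx body;
{
  int n = asm_noperands (body);
  int i;
  const char **constraints;

  if (n <= 0)
    return;
  constraints = (const char **) alloca (n * sizeof (char *));
  decode_asm_operands (body, NULL, NULL, constraints, NULL);
  for (i = 0; i < n; i++)
    fprintf (stderr, "operand %d: \"%s\"\n", i, constraints[i]);
}
#endif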
/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
	{
	case '=':
	case '+':
	case '*':
	case '%':
	case '?':
	case '!':
	case '#':
	case '&':
	case ',':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  while (ISDIGIT (*constraint))
	    constraint++;
	  result = -1;
	  break;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    return 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    return 1;
	  break;

	case '>':
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    return 1;
	  break;

	case 'E':
	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE)
	    return 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	    return 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	    return 1;
	  break;

	case 's':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* FALLTHRU */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    return 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    return 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	    return 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	    return 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	    return 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	    return 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	    return 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	    return 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	    return 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	    return 1;
	  break;

	case 'X':
	  return 1;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    return 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		return 1;
	    }
#ifdef EXTRA_CONSTRAINT
	  if (EXTRA_CONSTRAINT (op, c))
	    return 1;
#endif
	  break;
	}
    }

  return result;
}
/* Given an rtx *P, if it is a sum containing an integer constant term,
   return the location (type rtx *) of the pointer to that constant term.
   Otherwise, return a null pointer.  */

rtx *
find_constant_term_loc (p)
     rtx *p;
{
  rtx *tem;
  enum rtx_code code = GET_CODE (*p);

  /* If *P IS such a constant term, P is its location.  */

  if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
      || code == CONST)
    return p;

  /* Otherwise, if not a sum, it has no constant term.  */

  if (GET_CODE (*p) != PLUS)
    return 0;

  /* If one of the summands is constant, return its location.  */

  if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
      && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
    return p;

  /* Otherwise, check each summand for containing a constant term.  */

  if (XEXP (*p, 0) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 0));
      if (tem != 0)
	return tem;
    }

  if (XEXP (*p, 1) != 0)
    {
      tem = find_constant_term_loc (&XEXP (*p, 1));
      if (tem != 0)
	return tem;
    }

  return 0;
}
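
/* Illustrative sketch, not part of this file: locating the constant
   displacement of a base+offset address such as (plus (reg) (const_int 4)).
   address_displacement is a hypothetical helper name.  */
#if 0
static HOST_WIDE_INT
address_displacement (addr)
     rtx addr;
{
  rtx *loc = find_constant_term_loc (&addr);

  /* Zero when no constant term is present, or when it is symbolic.  */
  return (loc != 0 && GET_CODE (*loc) == CONST_INT) ? INTVAL (*loc) : 0;
}
#endif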
/* Return 1 if OP is a memory reference
   whose address contains no side effects
   and remains valid after the addition
   of a positive integer less than the
   size of the object being referenced.

   We assume that the original address is valid and do not check it.

   This uses strict_memory_address_p as a subroutine, so
   don't use it before reload.  */

int
offsettable_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
}

/* Similar, but don't require a strictly valid mem ref:
   consider pseudo-regs valid as index or base regs.  */

int
offsettable_nonstrict_memref_p (op)
     rtx op;
{
  return ((GET_CODE (op) == MEM)
	  && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
}
/* Return 1 if Y is a memory address which contains no side effects
   and would remain valid after the addition of a positive integer
   less than the size of that mode.

   We assume that the original address is valid and do not check it.
   We do check that it is valid for narrower modes.

   If STRICTP is nonzero, we require a strictly valid address,
   for the sake of use in reload.c.  */

int
offsettable_address_p (strictp, mode, y)
     int strictp;
     enum machine_mode mode;
     rtx y;
{
  enum rtx_code ycode = GET_CODE (y);
  rtx z;
  rtx y1 = y;
  rtx *y2;
  int (*addressp) PARAMS ((enum machine_mode, rtx)) =
    (strictp ? strict_memory_address_p : memory_address_p);
  unsigned int mode_sz = GET_MODE_SIZE (mode);

  if (CONSTANT_ADDRESS_P (y))
    return 1;

  /* Adjusting an offsettable address involves changing to a narrower mode.
     Make sure that's OK.  */

  if (mode_dependent_address_p (y))
    return 0;

  /* ??? How much offset does an offsettable BLKmode reference need?
     Clearly that depends on the situation in which it's being used.
     However, the current situation in which we test 0xffffffff is
     less than ideal.  Caveat user.  */
  if (mode_sz == 0)
    mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;

  /* If the expression contains a constant term,
     see if it remains valid when max possible offset is added.  */

  if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
    {
      int good;

      y1 = *y2;
      *y2 = plus_constant (*y2, mode_sz - 1);
      /* Use QImode because an odd displacement may be automatically invalid
	 for any wider mode.  But it should be valid for a single byte.  */
      good = (*addressp) (QImode, y);

      /* In any case, restore old contents of memory.  */
      *y2 = y1;
      return good;
    }

  if (GET_RTX_CLASS (ycode) == 'a')
    return 0;

  /* The offset added here is chosen as the maximum offset that
     any instruction could need to add when operating on something
     of the specified mode.  We assume that if Y and Y+c are
     valid addresses then so is Y+d for all 0<d<c.  adjust_address will
     go inside a LO_SUM here, so we do so as well.  */
  if (GET_CODE (y) == LO_SUM
      && mode != BLKmode
      && mode_sz <= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
    z = gen_rtx_LO_SUM (GET_MODE (y), XEXP (y, 0),
			plus_constant (XEXP (y, 1), mode_sz - 1));
  else
    z = plus_constant (y, mode_sz - 1);

  /* Use QImode because an odd displacement may be automatically invalid
     for any wider mode.  But it should be valid for a single byte.  */
  return (*addressp) (QImode, z);
}
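
/* Illustrative sketch, not part of this file: a hypothetical check in the
   style of a multiword move splitter, which may only slice a MEM into
   word-sized pieces when its address is offsettable.  */
#if 0
static int
can_split_mem_p (mem)
     rtx mem;
{
  /* Nonstrict form, since pseudos may still be present before reload.  */
  return GET_CODE (mem) == MEM && offsettable_nonstrict_memref_p (mem);
}
#endif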
/* Return 1 if ADDR is an address-expression whose effect depends
   on the mode of the memory reference it is used in.

   Autoincrement addressing is a typical example of mode-dependence
   because the amount of the increment depends on the mode.  */

int
mode_dependent_address_p (addr)
     rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS.  */
{
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
  return 0;
  /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 win: ATTRIBUTE_UNUSED_LABEL
  return 1;
}
/* Return 1 if OP is a general operand
   other than a memory ref with a mode dependent address.  */

int
mode_independent_operand (op, mode)
     enum machine_mode mode;
     rtx op;
{
  rtx addr;

  if (! general_operand (op, mode))
    return 0;

  if (GET_CODE (op) != MEM)
    return 1;

  addr = XEXP (op, 0);
  GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
  return 1;
  /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS.  */
 lose: ATTRIBUTE_UNUSED_LABEL
  return 0;
}
/* Like extract_insn, but save the insn extracted and don't extract it again
   when called again for the same insn, expecting that recog_data still
   contains the valid information.  This is used primarily by the gen_attr
   infrastructure, which often extracts the same insn again and again.  */
void
extract_insn_cached (insn)
     rtx insn;
{
  if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
    return;
  extract_insn (insn);
  recog_data.insn = insn;
}

/* Do a cached extract_insn, constrain_operands and complain about failures.
   Used by insn_attrtab.  */
void
extract_constrain_insn_cached (insn)
     rtx insn;
{
  extract_insn_cached (insn);
  if (which_alternative == -1
      && !constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
2063 /* Do cached constrain_operand and complain about failures. */
2065 constrain_operands_cached (strict)
2066 int strict;
2068 if (which_alternative == -1)
2069 return constrain_operands (strict);
2070 else
2071 return 1;
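/* A hedged sketch of how the cached entry points are typically used by
   generated attribute code (the function name is hypothetical):  */
#if 0
static int
example_alternative_number (insn)
     rtx insn;
{
  /* Extracts and constrains at most once per insn.  */
  extract_constrain_insn_cached (insn);
  return which_alternative;
}
#endif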
2074 /* Analyze INSN and fill in recog_data. */
2076 void
2077 extract_insn (insn)
2078 rtx insn;
2080 int i;
2081 int icode;
2082 int noperands;
2083 rtx body = PATTERN (insn);
2085 recog_data.insn = NULL;
2086 recog_data.n_operands = 0;
2087 recog_data.n_alternatives = 0;
2088 recog_data.n_dups = 0;
2089 which_alternative = -1;
2091 switch (GET_CODE (body))
2093 case USE:
2094 case CLOBBER:
2095 case ASM_INPUT:
2096 case ADDR_VEC:
2097 case ADDR_DIFF_VEC:
2098 return;
2100 case SET:
2101 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2102 goto asm_insn;
2103 else
2104 goto normal_insn;
2105 case PARALLEL:
2106 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2107 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2108 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2109 goto asm_insn;
2110 else
2111 goto normal_insn;
2112 case ASM_OPERANDS:
2113 asm_insn:
2114 recog_data.n_operands = noperands = asm_noperands (body);
2115 if (noperands >= 0)
2117 /* This insn is an `asm' with operands. */
2119 /* expand_asm_operands makes sure there aren't too many operands. */
2120 if (noperands > MAX_RECOG_OPERANDS)
2121 abort ();
2123 /* Now get the operand values and constraints out of the insn. */
2124 decode_asm_operands (body, recog_data.operand,
2125 recog_data.operand_loc,
2126 recog_data.constraints,
2127 recog_data.operand_mode);
2128 if (noperands > 0)
2130 const char *p = recog_data.constraints[0];
2131 recog_data.n_alternatives = 1;
2132 while (*p)
2133 recog_data.n_alternatives += (*p++ == ',');
2135 break;
2137 fatal_insn_not_found (insn);
2139 default:
2140 normal_insn:
2141 /* Ordinary insn: recognize it, get the operands via insn_extract
2142 and get the constraints. */
2144 icode = recog_memoized (insn);
2145 if (icode < 0)
2146 fatal_insn_not_found (insn);
2148 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2149 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2150 recog_data.n_dups = insn_data[icode].n_dups;
2152 insn_extract (insn);
2154 for (i = 0; i < noperands; i++)
2156 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2157 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2158 /* VOIDmode match_operands get their mode from the real operand. */
2159 if (recog_data.operand_mode[i] == VOIDmode)
2160 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2163 for (i = 0; i < noperands; i++)
2164 recog_data.operand_type[i]
2165 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2166 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2167 : OP_IN);
2169 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2170 abort ();
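/* A minimal sketch, assuming INSN has already been emitted (the function
   name is hypothetical): after extract_insn, the operands, constraints
   and modes are all available through recog_data.  */
#if 0
static void
example_dump_operands (insn)
     rtx insn;
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    {
      fprintf (stderr, "operand %d (constraint `%s'):\n",
               i, recog_data.constraints[i]);
      debug_rtx (recog_data.operand[i]);
    }
}
#endif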
2173 /* After calling extract_insn, you can use this function to extract some
2174 information from the constraint strings into a more usable form.
2175 The collected data is stored in recog_op_alt. */
2176 void
2177 preprocess_constraints ()
2179 int i;
2181 memset (recog_op_alt, 0, sizeof recog_op_alt);
2182 for (i = 0; i < recog_data.n_operands; i++)
2184 int j;
2185 struct operand_alternative *op_alt;
2186 const char *p = recog_data.constraints[i];
2188 op_alt = recog_op_alt[i];
2190 for (j = 0; j < recog_data.n_alternatives; j++)
2192 op_alt[j].class = NO_REGS;
2193 op_alt[j].constraint = p;
2194 op_alt[j].matches = -1;
2195 op_alt[j].matched = -1;
2197 if (*p == '\0' || *p == ',')
2199 op_alt[j].anything_ok = 1;
2200 continue;
2203 for (;;)
2205 char c = *p++;
2206 if (c == '#')
2207 do
2208 c = *p++;
2209 while (c != ',' && c != '\0');
2210 if (c == ',' || c == '\0')
2211 break;
2213 switch (c)
2215 case '=': case '+': case '*': case '%':
2216 case 'E': case 'F': case 'G': case 'H':
2217 case 's': case 'i': case 'n':
2218 case 'I': case 'J': case 'K': case 'L':
2219 case 'M': case 'N': case 'O': case 'P':
2220 /* These don't say anything we care about. */
2221 break;
2223 case '?':
2224 op_alt[j].reject += 6;
2225 break;
2226 case '!':
2227 op_alt[j].reject += 600;
2228 break;
2229 case '&':
2230 op_alt[j].earlyclobber = 1;
2231 break;
2233 case '0': case '1': case '2': case '3': case '4':
2234 case '5': case '6': case '7': case '8': case '9':
2236 char *end;
2237 op_alt[j].matches = strtoul (p - 1, &end, 10);
2238 recog_op_alt[op_alt[j].matches][j].matched = i;
2239 p = end;
2241 break;
2243 case 'm':
2244 op_alt[j].memory_ok = 1;
2245 break;
2246 case '<':
2247 op_alt[j].decmem_ok = 1;
2248 break;
2249 case '>':
2250 op_alt[j].incmem_ok = 1;
2251 break;
2252 case 'V':
2253 op_alt[j].nonoffmem_ok = 1;
2254 break;
2255 case 'o':
2256 op_alt[j].offmem_ok = 1;
2257 break;
2258 case 'X':
2259 op_alt[j].anything_ok = 1;
2260 break;
2262 case 'p':
2263 op_alt[j].is_address = 1;
2264 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class]
2265 [(int) MODE_BASE_REG_CLASS (VOIDmode)];
2266 break;
2268 case 'g': case 'r':
2269 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2270 break;
2272 default:
2273 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char) c)];
2274 break;
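/* A sketch of consuming the data built above (hypothetical helper, not
   part of GCC): after extract_insn and preprocess_constraints, ask
   whether operand OPNO accepts memory in alternative ALT.  */
#if 0
static int
example_op_allows_mem (opno, alt)
     int opno, alt;
{
  const struct operand_alternative *oa = &recog_op_alt[opno][alt];
  return oa->memory_ok || oa->offmem_ok || oa->anything_ok;
}
#endif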
2281 /* Check the operands of an insn against the insn's operand constraints
2282 and return 1 if they are valid.
2283 The information about the insn's operands, constraints, operand modes
2284 etc. is obtained from the global variables set up by extract_insn.
2286 WHICH_ALTERNATIVE is set to a number which indicates which
2287 alternative of constraints was matched: 0 for the first alternative,
2288 1 for the next, etc.
2290 In addition, when two operands are required to match
2291 and it happens that the output operand is (reg) while the
2292 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2293 make the output operand look like the input.
2294 This is because the output operand is the one the template will print.
2296 This is used in final, just before printing the assembler code and by
2297 the routines that determine an insn's attribute.
2299 If STRICT is positive, it means that we have been
2300 called after reload has been completed. In that case, we must
2301 do all checks strictly. If it is zero, it means that we have been called
2302 before reload has completed. In that case, we first try to see if we can
2303 find an alternative that matches strictly. If not, we try again, this
2304 time assuming that reload will fix up the insn. This provides a "best
2305 guess" for the alternative and is used to compute attributes of insns prior
2306 to reload. A negative value of STRICT is used for this internal call. */
2308 struct funny_match
2310 int this, other;
2313 int
2314 constrain_operands (strict)
2315 int strict;
2317 const char *constraints[MAX_RECOG_OPERANDS];
2318 int matching_operands[MAX_RECOG_OPERANDS];
2319 int earlyclobber[MAX_RECOG_OPERANDS];
2320 int c;
2322 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2323 int funny_match_index;
2325 which_alternative = 0;
2326 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2327 return 1;
2329 for (c = 0; c < recog_data.n_operands; c++)
2331 constraints[c] = recog_data.constraints[c];
2332 matching_operands[c] = -1;
2335 do
2337 int opno;
2338 int lose = 0;
2339 funny_match_index = 0;
2341 for (opno = 0; opno < recog_data.n_operands; opno++)
2343 rtx op = recog_data.operand[opno];
2344 enum machine_mode mode = GET_MODE (op);
2345 const char *p = constraints[opno];
2346 int offset = 0;
2347 int win = 0;
2348 int val;
2350 earlyclobber[opno] = 0;
2352 /* A unary operator may be accepted by the predicate, but it
2353 is irrelevant for matching constraints. */
2354 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2355 op = XEXP (op, 0);
2357 if (GET_CODE (op) == SUBREG)
2359 if (GET_CODE (SUBREG_REG (op)) == REG
2360 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2361 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2362 GET_MODE (SUBREG_REG (op)),
2363 SUBREG_BYTE (op),
2364 GET_MODE (op));
2365 op = SUBREG_REG (op);
2368 /* An empty constraint or empty alternative
2369 allows anything which matched the pattern. */
2370 if (*p == 0 || *p == ',')
2371 win = 1;
2373 while (*p && (c = *p++) != ',')
2374 switch (c)
2376 case '?': case '!': case '*': case '%':
2377 case '=': case '+':
2378 break;
2380 case '#':
2381 /* Ignore rest of this alternative as far as
2382 constraint checking is concerned. */
2383 while (*p && *p != ',')
2384 p++;
2385 break;
2387 case '&':
2388 earlyclobber[opno] = 1;
2389 break;
2391 case '0': case '1': case '2': case '3': case '4':
2392 case '5': case '6': case '7': case '8': case '9':
2394 /* This operand must be the same as a previous one.
2395 This kind of constraint is used for instructions such
2396 as add when they take only two operands.
2398 Note that the lower-numbered operand is passed first.
2400 If we are not testing strictly, assume that this
2401 constraint will be satisfied. */
2403 char *end;
2404 int match;
2406 match = strtoul (p - 1, &end, 10);
2407 p = end;
2409 if (strict < 0)
2410 val = 1;
2411 else
2413 rtx op1 = recog_data.operand[match];
2414 rtx op2 = recog_data.operand[opno];
2416 /* A unary operator may be accepted by the predicate,
2417 but it is irrelevant for matching constraints. */
2418 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2419 op1 = XEXP (op1, 0);
2420 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2421 op2 = XEXP (op2, 0);
2423 val = operands_match_p (op1, op2);
2426 matching_operands[opno] = match;
2427 matching_operands[match] = opno;
2429 if (val != 0)
2430 win = 1;
2432 /* If output is *x and input is *--x, arrange later
2433 to change the output to *--x as well, since the
2434 output op is the one that will be printed. */
2435 if (val == 2 && strict > 0)
2437 funny_match[funny_match_index].this = opno;
2438 funny_match[funny_match_index++].other = match;
2441 break;
2443 case 'p':
2444 /* p is used for address_operands. When we are called by
2445 gen_reload, no one will have checked that the address is
2446 strictly valid, i.e., that all pseudos requiring hard regs
2447 have gotten them. */
2448 if (strict <= 0
2449 || (strict_memory_address_p (recog_data.operand_mode[opno],
2450 op)))
2451 win = 1;
2452 break;
2454 /* No need to check general_operand again;
2455 it was done in insn-recog.c. */
2456 case 'g':
2457 /* Anything goes unless it is a REG and really has a hard reg
2458 but the hard reg is not in the class GENERAL_REGS. */
2459 if (strict < 0
2460 || GENERAL_REGS == ALL_REGS
2461 || GET_CODE (op) != REG
2462 || (reload_in_progress
2463 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2464 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2465 win = 1;
2466 break;
2468 case 'X':
2469 /* This is used for a MATCH_SCRATCH in the cases when
2470 we don't actually need anything. So anything goes
2471 any time. */
2472 win = 1;
2473 break;
2475 case 'm':
2476 if (GET_CODE (op) == MEM
2477 /* Before reload, accept what reload can turn into mem. */
2478 || (strict < 0 && CONSTANT_P (op))
2479 /* During reload, accept a pseudo */
2480 || (reload_in_progress && GET_CODE (op) == REG
2481 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2482 win = 1;
2483 break;
2485 case '<':
2486 if (GET_CODE (op) == MEM
2487 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2488 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2489 win = 1;
2490 break;
2492 case '>':
2493 if (GET_CODE (op) == MEM
2494 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2495 || GET_CODE (XEXP (op, 0)) == POST_INC))
2496 win = 1;
2497 break;
2499 case 'E':
2500 case 'F':
2501 if (GET_CODE (op) == CONST_DOUBLE)
2502 win = 1;
2503 break;
2505 case 'G':
2506 case 'H':
2507 if (GET_CODE (op) == CONST_DOUBLE
2508 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2509 win = 1;
2510 break;
2512 case 's':
2513 if (GET_CODE (op) == CONST_INT
2514 || (GET_CODE (op) == CONST_DOUBLE
2515 && GET_MODE (op) == VOIDmode))
2516 break;
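/* Fall through: a symbolic constant is any constant other than
   a CONST_INT or a VOIDmode CONST_DOUBLE.  */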
2517 case 'i':
2518 if (CONSTANT_P (op))
2519 win = 1;
2520 break;
2522 case 'n':
2523 if (GET_CODE (op) == CONST_INT
2524 || (GET_CODE (op) == CONST_DOUBLE
2525 && GET_MODE (op) == VOIDmode))
2526 win = 1;
2527 break;
2529 case 'I':
2530 case 'J':
2531 case 'K':
2532 case 'L':
2533 case 'M':
2534 case 'N':
2535 case 'O':
2536 case 'P':
2537 if (GET_CODE (op) == CONST_INT
2538 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2539 win = 1;
2540 break;
2542 case 'V':
2543 if (GET_CODE (op) == MEM
2544 && ((strict > 0 && ! offsettable_memref_p (op))
2545 || (strict < 0
2546 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2547 || (reload_in_progress
2548 && !(GET_CODE (op) == REG
2549 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2550 win = 1;
2551 break;
2553 case 'o':
2554 if ((strict > 0 && offsettable_memref_p (op))
2555 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2556 /* Before reload, accept what reload can handle. */
2557 || (strict < 0
2558 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2559 /* During reload, accept a pseudo */
2560 || (reload_in_progress && GET_CODE (op) == REG
2561 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2562 win = 1;
2563 break;
2565 default:
2567 enum reg_class class;
2569 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2570 if (class != NO_REGS)
2572 if (strict < 0
2573 || (strict == 0
2574 && GET_CODE (op) == REG
2575 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2576 || (strict == 0 && GET_CODE (op) == SCRATCH)
2577 || (GET_CODE (op) == REG
2578 && reg_fits_class_p (op, class, offset, mode)))
2579 win = 1;
2581 #ifdef EXTRA_CONSTRAINT
2582 else if (EXTRA_CONSTRAINT (op, c))
2583 win = 1;
2584 #endif
2585 break;
2589 constraints[opno] = p;
2590 /* If this operand did not win somehow,
2591 this alternative loses. */
2592 if (! win)
2593 lose = 1;
2595 /* This alternative won; the operands are ok.
2596 Change whichever operands this alternative says to change. */
2597 if (! lose)
2599 int opno, eopno;
2601 /* See if any earlyclobber operand conflicts with some other
2602 operand. */
2604 if (strict > 0)
2605 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2606 /* Ignore earlyclobber operands now in memory,
2607 because we would often report failure when we have
2608 two memory operands, one of which was formerly a REG. */
2609 if (earlyclobber[eopno]
2610 && GET_CODE (recog_data.operand[eopno]) == REG)
2611 for (opno = 0; opno < recog_data.n_operands; opno++)
2612 if ((GET_CODE (recog_data.operand[opno]) == MEM
2613 || recog_data.operand_type[opno] != OP_OUT)
2614 && opno != eopno
2615 /* Ignore things like match_operator operands. */
2616 && *recog_data.constraints[opno] != 0
2617 && ! (matching_operands[opno] == eopno
2618 && operands_match_p (recog_data.operand[opno],
2619 recog_data.operand[eopno]))
2620 && ! safe_from_earlyclobber (recog_data.operand[opno],
2621 recog_data.operand[eopno]))
2622 lose = 1;
2624 if (! lose)
2626 while (--funny_match_index >= 0)
2628 recog_data.operand[funny_match[funny_match_index].other]
2629 = recog_data.operand[funny_match[funny_match_index].this];
2632 return 1;
2636 which_alternative++;
2638 while (which_alternative < recog_data.n_alternatives);
2640 which_alternative = -1;
2641 /* If we are about to reject this, but we are not testing strictly,
2642 try a very loose test.  Return failure only if that fails as well. */
2643 if (strict == 0)
2644 return constrain_operands (-1);
2645 else
2646 return 0;
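/* A minimal sketch of the STRICT convention described above (the function
   name is hypothetical): post-reload consumers check strictly, while
   pre-reload consumers pass zero and rely on the internal loose retry.  */
#if 0
static void
example_check_insn (insn)
     rtx insn;
{
  extract_insn (insn);
  if (! constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
#endif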
2649 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2650 is a hard reg in class CLASS when its regno is offset by OFFSET
2651 and changed to mode MODE.
2652 If REG occupies multiple hard regs, all of them must be in CLASS. */
2654 int
2655 reg_fits_class_p (operand, class, offset, mode)
2656 rtx operand;
2657 enum reg_class class;
2658 int offset;
2659 enum machine_mode mode;
2661 int regno = REGNO (operand);
2662 if (regno < FIRST_PSEUDO_REGISTER
2663 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2664 regno + offset))
2666 int sr;
2667 regno += offset;
2668 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2669 sr > 0; sr--)
2670 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2671 regno + sr))
2672 break;
2673 return sr == 0;
2676 return 0;
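/* Illustrative call (not from the original sources): does the DImode
   value starting at hard register REGNO lie entirely within
   GENERAL_REGS?  Every hard reg the value occupies must be in the class.  */
#if 0
static int
example_fits (regno)
     int regno;
{
  return reg_fits_class_p (gen_rtx_REG (DImode, regno),
                           GENERAL_REGS, 0, DImode);
}
#endif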
2679 /* Split a single instruction.  Helper function for split_all_insns.
2680 Return the last insn in the sequence if successful, or NULL if unsuccessful. */
2681 static rtx
2682 split_insn (insn)
2683 rtx insn;
2685 rtx set;
2686 if (!INSN_P (insn))
2688 /* Don't split no-op move insns. These should silently
2689 disappear later in final. Splitting such insns would
2690 break the code that handles REG_NO_CONFLICT blocks. */
2692 else if ((set = single_set (insn)) != NULL && set_noop_p (set))
2694 /* Nops get in the way while scheduling, so delete them
2695 now if register allocation has already been done. It
2696 is too risky to try to do this before register
2697 allocation, and there are unlikely to be very many
2698 nops by then anyway. */
2699 if (reload_completed)
2700 delete_insn_and_edges (insn);
2702 else
2704 /* Split insns here to get max fine-grain parallelism. */
2705 rtx first = PREV_INSN (insn);
2706 rtx last = try_split (PATTERN (insn), insn, 1);
2708 if (last != insn)
2710 /* try_split returns the NOTE that INSN became. */
2711 PUT_CODE (insn, NOTE);
2712 NOTE_SOURCE_FILE (insn) = 0;
2713 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2715 /* ??? Coddle to md files that generate subregs in post-
2716 reload splitters instead of computing the proper
2717 hard register. */
2718 if (reload_completed && first != last)
2720 first = NEXT_INSN (first);
2721 while (1)
2723 if (INSN_P (first))
2724 cleanup_subreg_operands (first);
2725 if (first == last)
2726 break;
2727 first = NEXT_INSN (first);
2730 return last;
2733 return NULL_RTX;
2735 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2737 void
2738 split_all_insns (upd_life)
2739 int upd_life;
2741 sbitmap blocks;
2742 int changed;
2743 basic_block bb;
2745 blocks = sbitmap_alloc (last_basic_block);
2746 sbitmap_zero (blocks);
2747 changed = 0;
2749 FOR_EACH_BB_REVERSE (bb)
2751 rtx insn, next;
2752 bool finish = false;
2754 for (insn = bb->head; !finish ; insn = next)
2756 rtx last;
2758 /* Can't use `next_real_insn', because that might skip over
2759 CODE_LABELs and out of the current basic block. */
2760 next = NEXT_INSN (insn);
2761 finish = (insn == bb->end);
2762 last = split_insn (insn);
2763 if (last)
2765 /* The split sequence may include a barrier, but the
2766 BB boundary we are interested in will be set to the
2767 previous one. */
2769 while (GET_CODE (last) == BARRIER)
2770 last = PREV_INSN (last);
2771 SET_BIT (blocks, bb->index);
2772 changed = 1;
2773 insn = last;
2778 if (changed)
2780 find_many_sub_basic_blocks (blocks);
2783 if (changed && upd_life)
2785 count_or_remove_death_notes (blocks, 1);
2786 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2788 #ifdef ENABLE_CHECKING
2789 verify_flow_info ();
2790 #endif
2792 sbitmap_free (blocks);
2795 /* Same as split_all_insns, but do not expect the CFG to be available.
2796 Used by machine-dependent reorg passes. */
2798 void
2799 split_all_insns_noflow ()
2801 rtx next, insn;
2803 for (insn = get_insns (); insn; insn = next)
2805 next = NEXT_INSN (insn);
2806 split_insn (insn);
2808 return;
2811 #ifdef HAVE_peephole2
2812 struct peep2_insn_data
2814 rtx insn;
2815 regset live_before;
2818 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2819 static int peep2_current;
2821 /* A non-insn marker indicating the last insn of the block.
2822 The live_before regset for this element is correct, indicating
2823 global_live_at_end for the block. */
2824 #define PEEP2_EOB pc_rtx
2826 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2827 does not exist. Used by the recognizer to find the next insn to match
2828 in a multi-insn pattern. */
2830 rtx
2831 peep2_next_insn (n)
2832 int n;
2834 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2835 abort ();
2837 n += peep2_current;
2838 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2839 n -= MAX_INSNS_PER_PEEP2 + 1;
2841 if (peep2_insn_data[n].insn == PEEP2_EOB)
2842 return NULL_RTX;
2843 return peep2_insn_data[n].insn;
2846 /* Return true if REGNO is dead before the Nth non-note insn
2847 after `current'. */
2849 int
2850 peep2_regno_dead_p (ofs, regno)
2851 int ofs;
2852 int regno;
2854 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2855 abort ();
2857 ofs += peep2_current;
2858 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2859 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2861 if (peep2_insn_data[ofs].insn == NULL_RTX)
2862 abort ();
2864 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2867 /* Similarly for a REG. */
2869 int
2870 peep2_reg_dead_p (ofs, reg)
2871 int ofs;
2872 rtx reg;
2874 int regno, n;
2876 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2877 abort ();
2879 ofs += peep2_current;
2880 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2881 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2883 if (peep2_insn_data[ofs].insn == NULL_RTX)
2884 abort ();
2886 regno = REGNO (reg);
2887 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2888 while (--n >= 0)
2889 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2890 return 0;
2891 return 1;
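/* These predicates are normally called from define_peephole2 conditions
   in a machine description; a hedged sketch (the pattern is hypothetical):

       (define_peephole2
         [(set (match_operand:SI 0 "register_operand" "")
               (match_operand:SI 1 "register_operand" ""))]
         "peep2_reg_dead_p (1, operands[1])"
         [...])

   i.e. match only if operand 1 is dead after the first matched insn.  */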
2894 /* Try to find a hard register of mode MODE, matching the register class in
2895 CLASS_STR, which is available from the beginning of the insn in peephole
2896 slot FROM until the end of the insn in slot TO; the slot numbering is the
2897 same as for peep2_next_insn.
2899 Registers that already have bits set in REG_SET will not be considered.
2901 If an appropriate register is available, it will be returned and the
2902 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2903 returned. */
2905 rtx
2906 peep2_find_free_register (from, to, class_str, mode, reg_set)
2907 int from, to;
2908 const char *class_str;
2909 enum machine_mode mode;
2910 HARD_REG_SET *reg_set;
2912 static int search_ofs;
2913 enum reg_class class;
2914 HARD_REG_SET live;
2915 int i;
2917 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2918 abort ();
2920 from += peep2_current;
2921 if (from >= MAX_INSNS_PER_PEEP2 + 1)
2922 from -= MAX_INSNS_PER_PEEP2 + 1;
2923 to += peep2_current;
2924 if (to >= MAX_INSNS_PER_PEEP2 + 1)
2925 to -= MAX_INSNS_PER_PEEP2 + 1;
2927 if (peep2_insn_data[from].insn == NULL_RTX)
2928 abort ();
2929 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
2931 while (from != to)
2933 HARD_REG_SET this_live;
2935 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
2936 from = 0;
2937 if (peep2_insn_data[from].insn == NULL_RTX)
2938 abort ();
2939 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
2940 IOR_HARD_REG_SET (live, this_live);
2943 class = (class_str[0] == 'r' ? GENERAL_REGS
2944 : REG_CLASS_FROM_LETTER (class_str[0]));
2946 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2948 int raw_regno, regno, success, j;
2950 /* Distribute the free registers as much as possible. */
2951 raw_regno = search_ofs + i;
2952 if (raw_regno >= FIRST_PSEUDO_REGISTER)
2953 raw_regno -= FIRST_PSEUDO_REGISTER;
2954 #ifdef REG_ALLOC_ORDER
2955 regno = reg_alloc_order[raw_regno];
2956 #else
2957 regno = raw_regno;
2958 #endif
2960 /* Don't allocate fixed registers. */
2961 if (fixed_regs[regno])
2962 continue;
2963 /* Make sure the register is of the right class. */
2964 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
2965 continue;
2966 /* And can support the mode we need. */
2967 if (! HARD_REGNO_MODE_OK (regno, mode))
2968 continue;
2969 /* And that we don't create an extra save/restore. */
2970 if (! call_used_regs[regno] && ! regs_ever_live[regno])
2971 continue;
2972 /* And we don't clobber traceback for noreturn functions. */
2973 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
2974 && (! reload_completed || frame_pointer_needed))
2975 continue;
2977 success = 1;
2978 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2980 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
2981 || TEST_HARD_REG_BIT (live, regno + j))
2983 success = 0;
2984 break;
2987 if (success)
2989 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
2990 SET_HARD_REG_BIT (*reg_set, regno + j);
2992 /* Start the next search with the next register. */
2993 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
2994 raw_regno = 0;
2995 search_ofs = raw_regno;
2997 return gen_rtx_REG (mode, regno);
3001 search_ofs = 0;
3002 return NULL_RTX;
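/* A hedged sketch of a define_peephole2 body using this routine, in the
   style of several back ends (the surrounding pattern is omitted; FAIL is
   only meaningful inside a generated pattern body):  */
#if 0
  HARD_REG_SET used;
  rtx scratch;

  CLEAR_HARD_REG_SET (used);
  /* Look for a general register free from the first matched insn
     (offset 0) through the second (offset 1).  */
  scratch = peep2_find_free_register (0, 1, "r", SImode, &used);
  if (scratch == NULL_RTX)
    FAIL;
#endif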
3005 /* Perform the peephole2 optimization pass. */
3007 void
3008 peephole2_optimize (dump_file)
3009 FILE *dump_file ATTRIBUTE_UNUSED;
3011 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3012 rtx insn, prev;
3013 regset live;
3014 int i;
3015 basic_block bb;
3016 #ifdef HAVE_conditional_execution
3017 sbitmap blocks;
3018 bool changed;
3019 #endif
3020 bool do_cleanup_cfg = false;
3021 bool do_rebuild_jump_labels = false;
3023 /* Initialize the regsets we're going to use. */
3024 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3025 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3026 live = INITIALIZE_REG_SET (rs_heads[i]);
3028 #ifdef HAVE_conditional_execution
3029 blocks = sbitmap_alloc (last_basic_block);
3030 sbitmap_zero (blocks);
3031 changed = false;
3032 #else
3033 count_or_remove_death_notes (NULL, 1);
3034 #endif
3036 FOR_EACH_BB_REVERSE (bb)
3038 struct propagate_block_info *pbi;
3040 /* Indicate that all slots except the last hold invalid data. */
3041 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3042 peep2_insn_data[i].insn = NULL_RTX;
3044 /* Indicate that the last slot contains live_after data. */
3045 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3046 peep2_current = MAX_INSNS_PER_PEEP2;
3048 /* Start up propagation. */
3049 COPY_REG_SET (live, bb->global_live_at_end);
3050 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3052 #ifdef HAVE_conditional_execution
3053 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3054 #else
3055 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3056 #endif
3058 for (insn = bb->end; ; insn = prev)
3060 prev = PREV_INSN (insn);
3061 if (INSN_P (insn))
3063 rtx try, before_try, x;
3064 int match_len;
3065 rtx note;
3066 bool was_call = false;
3068 /* Record this insn. */
3069 if (--peep2_current < 0)
3070 peep2_current = MAX_INSNS_PER_PEEP2;
3071 peep2_insn_data[peep2_current].insn = insn;
3072 propagate_one_insn (pbi, insn);
3073 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3075 /* Match the peephole. */
3076 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3077 if (try != NULL)
3079 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3080 in the new sequence TRY and copy our CALL_INSN_FUNCTION_USAGE
3081 and other cfg-related call notes to it. */
3082 for (i = 0; i <= match_len; ++i)
3084 int j;
3085 rtx old_insn, new_insn, note;
3087 j = i + peep2_current;
3088 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3089 j -= MAX_INSNS_PER_PEEP2 + 1;
3090 old_insn = peep2_insn_data[j].insn;
3091 if (GET_CODE (old_insn) != CALL_INSN)
3092 continue;
3093 was_call = true;
3095 new_insn = try;
3096 while (new_insn != NULL_RTX)
3098 if (GET_CODE (new_insn) == CALL_INSN)
3099 break;
3100 new_insn = NEXT_INSN (new_insn);
3103 if (new_insn == NULL_RTX)
3104 abort ();
3106 CALL_INSN_FUNCTION_USAGE (new_insn)
3107 = CALL_INSN_FUNCTION_USAGE (old_insn);
3109 for (note = REG_NOTES (old_insn);
3110 note;
3111 note = XEXP (note, 1))
3112 switch (REG_NOTE_KIND (note))
3114 case REG_NORETURN:
3115 case REG_SETJMP:
3116 case REG_ALWAYS_RETURN:
3117 REG_NOTES (new_insn)
3118 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3119 XEXP (note, 0),
3120 REG_NOTES (new_insn));
3121 default:
3122 /* Discard all other reg notes. */
3123 break;
3126 /* Croak if there is another call in the sequence. */
3127 while (++i <= match_len)
3129 j = i + peep2_current;
3130 if (j >= MAX_INSNS_PER_PEEP2 + 1)
3131 j -= MAX_INSNS_PER_PEEP2 + 1;
3132 old_insn = peep2_insn_data[j].insn;
3133 if (GET_CODE (old_insn) == CALL_INSN)
3134 abort ();
3136 break;
3139 i = match_len + peep2_current;
3140 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3141 i -= MAX_INSNS_PER_PEEP2 + 1;
3143 note = find_reg_note (peep2_insn_data[i].insn,
3144 REG_EH_REGION, NULL_RTX);
3146 /* Replace the old sequence with the new. */
3147 try = emit_insn_after_scope (try, peep2_insn_data[i].insn,
3148 INSN_SCOPE (peep2_insn_data[i].insn));
3149 before_try = PREV_INSN (insn);
3150 delete_insn_chain (insn, peep2_insn_data[i].insn);
3152 /* Re-insert the EH_REGION notes. */
3153 if (note || (was_call && nonlocal_goto_handler_labels))
3155 edge eh_edge;
3157 for (eh_edge = bb->succ; eh_edge
3158 ; eh_edge = eh_edge->succ_next)
3159 if (eh_edge->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
3160 break;
3162 for (x = try ; x != before_try ; x = PREV_INSN (x))
3163 if (GET_CODE (x) == CALL_INSN
3164 || (flag_non_call_exceptions
3165 && may_trap_p (PATTERN (x))
3166 && !find_reg_note (x, REG_EH_REGION, NULL)))
3168 if (note)
3169 REG_NOTES (x)
3170 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3171 XEXP (note, 0),
3172 REG_NOTES (x));
3174 if (x != bb->end && eh_edge)
3176 edge nfte, nehe;
3177 int flags;
3179 nfte = split_block (bb, x);
3180 flags = (eh_edge->flags
3181 & (EDGE_EH | EDGE_ABNORMAL));
3182 if (GET_CODE (x) == CALL_INSN)
3183 flags |= EDGE_ABNORMAL_CALL;
3184 nehe = make_edge (nfte->src, eh_edge->dest,
3185 flags);
3187 nehe->probability = eh_edge->probability;
3188 nfte->probability
3189 = REG_BR_PROB_BASE - nehe->probability;
3191 do_cleanup_cfg |= purge_dead_edges (nfte->dest);
3192 #ifdef HAVE_conditional_execution
3193 SET_BIT (blocks, nfte->dest->index);
3194 changed = true;
3195 #endif
3196 bb = nfte->src;
3197 eh_edge = nehe;
3201 /* Converting a possibly trapping insn to a non-trapping one is
3202 possible; zap dummy outgoing edges. */
3203 do_cleanup_cfg |= purge_dead_edges (bb);
3206 #ifdef HAVE_conditional_execution
3207 /* With conditional execution, we cannot back up the
3208 live information so easily, since the conditional
3209 death data structures are not so self-contained.
3210 So record that we've made a modification to this
3211 block and update life information at the end. */
3212 SET_BIT (blocks, bb->index);
3213 changed = true;
3215 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3216 peep2_insn_data[i].insn = NULL_RTX;
3217 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3218 #else
3219 /* Back up lifetime information past the end of the
3220 newly created sequence. */
3221 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3222 i = 0;
3223 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3225 /* Update life information for the new sequence. */
3226 x = try;
3227 do
3229 if (INSN_P (x))
3231 if (--i < 0)
3232 i = MAX_INSNS_PER_PEEP2;
3233 peep2_insn_data[i].insn = x;
3234 propagate_one_insn (pbi, x);
3235 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3237 x = PREV_INSN (x);
3239 while (x != prev);
3241 /* ??? Should verify that LIVE now matches what we
3242 had before the new sequence. */
3244 peep2_current = i;
3245 #endif
3247 /* If we generated a jump instruction, it won't have
3248 JUMP_LABEL set. Recompute after we're done. */
3249 for (x = try; x != before_try; x = PREV_INSN (x))
3250 if (GET_CODE (x) == JUMP_INSN)
3252 do_rebuild_jump_labels = true;
3253 break;
3258 if (insn == bb->head)
3259 break;
3262 free_propagate_block_info (pbi);
3265 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3266 FREE_REG_SET (peep2_insn_data[i].live_before);
3267 FREE_REG_SET (live);
3269 if (do_rebuild_jump_labels)
3270 rebuild_jump_labels (get_insns ());
3272 /* If we eliminated EH edges, we may be able to merge blocks. Further,
3273 we've changed global life since exception handlers are no longer
3274 reachable. */
3275 if (do_cleanup_cfg)
3277 cleanup_cfg (0);
3278 update_life_info (0, UPDATE_LIFE_GLOBAL_RM_NOTES, PROP_DEATH_NOTES);
3280 #ifdef HAVE_conditional_execution
3281 else
3283 count_or_remove_death_notes (blocks, 1);
3284 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3286 sbitmap_free (blocks);
3287 #endif
3289 #endif /* HAVE_peephole2 */
3291 /* Common predicates for use with define_bypass. */
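/* For example, a machine description might declare (the reservation
   names are hypothetical):

       (define_bypass 1 "my_alu" "my_store" "store_data_bypass_p")

   meaning an ALU result is forwarded to a dependent store in one cycle,
   but only when the dependency is on the stored data rather than on the
   address.  */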
3293 /* True if the dependency between OUT_INSN and IN_INSN is on the store
3294 data, not the address operand(s), of the store.  IN_INSN must be
3295 single_set. OUT_INSN must be either a single_set or a PARALLEL with
3296 SETs inside. */
3298 int
3299 store_data_bypass_p (out_insn, in_insn)
3300 rtx out_insn, in_insn;
3302 rtx out_set, in_set;
3304 in_set = single_set (in_insn);
3305 if (! in_set)
3306 abort ();
3308 if (GET_CODE (SET_DEST (in_set)) != MEM)
3309 return false;
3311 out_set = single_set (out_insn);
3312 if (out_set)
3314 if (reg_mentioned_p (SET_DEST (out_set), SET_DEST (in_set)))
3315 return false;
3317 else
3319 rtx out_pat;
3320 int i;
3322 out_pat = PATTERN (out_insn);
3323 if (GET_CODE (out_pat) != PARALLEL)
3324 abort ();
3326 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3328 rtx exp = XVECEXP (out_pat, 0, i);
3330 if (GET_CODE (exp) == CLOBBER)
3331 continue;
3333 if (GET_CODE (exp) != SET)
3334 abort ();
3336 if (reg_mentioned_p (SET_DEST (exp), SET_DEST (in_set)))
3337 return false;
3341 return true;
3344 /* True if the dependency between OUT_INSN and IN_INSN is in the IF_THEN_ELSE
3345 condition, and not the THEN or ELSE branch.  OUT_INSN may be either a single
3346 set or multiple sets; IN_INSN should be a single_set for the predicate to
3347 hold, but for convenience of insn categorization may be any JUMP or CALL insn. */
3349 int
3350 if_test_bypass_p (out_insn, in_insn)
3351 rtx out_insn, in_insn;
3353 rtx out_set, in_set;
3355 in_set = single_set (in_insn);
3356 if (! in_set)
3358 if (GET_CODE (in_insn) == JUMP_INSN || GET_CODE (in_insn) == CALL_INSN)
3359 return false;
3360 abort ();
3363 if (GET_CODE (SET_SRC (in_set)) != IF_THEN_ELSE)
3364 return false;
3365 in_set = SET_SRC (in_set);
3367 out_set = single_set (out_insn);
3368 if (out_set)
3370 if (reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 1))
3371 || reg_mentioned_p (SET_DEST (out_set), XEXP (in_set, 2)))
3372 return false;
3374 else
3376 rtx out_pat;
3377 int i;
3379 out_pat = PATTERN (out_insn);
3380 if (GET_CODE (out_pat) != PARALLEL)
3381 abort ();
3383 for (i = 0; i < XVECLEN (out_pat, 0); i++)
3385 rtx exp = XVECEXP (out_pat, 0, i);
3387 if (GET_CODE (exp) == CLOBBER)
3388 continue;
3390 if (GET_CODE (exp) != SET)
3391 abort ();
3393 if (reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 1))
3394 || reg_mentioned_p (SET_DEST (exp), XEXP (in_set, 2)))
3395 return false;
3399 return true;