import of gcc-2.8
[official-gcc.git] / gcc / jump.c
blob00e31cf22ec4bfb8a5ba7960cedfc5b1087e16c4
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 88, 89, 91-96, 1997 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
54 #include "config.h"
55 #include <stdio.h>
56 #include "rtl.h"
57 #include "flags.h"
58 #include "hard-reg-set.h"
59 #include "regs.h"
60 #include "insn-config.h"
61 #include "insn-flags.h"
62 #include "recog.h"
63 #include "expr.h"
64 #include "real.h"
65 #include "except.h"
67 /* ??? Eventually must record somehow the labels used by jumps
68 from nested functions. */
69 /* Pre-record the next or previous real insn for each label?
70 No, this pass is very fast anyway. */
71 /* Condense consecutive labels?
72 This would make life analysis faster, maybe. */
73 /* Optimize jump y; x: ... y: jumpif... x?
74 Don't know if it is worth bothering with. */
75 /* Optimize two cases of conditional jump to conditional jump?
76 This can never delete any instruction or make anything dead,
77 or even change what is live at any point.
78 So perhaps let combiner do it. */
80 /* Vector indexed by uid.
81 For each CODE_LABEL, index by its uid to get first unconditional jump
82 that jumps to the label.
83 For each JUMP_INSN, index by its uid to get the next unconditional jump
84 that jumps to the same label.
85 Element 0 is the start of a chain of all return insns.
86 (It is safe to use element 0 because insn uid 0 is not used.) */
88 static rtx *jump_chain;
90 /* List of labels referred to from initializers.
91 These can never be deleted. */
92 rtx forced_labels;
94 /* Maximum index in jump_chain. */
96 static int max_jump_chain;
98 /* Set nonzero by jump_optimize if control can fall through
99 to the end of the function. */
100 int can_reach_end;
102 /* Indicates whether death notes are significant in cross jump analysis.
103 Normally they are not significant, because if A and B jump to C,
104 and R dies in A, it must die in B. But this might not be true after
105 stack register conversion, and we must compare death notes in that
106 case. */
108 static int cross_jump_death_matters = 0;
110 static int duplicate_loop_exit_test PROTO((rtx));
111 static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
112 static void do_cross_jump PROTO((rtx, rtx, rtx));
113 static int jump_back_p PROTO((rtx, rtx));
114 static int tension_vector_labels PROTO((rtx, int));
115 static void mark_jump_label PROTO((rtx, rtx, int));
116 static void delete_computation PROTO((rtx));
117 static void delete_from_jump_chain PROTO((rtx));
118 static int delete_labelref_insn PROTO((rtx, rtx, int));
119 static void redirect_tablejump PROTO((rtx, rtx));
120 static rtx find_insert_position PROTO((rtx, rtx));
122 /* Delete no-op jumps and optimize jumps to jumps
123 and jumps around jumps.
124 Delete unused labels and unreachable code.
126 If CROSS_JUMP is 1, detect matching code
127 before a jump and its destination and unify them.
128 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
130 If NOOP_MOVES is nonzero, delete no-op move insns.
132 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
133 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
135 If `optimize' is zero, don't change any code,
136 just determine whether control drops off the end of the function.
137 This case occurs when we have -W and not -O.
138 It works because `delete_insn' checks the value of `optimize'
139 and refrains from actually deleting when that is 0. */
141 void
142 jump_optimize (f, cross_jump, noop_moves, after_regscan)
143 rtx f;
144 int cross_jump;
145 int noop_moves;
146 int after_regscan;
148 register rtx insn, next, note;
149 int changed;
150 int first = 1;
151 int max_uid = 0;
152 rtx last_insn;
154 cross_jump_death_matters = (cross_jump == 2);
156 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
157 notes whose labels don't occur in the insn any more. */
159 for (insn = f; insn; insn = NEXT_INSN (insn))
161 if (GET_CODE (insn) == CODE_LABEL)
162 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
163 else if (GET_CODE (insn) == JUMP_INSN)
164 JUMP_LABEL (insn) = 0;
165 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
166 for (note = REG_NOTES (insn); note; note = next)
168 next = XEXP (note, 1);
169 if (REG_NOTE_KIND (note) == REG_LABEL
170 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
171 remove_note (insn, note);
174 if (INSN_UID (insn) > max_uid)
175 max_uid = INSN_UID (insn);
178 max_uid++;
180 /* Delete insns following barriers, up to next label. */
182 for (insn = f; insn;)
184 if (GET_CODE (insn) == BARRIER)
186 insn = NEXT_INSN (insn);
187 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
189 if (GET_CODE (insn) == NOTE
190 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
191 insn = NEXT_INSN (insn);
192 else
193 insn = delete_insn (insn);
195 /* INSN is now the code_label. */
197 else
198 insn = NEXT_INSN (insn);
201 /* Leave some extra room for labels and duplicate exit test insns
202 we make. */
203 max_jump_chain = max_uid * 14 / 10;
204 jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
205 bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
207 /* Mark the label each jump jumps to.
208 Combine consecutive labels, and count uses of labels.
210 For each label, make a chain (using `jump_chain')
211 of all the *unconditional* jumps that jump to it;
212 also make a chain of all returns. */
214 for (insn = f; insn; insn = NEXT_INSN (insn))
215 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
216 && ! INSN_DELETED_P (insn))
218 mark_jump_label (PATTERN (insn), insn, cross_jump);
219 if (GET_CODE (insn) == JUMP_INSN)
221 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
223 jump_chain[INSN_UID (insn)]
224 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
225 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
227 if (GET_CODE (PATTERN (insn)) == RETURN)
229 jump_chain[INSN_UID (insn)] = jump_chain[0];
230 jump_chain[0] = insn;
235 /* Keep track of labels used from static data;
236 they cannot ever be deleted. */
238 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
239 LABEL_NUSES (XEXP (insn, 0))++;
241 check_exception_handler_labels ();
243 /* Keep track of labels used for marking handlers for exception
244 regions; they cannot usually be deleted. */
246 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
247 LABEL_NUSES (XEXP (insn, 0))++;
249 exception_optimize ();
251 /* Delete all labels already not referenced.
252 Also find the last insn. */
254 last_insn = 0;
255 for (insn = f; insn; )
257 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
258 insn = delete_insn (insn);
259 else
261 last_insn = insn;
262 insn = NEXT_INSN (insn);
266 if (!optimize)
268 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
269 If so record that this function can drop off the end. */
271 insn = last_insn;
273 int n_labels = 1;
274 while (insn
275 /* One label can follow the end-note: the return label. */
276 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
277 /* Ordinary insns can follow it if returning a structure. */
278 || GET_CODE (insn) == INSN
279 /* If machine uses explicit RETURN insns, no epilogue,
280 then one of them follows the note. */
281 || (GET_CODE (insn) == JUMP_INSN
282 && GET_CODE (PATTERN (insn)) == RETURN)
283 /* A barrier can follow the return insn. */
284 || GET_CODE (insn) == BARRIER
285 /* Other kinds of notes can follow also. */
286 || (GET_CODE (insn) == NOTE
287 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
288 insn = PREV_INSN (insn);
291 /* Report if control can fall through at the end of the function. */
292 if (insn && GET_CODE (insn) == NOTE
293 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
294 && ! INSN_DELETED_P (insn))
295 can_reach_end = 1;
297 /* Zero the "deleted" flag of all the "deleted" insns. */
298 for (insn = f; insn; insn = NEXT_INSN (insn))
299 INSN_DELETED_P (insn) = 0;
300 return;
303 #ifdef HAVE_return
304 if (HAVE_return)
306 /* If we fall through to the epilogue, see if we can insert a RETURN insn
307 in front of it. If the machine allows it at this point (we might be
308 after reload for a leaf routine), it will improve optimization for it
309 to be there. */
310 insn = get_last_insn ();
311 while (insn && GET_CODE (insn) == NOTE)
312 insn = PREV_INSN (insn);
314 if (insn && GET_CODE (insn) != BARRIER)
316 emit_jump_insn (gen_return ());
317 emit_barrier ();
320 #endif
322 if (noop_moves)
323 for (insn = f; insn; )
325 next = NEXT_INSN (insn);
327 if (GET_CODE (insn) == INSN)
329 register rtx body = PATTERN (insn);
331 /* Combine stack_adjusts with following push_insns. */
332 #ifdef PUSH_ROUNDING
333 if (GET_CODE (body) == SET
334 && SET_DEST (body) == stack_pointer_rtx
335 && GET_CODE (SET_SRC (body)) == PLUS
336 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
337 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
338 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
340 rtx p;
341 rtx stack_adjust_insn = insn;
342 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
343 int total_pushed = 0;
344 int pushes = 0;
346 /* Find all successive push insns. */
347 p = insn;
348 /* Don't convert more than three pushes;
349 that starts adding too many displaced addresses
350 and the whole thing starts becoming a losing
351 proposition. */
352 while (pushes < 3)
354 rtx pbody, dest;
355 p = next_nonnote_insn (p);
356 if (p == 0 || GET_CODE (p) != INSN)
357 break;
358 pbody = PATTERN (p);
359 if (GET_CODE (pbody) != SET)
360 break;
361 dest = SET_DEST (pbody);
362 /* Allow a no-op move between the adjust and the push. */
363 if (GET_CODE (dest) == REG
364 && GET_CODE (SET_SRC (pbody)) == REG
365 && REGNO (dest) == REGNO (SET_SRC (pbody)))
366 continue;
367 if (! (GET_CODE (dest) == MEM
368 && GET_CODE (XEXP (dest, 0)) == POST_INC
369 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
370 break;
371 pushes++;
372 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
373 > stack_adjust_amount)
374 break;
375 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
378 /* Discard the amount pushed from the stack adjust;
379 maybe eliminate it entirely. */
380 if (total_pushed >= stack_adjust_amount)
382 delete_computation (stack_adjust_insn);
383 total_pushed = stack_adjust_amount;
385 else
386 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
387 = GEN_INT (stack_adjust_amount - total_pushed);
389 /* Change the appropriate push insns to ordinary stores. */
390 p = insn;
391 while (total_pushed > 0)
393 rtx pbody, dest;
394 p = next_nonnote_insn (p);
395 if (GET_CODE (p) != INSN)
396 break;
397 pbody = PATTERN (p);
398 if (GET_CODE (pbody) == SET)
399 break;
400 dest = SET_DEST (pbody);
401 if (! (GET_CODE (dest) == MEM
402 && GET_CODE (XEXP (dest, 0)) == POST_INC
403 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
404 break;
405 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
406 /* If this push doesn't fully fit in the space
407 of the stack adjust that we deleted,
408 make another stack adjust here for what we
409 didn't use up. There should be peepholes
410 to recognize the resulting sequence of insns. */
411 if (total_pushed < 0)
413 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
414 GEN_INT (- total_pushed)),
416 break;
418 XEXP (dest, 0)
419 = plus_constant (stack_pointer_rtx, total_pushed);
422 #endif
424 /* Detect and delete no-op move instructions
425 resulting from not allocating a parameter in a register. */
427 if (GET_CODE (body) == SET
428 && (SET_DEST (body) == SET_SRC (body)
429 || (GET_CODE (SET_DEST (body)) == MEM
430 && GET_CODE (SET_SRC (body)) == MEM
431 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
432 && ! (GET_CODE (SET_DEST (body)) == MEM
433 && MEM_VOLATILE_P (SET_DEST (body)))
434 && ! (GET_CODE (SET_SRC (body)) == MEM
435 && MEM_VOLATILE_P (SET_SRC (body))))
436 delete_computation (insn);
438 /* Detect and ignore no-op move instructions
439 resulting from smart or fortuitous register allocation. */
441 else if (GET_CODE (body) == SET)
443 int sreg = true_regnum (SET_SRC (body));
444 int dreg = true_regnum (SET_DEST (body));
446 if (sreg == dreg && sreg >= 0)
447 delete_insn (insn);
448 else if (sreg >= 0 && dreg >= 0)
450 rtx trial;
451 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
452 sreg, NULL_PTR, dreg,
453 GET_MODE (SET_SRC (body)));
455 if (tem != 0
456 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
458 /* DREG may have been the target of a REG_DEAD note in
459 the insn which makes INSN redundant. If so, reorg
460 would still think it is dead. So search for such a
461 note and delete it if we find it. */
462 if (! find_regno_note (insn, REG_UNUSED, dreg))
463 for (trial = prev_nonnote_insn (insn);
464 trial && GET_CODE (trial) != CODE_LABEL;
465 trial = prev_nonnote_insn (trial))
466 if (find_regno_note (trial, REG_DEAD, dreg))
468 remove_death (dreg, trial);
469 break;
471 #ifdef PRESERVE_DEATH_INFO_REGNO_P
472 /* Deleting insn could lose a death-note for SREG
473 so don't do it if final needs accurate
474 death-notes. */
475 if (PRESERVE_DEATH_INFO_REGNO_P (sreg)
476 && (trial = find_regno_note (insn, REG_DEAD, sreg)))
478 /* Change this into a USE so that we won't emit
479 code for it, but still can keep the note. */
480 PATTERN (insn)
481 = gen_rtx (USE, VOIDmode, XEXP (trial, 0));
482 INSN_CODE (insn) = -1;
483 /* Remove all reg notes but the REG_DEAD one. */
484 REG_NOTES (insn) = trial;
485 XEXP (trial, 1) = NULL_RTX;
487 else
488 #endif
489 delete_insn (insn);
492 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
493 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
494 NULL_PTR, 0,
495 GET_MODE (SET_DEST (body))))
497 /* This handles the case where we have two consecutive
498 assignments of the same constant to pseudos that didn't
499 get a hard reg. Each SET from the constant will be
500 converted into a SET of the spill register and an
501 output reload will be made following it. This produces
502 two loads of the same constant into the same spill
503 register. */
505 rtx in_insn = insn;
507 /* Look back for a death note for the first reg.
508 If there is one, it is no longer accurate. */
509 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
511 if ((GET_CODE (in_insn) == INSN
512 || GET_CODE (in_insn) == JUMP_INSN)
513 && find_regno_note (in_insn, REG_DEAD, dreg))
515 remove_death (dreg, in_insn);
516 break;
518 in_insn = PREV_INSN (in_insn);
521 /* Delete the second load of the value. */
522 delete_insn (insn);
525 else if (GET_CODE (body) == PARALLEL)
527 /* If each part is a set between two identical registers or
528 a USE or CLOBBER, delete the insn. */
529 int i, sreg, dreg;
530 rtx tem;
532 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
534 tem = XVECEXP (body, 0, i);
535 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
536 continue;
538 if (GET_CODE (tem) != SET
539 || (sreg = true_regnum (SET_SRC (tem))) < 0
540 || (dreg = true_regnum (SET_DEST (tem))) < 0
541 || dreg != sreg)
542 break;
545 if (i < 0)
546 delete_insn (insn);
548 /* Also delete insns to store bit fields if they are no-ops. */
549 /* Not worth the hair to detect this in the big-endian case. */
550 else if (! BYTES_BIG_ENDIAN
551 && GET_CODE (body) == SET
552 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
553 && XEXP (SET_DEST (body), 2) == const0_rtx
554 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
555 && ! (GET_CODE (SET_SRC (body)) == MEM
556 && MEM_VOLATILE_P (SET_SRC (body))))
557 delete_insn (insn);
559 insn = next;
562 /* If we haven't yet gotten to reload and we have just run regscan,
563 delete any insn that sets a register that isn't used elsewhere.
564 This helps some of the optimizations below by having less insns
565 being jumped around. */
567 if (! reload_completed && after_regscan)
568 for (insn = f; insn; insn = next)
570 rtx set = single_set (insn);
572 next = NEXT_INSN (insn);
574 if (set && GET_CODE (SET_DEST (set)) == REG
575 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
576 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
577 /* We use regno_last_note_uid so as not to delete the setting
578 of a reg that's used in notes. A subsequent optimization
579 might arrange to use that reg for real. */
580 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
581 && ! side_effects_p (SET_SRC (set))
582 && ! find_reg_note (insn, REG_RETVAL, 0))
583 delete_insn (insn);
586 /* Now iterate optimizing jumps until nothing changes over one pass. */
587 changed = 1;
588 while (changed)
590 changed = 0;
592 for (insn = f; insn; insn = next)
594 rtx reallabelprev;
595 rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
596 rtx nlabel;
597 int this_is_simplejump, this_is_condjump, reversep;
598 int this_is_condjump_in_parallel;
599 #if 0
600 /* If NOT the first iteration, if this is the last jump pass
601 (just before final), do the special peephole optimizations.
602 Avoiding the first iteration gives ordinary jump opts
603 a chance to work before peephole opts. */
605 if (reload_completed && !first && !flag_no_peephole)
606 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
607 peephole (insn);
608 #endif
610 /* That could have deleted some insns after INSN, so check now
611 what the following insn is. */
613 next = NEXT_INSN (insn);
615 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
616 jump. Try to optimize by duplicating the loop exit test if so.
617 This is only safe immediately after regscan, because it uses
618 the values of regno_first_uid and regno_last_uid. */
619 if (after_regscan && GET_CODE (insn) == NOTE
620 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
621 && (temp1 = next_nonnote_insn (insn)) != 0
622 && simplejump_p (temp1))
624 temp = PREV_INSN (insn);
625 if (duplicate_loop_exit_test (insn))
627 changed = 1;
628 next = NEXT_INSN (temp);
629 continue;
633 if (GET_CODE (insn) != JUMP_INSN)
634 continue;
636 this_is_simplejump = simplejump_p (insn);
637 this_is_condjump = condjump_p (insn);
638 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
640 /* Tension the labels in dispatch tables. */
642 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
643 changed |= tension_vector_labels (PATTERN (insn), 0);
644 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
645 changed |= tension_vector_labels (PATTERN (insn), 1);
647 /* If a dispatch table always goes to the same place,
648 get rid of it and replace the insn that uses it. */
650 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
651 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
653 int i;
654 rtx pat = PATTERN (insn);
655 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
656 int len = XVECLEN (pat, diff_vec_p);
657 rtx dispatch = prev_real_insn (insn);
659 for (i = 0; i < len; i++)
660 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
661 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
662 break;
663 if (i == len
664 && dispatch != 0
665 && GET_CODE (dispatch) == JUMP_INSN
666 && JUMP_LABEL (dispatch) != 0
667 /* Don't mess with a casesi insn. */
668 && !(GET_CODE (PATTERN (dispatch)) == SET
669 && (GET_CODE (SET_SRC (PATTERN (dispatch)))
670 == IF_THEN_ELSE))
671 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
673 redirect_tablejump (dispatch,
674 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
675 changed = 1;
679 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
681 /* If a jump references the end of the function, try to turn
682 it into a RETURN insn, possibly a conditional one. */
683 if (JUMP_LABEL (insn)
684 && (next_active_insn (JUMP_LABEL (insn)) == 0
685 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
686 == RETURN))
687 changed |= redirect_jump (insn, NULL_RTX);
689 /* Detect jump to following insn. */
690 if (reallabelprev == insn && condjump_p (insn))
692 next = next_real_insn (JUMP_LABEL (insn));
693 delete_jump (insn);
694 changed = 1;
695 continue;
698 /* If we have an unconditional jump preceded by a USE, try to put
699 the USE before the target and jump there. This simplifies many
700 of the optimizations below since we don't have to worry about
701 dealing with these USE insns. We only do this if the label
702 being branch to already has the identical USE or if code
703 never falls through to that label. */
705 if (this_is_simplejump
706 && (temp = prev_nonnote_insn (insn)) != 0
707 && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
708 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
709 && (GET_CODE (temp1) == BARRIER
710 || (GET_CODE (temp1) == INSN
711 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
712 /* Don't do this optimization if we have a loop containing only
713 the USE instruction, and the loop start label has a usage
714 count of 1. This is because we will redo this optimization
715 everytime through the outer loop, and jump opt will never
716 exit. */
717 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
718 && temp2 == JUMP_LABEL (insn)
719 && LABEL_NUSES (temp2) == 1))
721 if (GET_CODE (temp1) == BARRIER)
723 emit_insn_after (PATTERN (temp), temp1);
724 temp1 = NEXT_INSN (temp1);
727 delete_insn (temp);
728 redirect_jump (insn, get_label_before (temp1));
729 reallabelprev = prev_real_insn (temp1);
730 changed = 1;
733 /* Simplify if (...) x = a; else x = b; by converting it
734 to x = b; if (...) x = a;
735 if B is sufficiently simple, the test doesn't involve X,
736 and nothing in the test modifies B or X.
738 If we have small register classes, we also can't do this if X
739 is a hard register.
741 If the "x = b;" insn has any REG_NOTES, we don't do this because
742 of the possibility that we are running after CSE and there is a
743 REG_EQUAL note that is only valid if the branch has already been
744 taken. If we move the insn with the REG_EQUAL note, we may
745 fold the comparison to always be false in a later CSE pass.
746 (We could also delete the REG_NOTES when moving the insn, but it
747 seems simpler to not move it.) An exception is that we can move
748 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
749 value is the same as "b".
751 INSN is the branch over the `else' part.
753 We set:
755 TEMP to the jump insn preceding "x = a;"
756 TEMP1 to X
757 TEMP2 to the insn that sets "x = b;"
758 TEMP3 to the insn that sets "x = a;"
759 TEMP4 to the set of "x = b"; */
761 if (this_is_simplejump
762 && (temp3 = prev_active_insn (insn)) != 0
763 && GET_CODE (temp3) == INSN
764 && (temp4 = single_set (temp3)) != 0
765 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
766 && (! SMALL_REGISTER_CLASSES
767 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
768 && (temp2 = next_active_insn (insn)) != 0
769 && GET_CODE (temp2) == INSN
770 && (temp4 = single_set (temp2)) != 0
771 && rtx_equal_p (SET_DEST (temp4), temp1)
772 && (GET_CODE (SET_SRC (temp4)) == REG
773 || GET_CODE (SET_SRC (temp4)) == SUBREG
774 || CONSTANT_P (SET_SRC (temp4)))
775 && (REG_NOTES (temp2) == 0
776 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
777 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
778 && XEXP (REG_NOTES (temp2), 1) == 0
779 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
780 SET_SRC (temp4))))
781 && (temp = prev_active_insn (temp3)) != 0
782 && condjump_p (temp) && ! simplejump_p (temp)
783 /* TEMP must skip over the "x = a;" insn */
784 && prev_real_insn (JUMP_LABEL (temp)) == insn
785 && no_labels_between_p (insn, JUMP_LABEL (temp))
786 /* There must be no other entries to the "x = b;" insn. */
787 && no_labels_between_p (JUMP_LABEL (temp), temp2)
788 /* INSN must either branch to the insn after TEMP2 or the insn
789 after TEMP2 must branch to the same place as INSN. */
790 && (reallabelprev == temp2
791 || ((temp5 = next_active_insn (temp2)) != 0
792 && simplejump_p (temp5)
793 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
795 /* The test expression, X, may be a complicated test with
796 multiple branches. See if we can find all the uses of
797 the label that TEMP branches to without hitting a CALL_INSN
798 or a jump to somewhere else. */
799 rtx target = JUMP_LABEL (temp);
800 int nuses = LABEL_NUSES (target);
801 rtx p, q;
803 /* Set P to the first jump insn that goes around "x = a;". */
804 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
806 if (GET_CODE (p) == JUMP_INSN)
808 if (condjump_p (p) && ! simplejump_p (p)
809 && JUMP_LABEL (p) == target)
811 nuses--;
812 if (nuses == 0)
813 break;
815 else
816 break;
818 else if (GET_CODE (p) == CALL_INSN)
819 break;
822 #ifdef HAVE_cc0
823 /* We cannot insert anything between a set of cc and its use
824 so if P uses cc0, we must back up to the previous insn. */
825 q = prev_nonnote_insn (p);
826 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
827 && sets_cc0_p (PATTERN (q)))
828 p = q;
829 #endif
831 if (p)
832 p = PREV_INSN (p);
834 /* If we found all the uses and there was no data conflict, we
835 can move the assignment unless we can branch into the middle
836 from somewhere. */
837 if (nuses == 0 && p
838 && no_labels_between_p (p, insn)
839 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
840 && ! reg_set_between_p (temp1, p, temp3)
841 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
842 || ! reg_set_between_p (SET_SRC (temp4), p, temp2)))
844 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
845 delete_insn (temp2);
847 /* Set NEXT to an insn that we know won't go away. */
848 next = next_active_insn (insn);
850 /* Delete the jump around the set. Note that we must do
851 this before we redirect the test jumps so that it won't
852 delete the code immediately following the assignment
853 we moved (which might be a jump). */
855 delete_insn (insn);
857 /* We either have two consecutive labels or a jump to
858 a jump, so adjust all the JUMP_INSNs to branch to where
859 INSN branches to. */
860 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
861 if (GET_CODE (p) == JUMP_INSN)
862 redirect_jump (p, target);
864 changed = 1;
865 continue;
869 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
870 to x = a; if (...) goto l; x = b;
871 if A is sufficiently simple, the test doesn't involve X,
872 and nothing in the test modifies A or X.
874 If we have small register classes, we also can't do this if X
875 is a hard register.
877 If the "x = a;" insn has any REG_NOTES, we don't do this because
878 of the possibility that we are running after CSE and there is a
879 REG_EQUAL note that is only valid if the branch has already been
880 taken. If we move the insn with the REG_EQUAL note, we may
881 fold the comparison to always be false in a later CSE pass.
882 (We could also delete the REG_NOTES when moving the insn, but it
883 seems simpler to not move it.) An exception is that we can move
884 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
885 value is the same as "a".
887 INSN is the goto.
889 We set:
891 TEMP to the jump insn preceding "x = a;"
892 TEMP1 to X
893 TEMP2 to the insn that sets "x = b;"
894 TEMP3 to the insn that sets "x = a;"
895 TEMP4 to the set of "x = a"; */
897 if (this_is_simplejump
898 && (temp2 = next_active_insn (insn)) != 0
899 && GET_CODE (temp2) == INSN
900 && (temp4 = single_set (temp2)) != 0
901 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
902 && (! SMALL_REGISTER_CLASSES
903 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
904 && (temp3 = prev_active_insn (insn)) != 0
905 && GET_CODE (temp3) == INSN
906 && (temp4 = single_set (temp3)) != 0
907 && rtx_equal_p (SET_DEST (temp4), temp1)
908 && (GET_CODE (SET_SRC (temp4)) == REG
909 || GET_CODE (SET_SRC (temp4)) == SUBREG
910 || CONSTANT_P (SET_SRC (temp4)))
911 && (REG_NOTES (temp3) == 0
912 || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
913 || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
914 && XEXP (REG_NOTES (temp3), 1) == 0
915 && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
916 SET_SRC (temp4))))
917 && (temp = prev_active_insn (temp3)) != 0
918 && condjump_p (temp) && ! simplejump_p (temp)
919 /* TEMP must skip over the "x = a;" insn */
920 && prev_real_insn (JUMP_LABEL (temp)) == insn
921 && no_labels_between_p (temp, insn))
923 rtx prev_label = JUMP_LABEL (temp);
924 rtx insert_after = prev_nonnote_insn (temp);
926 #ifdef HAVE_cc0
927 /* We cannot insert anything between a set of cc and its use. */
928 if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
929 && sets_cc0_p (PATTERN (insert_after)))
930 insert_after = prev_nonnote_insn (insert_after);
931 #endif
932 ++LABEL_NUSES (prev_label);
934 if (insert_after
935 && no_labels_between_p (insert_after, temp)
936 && ! reg_referenced_between_p (temp1, insert_after, temp3)
937 && ! reg_referenced_between_p (temp1, temp3,
938 NEXT_INSN (temp2))
939 && ! reg_set_between_p (temp1, insert_after, temp)
940 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
941 || ! reg_set_between_p (SET_SRC (temp4),
942 insert_after, temp))
943 && invert_jump (temp, JUMP_LABEL (insn)))
945 emit_insn_after_with_line_notes (PATTERN (temp3),
946 insert_after, temp3);
947 delete_insn (temp3);
948 delete_insn (insn);
949 /* Set NEXT to an insn that we know won't go away. */
950 next = temp2;
951 changed = 1;
953 if (prev_label && --LABEL_NUSES (prev_label) == 0)
954 delete_insn (prev_label);
955 if (changed)
956 continue;
959 #ifndef HAVE_cc0
960 /* If we have if (...) x = exp; and branches are expensive,
961 EXP is a single insn, does not have any side effects, cannot
962 trap, and is not too costly, convert this to
963 t = exp; if (...) x = t;
965 Don't do this when we have CC0 because it is unlikely to help
966 and we'd need to worry about where to place the new insn and
967 the potential for conflicts. We also can't do this when we have
968 notes on the insn for the same reason as above.
970 We set:
972 TEMP to the "x = exp;" insn.
973 TEMP1 to the single set in the "x = exp; insn.
974 TEMP2 to "x". */
976 if (! reload_completed
977 && this_is_condjump && ! this_is_simplejump
978 && BRANCH_COST >= 3
979 && (temp = next_nonnote_insn (insn)) != 0
980 && GET_CODE (temp) == INSN
981 && REG_NOTES (temp) == 0
982 && (reallabelprev == temp
983 || ((temp2 = next_active_insn (temp)) != 0
984 && simplejump_p (temp2)
985 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
986 && (temp1 = single_set (temp)) != 0
987 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
988 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
989 && (! SMALL_REGISTER_CLASSES
990 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
991 && GET_CODE (SET_SRC (temp1)) != REG
992 && GET_CODE (SET_SRC (temp1)) != SUBREG
993 && GET_CODE (SET_SRC (temp1)) != CONST_INT
994 && ! side_effects_p (SET_SRC (temp1))
995 && ! may_trap_p (SET_SRC (temp1))
996 && rtx_cost (SET_SRC (temp1), SET) < 10)
998 rtx new = gen_reg_rtx (GET_MODE (temp2));
1000 if ((temp3 = find_insert_position (insn, temp))
1001 && validate_change (temp, &SET_DEST (temp1), new, 0))
1003 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1004 emit_insn_after_with_line_notes (PATTERN (temp),
1005 PREV_INSN (temp3), temp);
1006 delete_insn (temp);
1007 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
1011 /* Similarly, if it takes two insns to compute EXP but they
1012 have the same destination. Here TEMP3 will be the second
1013 insn and TEMP4 the SET from that insn. */
1015 if (! reload_completed
1016 && this_is_condjump && ! this_is_simplejump
1017 && BRANCH_COST >= 4
1018 && (temp = next_nonnote_insn (insn)) != 0
1019 && GET_CODE (temp) == INSN
1020 && REG_NOTES (temp) == 0
1021 && (temp3 = next_nonnote_insn (temp)) != 0
1022 && GET_CODE (temp3) == INSN
1023 && REG_NOTES (temp3) == 0
1024 && (reallabelprev == temp3
1025 || ((temp2 = next_active_insn (temp3)) != 0
1026 && simplejump_p (temp2)
1027 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
1028 && (temp1 = single_set (temp)) != 0
1029 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
1030 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
1031 && (! SMALL_REGISTER_CLASSES
1032 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
1033 && ! side_effects_p (SET_SRC (temp1))
1034 && ! may_trap_p (SET_SRC (temp1))
1035 && rtx_cost (SET_SRC (temp1), SET) < 10
1036 && (temp4 = single_set (temp3)) != 0
1037 && rtx_equal_p (SET_DEST (temp4), temp2)
1038 && ! side_effects_p (SET_SRC (temp4))
1039 && ! may_trap_p (SET_SRC (temp4))
1040 && rtx_cost (SET_SRC (temp4), SET) < 10)
1042 rtx new = gen_reg_rtx (GET_MODE (temp2));
1044 if ((temp5 = find_insert_position (insn, temp))
1045 && (temp6 = find_insert_position (insn, temp3))
1046 && validate_change (temp, &SET_DEST (temp1), new, 0))
1048 /* Use the earliest of temp5 and temp6. */
1049 if (temp5 != insn)
1050 temp6 = temp5;
1051 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1052 emit_insn_after_with_line_notes (PATTERN (temp),
1053 PREV_INSN (temp6), temp);
1054 emit_insn_after_with_line_notes
1055 (replace_rtx (PATTERN (temp3), temp2, new),
1056 PREV_INSN (temp6), temp3);
1057 delete_insn (temp);
1058 delete_insn (temp3);
1059 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
1063 /* Finally, handle the case where two insns are used to
1064 compute EXP but a temporary register is used. Here we must
1065 ensure that the temporary register is not used anywhere else. */
1067 if (! reload_completed
1068 && after_regscan
1069 && this_is_condjump && ! this_is_simplejump
1070 && BRANCH_COST >= 4
1071 && (temp = next_nonnote_insn (insn)) != 0
1072 && GET_CODE (temp) == INSN
1073 && REG_NOTES (temp) == 0
1074 && (temp3 = next_nonnote_insn (temp)) != 0
1075 && GET_CODE (temp3) == INSN
1076 && REG_NOTES (temp3) == 0
1077 && (reallabelprev == temp3
1078 || ((temp2 = next_active_insn (temp3)) != 0
1079 && simplejump_p (temp2)
1080 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
1081 && (temp1 = single_set (temp)) != 0
1082 && (temp5 = SET_DEST (temp1),
1083 (GET_CODE (temp5) == REG
1084 || (GET_CODE (temp5) == SUBREG
1085 && (temp5 = SUBREG_REG (temp5),
1086 GET_CODE (temp5) == REG))))
1087 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
1088 && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
1089 && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
1090 && ! side_effects_p (SET_SRC (temp1))
1091 && ! may_trap_p (SET_SRC (temp1))
1092 && rtx_cost (SET_SRC (temp1), SET) < 10
1093 && (temp4 = single_set (temp3)) != 0
1094 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
1095 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
1096 && (! SMALL_REGISTER_CLASSES
1097 || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
1098 && rtx_equal_p (SET_DEST (temp4), temp2)
1099 && ! side_effects_p (SET_SRC (temp4))
1100 && ! may_trap_p (SET_SRC (temp4))
1101 && rtx_cost (SET_SRC (temp4), SET) < 10)
1103 rtx new = gen_reg_rtx (GET_MODE (temp2));
1105 if ((temp5 = find_insert_position (insn, temp))
1106 && (temp6 = find_insert_position (insn, temp3))
1107 && validate_change (temp3, &SET_DEST (temp4), new, 0))
1109 /* Use the earliest of temp5 and temp6. */
1110 if (temp5 != insn)
1111 temp6 = temp5;
1112 next = emit_insn_after (gen_move_insn (temp2, new), insn);
1113 emit_insn_after_with_line_notes (PATTERN (temp),
1114 PREV_INSN (temp6), temp);
1115 emit_insn_after_with_line_notes (PATTERN (temp3),
1116 PREV_INSN (temp6), temp3);
1117 delete_insn (temp);
1118 delete_insn (temp3);
1119 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
1122 #endif /* HAVE_cc0 */
1124 /* Try to use a conditional move (if the target has them), or a
1125 store-flag insn. The general case is:
1127 1) x = a; if (...) x = b; and
1128 2) if (...) x = b;
1130 If the jump would be faster, the machine should not have defined
1131 the movcc or scc insns!. These cases are often made by the
1132 previous optimization.
1134 The second case is treated as x = x; if (...) x = b;.
1136 INSN here is the jump around the store. We set:
1138 TEMP to the "x = b;" insn.
1139 TEMP1 to X.
1140 TEMP2 to B.
1141 TEMP3 to A (X in the second case).
1142 TEMP4 to the condition being tested.
1143 TEMP5 to the earliest insn used to find the condition. */
1145 if (/* We can't do this after reload has completed. */
1146 ! reload_completed
1147 && this_is_condjump && ! this_is_simplejump
1148 /* Set TEMP to the "x = b;" insn. */
1149 && (temp = next_nonnote_insn (insn)) != 0
1150 && GET_CODE (temp) == INSN
1151 && GET_CODE (PATTERN (temp)) == SET
1152 && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
1153 && (! SMALL_REGISTER_CLASSES
1154 || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
1155 && (GET_CODE (temp2 = SET_SRC (PATTERN (temp))) == REG
1156 || GET_CODE (temp2) == SUBREG
1157 /* ??? How about floating point constants? */
1158 || GET_CODE (temp2) == CONST_INT)
1159 /* Allow either form, but prefer the former if both apply.
1160 There is no point in using the old value of TEMP1 if
1161 it is a register, since cse will alias them. It can
1162 lose if the old value were a hard register since CSE
1163 won't replace hard registers. */
1164 && (((temp3 = reg_set_last (temp1, insn)) != 0)
1165 /* Make the latter case look like x = x; if (...) x = b; */
1166 || (temp3 = temp1, 1))
1167 /* INSN must either branch to the insn after TEMP or the insn
1168 after TEMP must branch to the same place as INSN. */
1169 && (reallabelprev == temp
1170 || ((temp4 = next_active_insn (temp)) != 0
1171 && simplejump_p (temp4)
1172 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
1173 && (temp4 = get_condition (insn, &temp5)) != 0
1174 /* We must be comparing objects whose modes imply the size.
1175 We could handle BLKmode if (1) emit_store_flag could
1176 and (2) we could find the size reliably. */
1177 && GET_MODE (XEXP (temp4, 0)) != BLKmode
1178 /* Even if branches are cheap, the store_flag optimization
1179 can win when the operation to be performed can be
1180 expressed directly. */
1181 #ifdef HAVE_cc0
1182 /* If the previous insn sets CC0 and something else, we can't
1183 do this since we are going to delete that insn. */
1185 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
1186 && GET_CODE (temp6) == INSN
1187 && (sets_cc0_p (PATTERN (temp6)) == -1
1188 || (sets_cc0_p (PATTERN (temp6)) == 1
1189 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
1190 #endif
1193 #ifdef HAVE_conditional_move
1194 /* First try a conditional move. */
1196 enum rtx_code code = GET_CODE (temp4);
1197 rtx var = temp1;
1198 rtx cond0, cond1, aval, bval;
1199 rtx target;
1201 /* Copy the compared variables into cond0 and cond1, so that
1202 any side effects performed in or after the old comparison,
1203 will not affect our compare which will come later. */
1204 /* ??? Is it possible to just use the comparison in the jump
1205 insn? After all, we're going to delete it. We'd have
1206 to modify emit_conditional_move to take a comparison rtx
1207 instead or write a new function. */
1208 cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
1209 /* We want the target to be able to simplify comparisons with
1210 zero (and maybe other constants as well), so don't create
1211 pseudos for them. There's no need to either. */
1212 if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
1213 || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
1214 cond1 = XEXP (temp4, 1);
1215 else
1216 cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
1218 aval = temp3;
1219 bval = temp2;
1221 start_sequence ();
1222 target = emit_conditional_move (var, code,
1223 cond0, cond1, VOIDmode,
1224 aval, bval, GET_MODE (var),
1225 (code == LTU || code == GEU
1226 || code == LEU || code == GTU));
1228 if (target)
1230 rtx seq1,seq2;
1232 /* Save the conditional move sequence but don't emit it
1233 yet. On some machines, like the alpha, it is possible
1234 that temp5 == insn, so next generate the sequence that
1235 saves the compared values and then emit both
1236 sequences ensuring seq1 occurs before seq2. */
1237 seq2 = get_insns ();
1238 end_sequence ();
1240 /* Now that we can't fail, generate the copy insns that
1241 preserve the compared values. */
1242 start_sequence ();
1243 emit_move_insn (cond0, XEXP (temp4, 0));
1244 if (cond1 != XEXP (temp4, 1))
1245 emit_move_insn (cond1, XEXP (temp4, 1));
1246 seq1 = get_insns ();
1247 end_sequence ();
1249 emit_insns_before (seq1, temp5);
1250 /* Insert conditional move after insn, to be sure that
1251 the jump and a possible compare won't be separated */
1252 emit_insns_after (seq2, insn);
1254 /* ??? We can also delete the insn that sets X to A.
1255 Flow will do it too though. */
1256 delete_insn (temp);
1257 next = NEXT_INSN (insn);
1258 delete_jump (insn);
1259 changed = 1;
1260 continue;
1262 else
1263 end_sequence ();
1265 #endif
1267 /* That didn't work, try a store-flag insn.
1269 We further divide the cases into:
1271 1) x = a; if (...) x = b; and either A or B is zero,
1272 2) if (...) x = 0; and jumps are expensive,
1273 3) x = a; if (...) x = b; and A and B are constants where all
1274 the set bits in A are also set in B and jumps are expensive,
1275 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1276 more expensive, and
1277 5) if (...) x = b; if jumps are even more expensive. */
1279 if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
1280 && ((GET_CODE (temp3) == CONST_INT)
1281 /* Make the latter case look like
1282 x = x; if (...) x = 0; */
1283 || (temp3 = temp1,
1284 ((BRANCH_COST >= 2
1285 && temp2 == const0_rtx)
1286 || BRANCH_COST >= 3)))
1287 /* If B is zero, OK; if A is zero, can only do (1) if we
1288 can reverse the condition. See if (3) applies possibly
1289 by reversing the condition. Prefer reversing to (4) when
1290 branches are very expensive. */
1291 && (((BRANCH_COST >= 2
1292 || STORE_FLAG_VALUE == -1
1293 || (STORE_FLAG_VALUE == 1
1294 /* Check that the mask is a power of two,
1295 so that it can probably be generated
1296 with a shift. */
1297 && exact_log2 (INTVAL (temp3)) >= 0))
1298 && (reversep = 0, temp2 == const0_rtx))
1299 || ((BRANCH_COST >= 2
1300 || STORE_FLAG_VALUE == -1
1301 || (STORE_FLAG_VALUE == 1
1302 && exact_log2 (INTVAL (temp2)) >= 0))
1303 && temp3 == const0_rtx
1304 && (reversep = can_reverse_comparison_p (temp4, insn)))
1305 || (BRANCH_COST >= 2
1306 && GET_CODE (temp2) == CONST_INT
1307 && GET_CODE (temp3) == CONST_INT
1308 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1309 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1310 && (reversep = can_reverse_comparison_p (temp4,
1311 insn)))))
1312 || BRANCH_COST >= 3)
1315 enum rtx_code code = GET_CODE (temp4);
1316 rtx uval, cval, var = temp1;
1317 int normalizep;
1318 rtx target;
1320 /* If necessary, reverse the condition. */
1321 if (reversep)
1322 code = reverse_condition (code), uval = temp2, cval = temp3;
1323 else
1324 uval = temp3, cval = temp2;
1326 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1327 is the constant 1, it is best to just compute the result
1328 directly. If UVAL is constant and STORE_FLAG_VALUE
1329 includes all of its bits, it is best to compute the flag
1330 value unnormalized and `and' it with UVAL. Otherwise,
1331 normalize to -1 and `and' with UVAL. */
1332 normalizep = (cval != const0_rtx ? -1
1333 : (uval == const1_rtx ? 1
1334 : (GET_CODE (uval) == CONST_INT
1335 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1336 ? 0 : -1));
1338 /* We will be putting the store-flag insn immediately in
1339 front of the comparison that was originally being done,
1340 so we know all the variables in TEMP4 will be valid.
1341 However, this might be in front of the assignment of
1342 A to VAR. If it is, it would clobber the store-flag
1343 we will be emitting.
1345 Therefore, emit into a temporary which will be copied to
1346 VAR immediately after TEMP. */
1348 start_sequence ();
1349 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1350 XEXP (temp4, 0), XEXP (temp4, 1),
1351 VOIDmode,
1352 (code == LTU || code == LEU
1353 || code == GEU || code == GTU),
1354 normalizep);
1355 if (target)
1357 rtx seq;
1358 rtx before = insn;
1360 seq = get_insns ();
1361 end_sequence ();
1363 /* Put the store-flag insns in front of the first insn
1364 used to compute the condition to ensure that we
1365 use the same values of them as the current
1366 comparison. However, the remainder of the insns we
1367 generate will be placed directly in front of the
1368 jump insn, in case any of the pseudos we use
1369 are modified earlier. */
1371 emit_insns_before (seq, temp5);
1373 start_sequence ();
1375 /* Both CVAL and UVAL are non-zero. */
1376 if (cval != const0_rtx && uval != const0_rtx)
1378 rtx tem1, tem2;
1380 tem1 = expand_and (uval, target, NULL_RTX);
1381 if (GET_CODE (cval) == CONST_INT
1382 && GET_CODE (uval) == CONST_INT
1383 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1384 tem2 = cval;
1385 else
1387 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1388 target, NULL_RTX, 0);
1389 tem2 = expand_and (cval, tem2,
1390 (GET_CODE (tem2) == REG
1391 ? tem2 : 0));
1394 /* If we usually make new pseudos, do so here. This
1395 turns out to help machines that have conditional
1396 move insns. */
1397 /* ??? Conditional moves have already been handled.
1398 This may be obsolete. */
1400 if (flag_expensive_optimizations)
1401 target = 0;
1403 target = expand_binop (GET_MODE (var), ior_optab,
1404 tem1, tem2, target,
1405 1, OPTAB_WIDEN);
1407 else if (normalizep != 1)
1409 /* We know that either CVAL or UVAL is zero. If
1410 UVAL is zero, negate TARGET and `and' with CVAL.
1411 Otherwise, `and' with UVAL. */
1412 if (uval == const0_rtx)
1414 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1415 target, NULL_RTX, 0);
1416 uval = cval;
1419 target = expand_and (uval, target,
1420 (GET_CODE (target) == REG
1421 && ! preserve_subexpressions_p ()
1422 ? target : NULL_RTX));
1425 emit_move_insn (var, target);
1426 seq = get_insns ();
1427 end_sequence ();
1428 #ifdef HAVE_cc0
1429 /* If INSN uses CC0, we must not separate it from the
1430 insn that sets cc0. */
1431 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1432 before = prev_nonnote_insn (before);
1433 #endif
1434 emit_insns_before (seq, before);
1436 delete_insn (temp);
1437 next = NEXT_INSN (insn);
1438 delete_jump (insn);
1439 changed = 1;
1440 continue;
1442 else
1443 end_sequence ();
1447 /* If branches are expensive, convert
1448 if (foo) bar++; to bar += (foo != 0);
1449 and similarly for "bar--;"
1451 INSN is the conditional branch around the arithmetic. We set:
1453 TEMP is the arithmetic insn.
1454 TEMP1 is the SET doing the arithmetic.
1455 TEMP2 is the operand being incremented or decremented.
1456 TEMP3 to the condition being tested.
1457 TEMP4 to the earliest insn used to find the condition. */
1459 if ((BRANCH_COST >= 2
1460 #ifdef HAVE_incscc
1461 || HAVE_incscc
1462 #endif
1463 #ifdef HAVE_decscc
1464 || HAVE_decscc
1465 #endif
1467 && ! reload_completed
1468 && this_is_condjump && ! this_is_simplejump
1469 && (temp = next_nonnote_insn (insn)) != 0
1470 && (temp1 = single_set (temp)) != 0
1471 && (temp2 = SET_DEST (temp1),
1472 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1473 && GET_CODE (SET_SRC (temp1)) == PLUS
1474 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1475 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1476 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
1477 && ! side_effects_p (temp2)
1478 && ! may_trap_p (temp2)
1479 /* INSN must either branch to the insn after TEMP or the insn
1480 after TEMP must branch to the same place as INSN. */
1481 && (reallabelprev == temp
1482 || ((temp3 = next_active_insn (temp)) != 0
1483 && simplejump_p (temp3)
1484 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1485 && (temp3 = get_condition (insn, &temp4)) != 0
1486 /* We must be comparing objects whose modes imply the size.
1487 We could handle BLKmode if (1) emit_store_flag could
1488 and (2) we could find the size reliably. */
1489 && GET_MODE (XEXP (temp3, 0)) != BLKmode
1490 && can_reverse_comparison_p (temp3, insn))
1492 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
1493 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1495 start_sequence ();
1497 /* It must be the case that TEMP2 is not modified in the range
1498 [TEMP4, INSN). The one exception we make is if the insn
1499 before INSN sets TEMP2 to something which is also unchanged
1500 in that range. In that case, we can move the initialization
1501 into our sequence. */
1503 if ((temp5 = prev_active_insn (insn)) != 0
1504 && no_labels_between_p (temp5, insn)
1505 && GET_CODE (temp5) == INSN
1506 && (temp6 = single_set (temp5)) != 0
1507 && rtx_equal_p (temp2, SET_DEST (temp6))
1508 && (CONSTANT_P (SET_SRC (temp6))
1509 || GET_CODE (SET_SRC (temp6)) == REG
1510 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1512 emit_insn (PATTERN (temp5));
1513 init_insn = temp5;
1514 init = SET_SRC (temp6);
1517 if (CONSTANT_P (init)
1518 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1519 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1520 XEXP (temp3, 0), XEXP (temp3, 1),
1521 VOIDmode,
1522 (code == LTU || code == LEU
1523 || code == GTU || code == GEU), 1);
1525 /* If we can do the store-flag, do the addition or
1526 subtraction. */
1528 if (target)
1529 target = expand_binop (GET_MODE (temp2),
1530 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1531 ? add_optab : sub_optab),
1532 temp2, target, temp2, 0, OPTAB_WIDEN);
1534 if (target != 0)
1536 /* Put the result back in temp2 in case it isn't already.
1537 Then replace the jump, possible a CC0-setting insn in
1538 front of the jump, and TEMP, with the sequence we have
1539 made. */
1541 if (target != temp2)
1542 emit_move_insn (temp2, target);
1544 seq = get_insns ();
1545 end_sequence ();
1547 emit_insns_before (seq, temp4);
1548 delete_insn (temp);
1550 if (init_insn)
1551 delete_insn (init_insn);
1553 next = NEXT_INSN (insn);
1554 #ifdef HAVE_cc0
1555 delete_insn (prev_nonnote_insn (insn));
1556 #endif
1557 delete_insn (insn);
1558 changed = 1;
1559 continue;
1561 else
1562 end_sequence ();
1565 /* Simplify if (...) x = 1; else {...} if (x) ...
1566 We recognize this case scanning backwards as well.
1568 TEMP is the assignment to x;
1569 TEMP1 is the label at the head of the second if. */
1570 /* ?? This should call get_condition to find the values being
1571 compared, instead of looking for a COMPARE insn when HAVE_cc0
1572 is not defined. This would allow it to work on the m88k. */
1573 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1574 is not defined and the condition is tested by a separate compare
1575 insn. This is because the code below assumes that the result
1576 of the compare dies in the following branch.
1578 Not only that, but there might be other insns between the
1579 compare and branch whose results are live. Those insns need
1580 to be executed.
1582 A way to fix this is to move the insns at JUMP_LABEL (insn)
1583 to before INSN. If we are running before flow, they will
1584 be deleted if they aren't needed. But this doesn't work
1585 well after flow.
1587 This is really a special-case of jump threading, anyway. The
1588 right thing to do is to replace this and jump threading with
1589 much simpler code in cse.
1591 This code has been turned off in the non-cc0 case in the
1592 meantime. */
1594 #ifdef HAVE_cc0
1595 else if (this_is_simplejump
1596 /* Safe to skip USE and CLOBBER insns here
1597 since they will not be deleted. */
1598 && (temp = prev_active_insn (insn))
1599 && no_labels_between_p (temp, insn)
1600 && GET_CODE (temp) == INSN
1601 && GET_CODE (PATTERN (temp)) == SET
1602 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1603 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1604 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1605 /* If we find that the next value tested is `x'
1606 (TEMP1 is the insn where this happens), win. */
1607 && GET_CODE (temp1) == INSN
1608 && GET_CODE (PATTERN (temp1)) == SET
1609 #ifdef HAVE_cc0
1610 /* Does temp1 `tst' the value of x? */
1611 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1612 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1613 && (temp1 = next_nonnote_insn (temp1))
1614 #else
1615 /* Does temp1 compare the value of x against zero? */
1616 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1617 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1618 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1619 == SET_DEST (PATTERN (temp)))
1620 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1621 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1622 #endif
1623 && condjump_p (temp1))
1625 /* Get the if_then_else from the condjump. */
1626 rtx choice = SET_SRC (PATTERN (temp1));
1627 if (GET_CODE (choice) == IF_THEN_ELSE)
1629 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1630 rtx val = SET_SRC (PATTERN (temp));
1631 rtx cond
1632 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1633 val, const0_rtx);
1634 rtx ultimate;
1636 if (cond == const_true_rtx)
1637 ultimate = XEXP (choice, 1);
1638 else if (cond == const0_rtx)
1639 ultimate = XEXP (choice, 2);
1640 else
1641 ultimate = 0;
1643 if (ultimate == pc_rtx)
1644 ultimate = get_label_after (temp1);
1645 else if (ultimate && GET_CODE (ultimate) != RETURN)
1646 ultimate = XEXP (ultimate, 0);
1648 if (ultimate && JUMP_LABEL(insn) != ultimate)
1649 changed |= redirect_jump (insn, ultimate);
1652 #endif
1654 #if 0
1655 /* @@ This needs a bit of work before it will be right.
1657 Any type of comparison can be accepted for the first and
1658 second compare. When rewriting the first jump, we must
1659 compute the what conditions can reach label3, and use the
1660 appropriate code. We can not simply reverse/swap the code
1661 of the first jump. In some cases, the second jump must be
1662 rewritten also.
1664 For example,
1665 < == converts to > ==
1666 < != converts to == >
1667 etc.
1669 If the code is written to only accept an '==' test for the second
1670 compare, then all that needs to be done is to swap the condition
1671 of the first branch.
1673 It is questionable whether we want this optimization anyways,
1674 since if the user wrote code like this because he/she knew that
1675 the jump to label1 is taken most of the time, then rewriting
1676 this gives slower code. */
1677 /* @@ This should call get_condition to find the values being
1678 compared, instead of looking for a COMPARE insn when HAVE_cc0
1679 is not defined. This would allow it to work on the m88k. */
1680 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1681 is not defined and the condition is tested by a separate compare
1682 insn. This is because the code below assumes that the result
1683 of the compare dies in the following branch. */
1685 /* Simplify test a ~= b
1686 condjump label1;
1687 test a == b
1688 condjump label2;
1689 jump label3;
1690 label1:
1692 rewriting as
1693 test a ~~= b
1694 condjump label3
1695 test a == b
1696 condjump label2
1697 label1:
1699 where ~= is an inequality, e.g. >, and ~~= is the swapped
1700 inequality, e.g. <.
1702 We recognize this case scanning backwards.
1704 TEMP is the conditional jump to `label2';
1705 TEMP1 is the test for `a == b';
1706 TEMP2 is the conditional jump to `label1';
1707 TEMP3 is the test for `a ~= b'. */
1708 else if (this_is_simplejump
1709 && (temp = prev_active_insn (insn))
1710 && no_labels_between_p (temp, insn)
1711 && condjump_p (temp)
1712 && (temp1 = prev_active_insn (temp))
1713 && no_labels_between_p (temp1, temp)
1714 && GET_CODE (temp1) == INSN
1715 && GET_CODE (PATTERN (temp1)) == SET
1716 #ifdef HAVE_cc0
1717 && sets_cc0_p (PATTERN (temp1)) == 1
1718 #else
1719 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1720 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1721 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1722 #endif
1723 && (temp2 = prev_active_insn (temp1))
1724 && no_labels_between_p (temp2, temp1)
1725 && condjump_p (temp2)
1726 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1727 && (temp3 = prev_active_insn (temp2))
1728 && no_labels_between_p (temp3, temp2)
1729 && GET_CODE (PATTERN (temp3)) == SET
1730 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1731 SET_DEST (PATTERN (temp1)))
1732 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1733 SET_SRC (PATTERN (temp3)))
1734 && ! inequality_comparisons_p (PATTERN (temp))
1735 && inequality_comparisons_p (PATTERN (temp2)))
1737 rtx fallthrough_label = JUMP_LABEL (temp2);
1739 ++LABEL_NUSES (fallthrough_label);
1740 if (swap_jump (temp2, JUMP_LABEL (insn)))
1742 delete_insn (insn);
1743 changed = 1;
1746 if (--LABEL_NUSES (fallthrough_label) == 0)
1747 delete_insn (fallthrough_label);
1749 #endif
1750 /* Simplify if (...) {... x = 1;} if (x) ...
1752 We recognize this case backwards.
1754 TEMP is the test of `x';
1755 TEMP1 is the assignment to `x' at the end of the
1756 previous statement. */
1757 /* @@ This should call get_condition to find the values being
1758 compared, instead of looking for a COMPARE insn when HAVE_cc0
1759 is not defined. This would allow it to work on the m88k. */
1760 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1761 is not defined and the condition is tested by a separate compare
1762 insn. This is because the code below assumes that the result
1763 of the compare dies in the following branch. */
1765 /* ??? This has to be turned off. The problem is that the
1766 unconditional jump might indirectly end up branching to the
1767 label between TEMP1 and TEMP. We can't detect this, in general,
1768 since it may become a jump to there after further optimizations.
1769 If that jump is done, it will be deleted, so we will retry
1770 this optimization in the next pass, thus an infinite loop.
1772 The present code prevents this by putting the jump after the
1773 label, but this is not logically correct. */
1774 #if 0
1775 else if (this_is_condjump
1776 /* Safe to skip USE and CLOBBER insns here
1777 since they will not be deleted. */
1778 && (temp = prev_active_insn (insn))
1779 && no_labels_between_p (temp, insn)
1780 && GET_CODE (temp) == INSN
1781 && GET_CODE (PATTERN (temp)) == SET
1782 #ifdef HAVE_cc0
1783 && sets_cc0_p (PATTERN (temp)) == 1
1784 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1785 #else
1786 /* Temp must be a compare insn, we can not accept a register
1787 to register move here, since it may not be simply a
1788 tst insn. */
1789 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1790 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1791 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1792 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1793 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1794 #endif
1795 /* May skip USE or CLOBBER insns here
1796 for checking for opportunity, since we
1797 take care of them later. */
1798 && (temp1 = prev_active_insn (temp))
1799 && GET_CODE (temp1) == INSN
1800 && GET_CODE (PATTERN (temp1)) == SET
1801 #ifdef HAVE_cc0
1802 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1803 #else
1804 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1805 == SET_DEST (PATTERN (temp1)))
1806 #endif
1807 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1808 /* If this isn't true, cse will do the job. */
1809 && ! no_labels_between_p (temp1, temp))
1811 /* Get the if_then_else from the condjump. */
1812 rtx choice = SET_SRC (PATTERN (insn));
1813 if (GET_CODE (choice) == IF_THEN_ELSE
1814 && (GET_CODE (XEXP (choice, 0)) == EQ
1815 || GET_CODE (XEXP (choice, 0)) == NE))
1817 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1818 rtx last_insn;
1819 rtx ultimate;
1820 rtx p;
1822 /* Get the place that condjump will jump to
1823 if it is reached from here. */
1824 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1825 == want_nonzero)
1826 ultimate = XEXP (choice, 1);
1827 else
1828 ultimate = XEXP (choice, 2);
1829 /* Get it as a CODE_LABEL. */
1830 if (ultimate == pc_rtx)
1831 ultimate = get_label_after (insn);
1832 else
1833 /* Get the label out of the LABEL_REF. */
1834 ultimate = XEXP (ultimate, 0);
1836 /* Insert the jump immediately before TEMP, specifically
1837 after the label that is between TEMP1 and TEMP. */
1838 last_insn = PREV_INSN (temp);
1840 /* If we would be branching to the next insn, the jump
1841 would immediately be deleted and the re-inserted in
1842 a subsequent pass over the code. So don't do anything
1843 in that case. */
1844 if (next_active_insn (last_insn)
1845 != next_active_insn (ultimate))
1847 emit_barrier_after (last_insn);
1848 p = emit_jump_insn_after (gen_jump (ultimate),
1849 last_insn);
1850 JUMP_LABEL (p) = ultimate;
1851 ++LABEL_NUSES (ultimate);
1852 if (INSN_UID (ultimate) < max_jump_chain
1853 && INSN_CODE (p) < max_jump_chain)
1855 jump_chain[INSN_UID (p)]
1856 = jump_chain[INSN_UID (ultimate)];
1857 jump_chain[INSN_UID (ultimate)] = p;
1859 changed = 1;
1860 continue;
1864 #endif
1865 /* Detect a conditional jump going to the same place
1866 as an immediately following unconditional jump. */
1867 else if (this_is_condjump
1868 && (temp = next_active_insn (insn)) != 0
1869 && simplejump_p (temp)
1870 && (next_active_insn (JUMP_LABEL (insn))
1871 == next_active_insn (JUMP_LABEL (temp))))
1873 rtx tem = temp;
1875 /* ??? Optional. Disables some optimizations, but makes
1876 gcov output more accurate with -O. */
1877 if (flag_test_coverage && !reload_completed)
1878 for (tem = insn; tem != temp; tem = NEXT_INSN (tem))
1879 if (GET_CODE (tem) == NOTE && NOTE_LINE_NUMBER (tem) > 0)
1880 break;
1882 if (tem == temp)
1884 delete_jump (insn);
1885 changed = 1;
1886 continue;
1889 /* Detect a conditional jump jumping over an unconditional jump. */
1891 else if ((this_is_condjump || this_is_condjump_in_parallel)
1892 && ! this_is_simplejump
1893 && reallabelprev != 0
1894 && GET_CODE (reallabelprev) == JUMP_INSN
1895 && prev_active_insn (reallabelprev) == insn
1896 && no_labels_between_p (insn, reallabelprev)
1897 && simplejump_p (reallabelprev))
1899 /* When we invert the unconditional jump, we will be
1900 decrementing the usage count of its old label.
1901 Make sure that we don't delete it now because that
1902 might cause the following code to be deleted. */
1903 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1904 rtx prev_label = JUMP_LABEL (insn);
1906 if (prev_label)
1907 ++LABEL_NUSES (prev_label);
1909 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1911 /* It is very likely that if there are USE insns before
1912 this jump, they hold REG_DEAD notes. These REG_DEAD
1913 notes are no longer valid due to this optimization,
1914 and will cause the life-analysis that following passes
1915 (notably delayed-branch scheduling) to think that
1916 these registers are dead when they are not.
1918 To prevent this trouble, we just remove the USE insns
1919 from the insn chain. */
1921 while (prev_uses && GET_CODE (prev_uses) == INSN
1922 && GET_CODE (PATTERN (prev_uses)) == USE)
1924 rtx useless = prev_uses;
1925 prev_uses = prev_nonnote_insn (prev_uses);
1926 delete_insn (useless);
1929 delete_insn (reallabelprev);
1930 next = insn;
1931 changed = 1;
1934 /* We can now safely delete the label if it is unreferenced
1935 since the delete_insn above has deleted the BARRIER. */
1936 if (prev_label && --LABEL_NUSES (prev_label) == 0)
1937 delete_insn (prev_label);
1938 continue;
1940 else
1942 /* Detect a jump to a jump. */
1944 nlabel = follow_jumps (JUMP_LABEL (insn));
1945 if (nlabel != JUMP_LABEL (insn)
1946 && redirect_jump (insn, nlabel))
1948 changed = 1;
1949 next = insn;
1952 /* Look for if (foo) bar; else break; */
1953 /* The insns look like this:
1954 insn = condjump label1;
1955 ...range1 (some insns)...
1956 jump label2;
1957 label1:
1958 ...range2 (some insns)...
1959 jump somewhere unconditionally
1960 label2: */
1962 rtx label1 = next_label (insn);
1963 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1964 /* Don't do this optimization on the first round, so that
1965 jump-around-a-jump gets simplified before we ask here
1966 whether a jump is unconditional.
1968 Also don't do it when we are called after reload since
1969 it will confuse reorg. */
1970 if (! first
1971 && (reload_completed ? ! flag_delayed_branch : 1)
1972 /* Make sure INSN is something we can invert. */
1973 && condjump_p (insn)
1974 && label1 != 0
1975 && JUMP_LABEL (insn) == label1
1976 && LABEL_NUSES (label1) == 1
1977 && GET_CODE (range1end) == JUMP_INSN
1978 && simplejump_p (range1end))
1980 rtx label2 = next_label (label1);
1981 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1982 if (range1end != range2end
1983 && JUMP_LABEL (range1end) == label2
1984 && GET_CODE (range2end) == JUMP_INSN
1985 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1986 /* Invert the jump condition, so we
1987 still execute the same insns in each case. */
1988 && invert_jump (insn, label1))
1990 rtx range1beg = next_active_insn (insn);
1991 rtx range2beg = next_active_insn (label1);
1992 rtx range1after, range2after;
1993 rtx range1before, range2before;
1994 rtx rangenext;
1996 /* Include in each range any notes before it, to be
1997 sure that we get the line number note if any, even
1998 if there are other notes here. */
1999 while (PREV_INSN (range1beg)
2000 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
2001 range1beg = PREV_INSN (range1beg);
2003 while (PREV_INSN (range2beg)
2004 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
2005 range2beg = PREV_INSN (range2beg);
2007 /* Don't move NOTEs for blocks or loops; shift them
2008 outside the ranges, where they'll stay put. */
2009 range1beg = squeeze_notes (range1beg, range1end);
2010 range2beg = squeeze_notes (range2beg, range2end);
2012 /* Get current surrounds of the 2 ranges. */
2013 range1before = PREV_INSN (range1beg);
2014 range2before = PREV_INSN (range2beg);
2015 range1after = NEXT_INSN (range1end);
2016 range2after = NEXT_INSN (range2end);
2018 /* Splice range2 where range1 was. */
2019 NEXT_INSN (range1before) = range2beg;
2020 PREV_INSN (range2beg) = range1before;
2021 NEXT_INSN (range2end) = range1after;
2022 PREV_INSN (range1after) = range2end;
2023 /* Splice range1 where range2 was. */
2024 NEXT_INSN (range2before) = range1beg;
2025 PREV_INSN (range1beg) = range2before;
2026 NEXT_INSN (range1end) = range2after;
2027 PREV_INSN (range2after) = range1end;
2029 /* Check for a loop end note between the end of
2030 range2, and the next code label. If there is one,
2031 then what we have really seen is
2032 if (foo) break; end_of_loop;
2033 and moved the break sequence outside the loop.
2034 We must move the LOOP_END note to where the
2035 loop really ends now, or we will confuse loop
2036 optimization. Stop if we find a LOOP_BEG note
2037 first, since we don't want to move the LOOP_END
2038 note in that case. */
2039 for (;range2after != label2; range2after = rangenext)
2041 rangenext = NEXT_INSN (range2after);
2042 if (GET_CODE (range2after) == NOTE)
2044 if (NOTE_LINE_NUMBER (range2after)
2045 == NOTE_INSN_LOOP_END)
2047 NEXT_INSN (PREV_INSN (range2after))
2048 = rangenext;
2049 PREV_INSN (rangenext)
2050 = PREV_INSN (range2after);
2051 PREV_INSN (range2after)
2052 = PREV_INSN (range1beg);
2053 NEXT_INSN (range2after) = range1beg;
2054 NEXT_INSN (PREV_INSN (range1beg))
2055 = range2after;
2056 PREV_INSN (range1beg) = range2after;
2058 else if (NOTE_LINE_NUMBER (range2after)
2059 == NOTE_INSN_LOOP_BEG)
2060 break;
2063 changed = 1;
2064 continue;
2069 /* Now that the jump has been tensioned,
2070 try cross jumping: check for identical code
2071 before the jump and before its target label. */
2073 /* First, cross jumping of conditional jumps: */
2075 if (cross_jump && condjump_p (insn))
2077 rtx newjpos, newlpos;
2078 rtx x = prev_real_insn (JUMP_LABEL (insn));
2080 /* A conditional jump may be crossjumped
2081 only if the place it jumps to follows
2082 an opposing jump that comes back here. */
2084 if (x != 0 && ! jump_back_p (x, insn))
2085 /* We have no opposing jump;
2086 cannot cross jump this insn. */
2087 x = 0;
2089 newjpos = 0;
2090 /* TARGET is nonzero if it is ok to cross jump
2091 to code before TARGET. If so, see if matches. */
2092 if (x != 0)
2093 find_cross_jump (insn, x, 2,
2094 &newjpos, &newlpos);
2096 if (newjpos != 0)
2098 do_cross_jump (insn, newjpos, newlpos);
2099 /* Make the old conditional jump
2100 into an unconditional one. */
2101 SET_SRC (PATTERN (insn))
2102 = gen_rtx (LABEL_REF, VOIDmode, JUMP_LABEL (insn));
2103 INSN_CODE (insn) = -1;
2104 emit_barrier_after (insn);
2105 /* Add to jump_chain unless this is a new label
2106 whose UID is too large. */
2107 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2109 jump_chain[INSN_UID (insn)]
2110 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2111 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2113 changed = 1;
2114 next = insn;
2118 /* Cross jumping of unconditional jumps:
2119 a few differences. */
2121 if (cross_jump && simplejump_p (insn))
2123 rtx newjpos, newlpos;
2124 rtx target;
2126 newjpos = 0;
2128 /* TARGET is nonzero if it is ok to cross jump
2129 to code before TARGET. If so, see if matches. */
2130 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2131 &newjpos, &newlpos);
2133 /* If cannot cross jump to code before the label,
2134 see if we can cross jump to another jump to
2135 the same label. */
2136 /* Try each other jump to this label. */
2137 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2138 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2139 target != 0 && newjpos == 0;
2140 target = jump_chain[INSN_UID (target)])
2141 if (target != insn
2142 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2143 /* Ignore TARGET if it's deleted. */
2144 && ! INSN_DELETED_P (target))
2145 find_cross_jump (insn, target, 2,
2146 &newjpos, &newlpos);
2148 if (newjpos != 0)
2150 do_cross_jump (insn, newjpos, newlpos);
2151 changed = 1;
2152 next = insn;
2156 /* This code was dead in the previous jump.c! */
2157 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2159 /* Return insns all "jump to the same place"
2160 so we can cross-jump between any two of them. */
2162 rtx newjpos, newlpos, target;
2164 newjpos = 0;
2166 /* If cannot cross jump to code before the label,
2167 see if we can cross jump to another jump to
2168 the same label. */
2169 /* Try each other jump to this label. */
2170 for (target = jump_chain[0];
2171 target != 0 && newjpos == 0;
2172 target = jump_chain[INSN_UID (target)])
2173 if (target != insn
2174 && ! INSN_DELETED_P (target)
2175 && GET_CODE (PATTERN (target)) == RETURN)
2176 find_cross_jump (insn, target, 2,
2177 &newjpos, &newlpos);
2179 if (newjpos != 0)
2181 do_cross_jump (insn, newjpos, newlpos);
2182 changed = 1;
2183 next = insn;
2189 first = 0;
2192 /* Delete extraneous line number notes.
2193 Note that two consecutive notes for different lines are not really
2194 extraneous. There should be some indication where that line belonged,
2195 even if it became empty. */
2198 rtx last_note = 0;
2200 for (insn = f; insn; insn = NEXT_INSN (insn))
2201 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2203 /* Delete this note if it is identical to previous note. */
2204 if (last_note
2205 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2206 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2208 delete_insn (insn);
2209 continue;
2212 last_note = insn;
2216 #ifdef HAVE_return
2217 if (HAVE_return)
2219 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2220 in front of it. If the machine allows it at this point (we might be
2221 after reload for a leaf routine), it will improve optimization for it
2222 to be there. We do this both here and at the start of this pass since
2223 the RETURN might have been deleted by some of our optimizations. */
2224 insn = get_last_insn ();
2225 while (insn && GET_CODE (insn) == NOTE)
2226 insn = PREV_INSN (insn);
2228 if (insn && GET_CODE (insn) != BARRIER)
2230 emit_jump_insn (gen_return ());
2231 emit_barrier ();
2234 #endif
2236 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2237 If so, delete it, and record that this function can drop off the end. */
2239 insn = last_insn;
2241 int n_labels = 1;
2242 while (insn
2243 /* One label can follow the end-note: the return label. */
2244 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2245 /* Ordinary insns can follow it if returning a structure. */
2246 || GET_CODE (insn) == INSN
2247 /* If machine uses explicit RETURN insns, no epilogue,
2248 then one of them follows the note. */
2249 || (GET_CODE (insn) == JUMP_INSN
2250 && GET_CODE (PATTERN (insn)) == RETURN)
2251 /* A barrier can follow the return insn. */
2252 || GET_CODE (insn) == BARRIER
2253 /* Other kinds of notes can follow also. */
2254 || (GET_CODE (insn) == NOTE
2255 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
2256 insn = PREV_INSN (insn);
2259 /* Report if control can fall through at the end of the function. */
2260 if (insn && GET_CODE (insn) == NOTE
2261 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
2263 can_reach_end = 1;
2264 delete_insn (insn);
2267 /* Show JUMP_CHAIN no longer valid. */
2268 jump_chain = 0;
2271 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2272 jump. Assume that this unconditional jump is to the exit test code. If
2273 the code is sufficiently simple, make a copy of it before INSN,
2274 followed by a jump to the exit of the loop. Then delete the unconditional
2275 jump after INSN.
2277 Return 1 if we made the change, else 0.
2279 This is only safe immediately after a regscan pass because it uses the
2280 values of regno_first_uid and regno_last_uid. */
2282 static int
2283 duplicate_loop_exit_test (loop_start)
2284 rtx loop_start;
2286 rtx insn, set, reg, p, link;
2287 rtx copy = 0;
2288 int num_insns = 0;
/* EXITCODE: the first insn of the exit-test sequence, i.e. the insn just
   past the label targeted by the unconditional jump after LOOP_START.  */
2289 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2290 rtx lastexit;
2291 int max_reg = max_reg_num ();
2292 rtx *reg_map = 0;
2294 /* Scan the exit code. We do not perform this optimization if any insn:
2296 is a CALL_INSN
2297 is a CODE_LABEL
2298 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2299 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2300 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2301 are not valid
2303 Also, don't do this if the exit code is more than 20 insns. */
2305 for (insn = exitcode;
2306 insn
2307 && ! (GET_CODE (insn) == NOTE
2308 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2309 insn = NEXT_INSN (insn))
2311 switch (GET_CODE (insn))
2313 case CODE_LABEL:
2314 case CALL_INSN:
2315 return 0;
2316 case NOTE:
2317 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2318 a jump immediately after the loop start that branches outside
2319 the loop but within an outer loop, near the exit test.
2320 If we copied this exit test and created a phony
2321 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2322 before the exit test look like these could be safely moved
2323 out of the loop even if they actually may be never executed.
2324 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2326 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2327 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2328 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2329 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2330 return 0;
2331 break;
2332 case JUMP_INSN:
2333 case INSN:
2334 if (++num_insns > 20
2335 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2336 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2337 return 0;
2338 break;
2339 default:
2340 break;
2344 /* Unless INSN is zero, we can do the optimization. */
2345 if (insn == 0)
2346 return 0;
/* INSN is now the NOTE_INSN_LOOP_END that terminated the scan; it marks
   the first insn past the exit-test code.  */
2348 lastexit = insn;
2350 /* See if any insn sets a register only used in the loop exit code and
2351 not a user variable. If so, replace it with a new register. */
2352 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2353 if (GET_CODE (insn) == INSN
2354 && (set = single_set (insn)) != 0
2355 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2356 || (GET_CODE (reg) == SUBREG
2357 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2358 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2359 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
/* Look for REG's last use within the exit code (regscan data); if it
   is found here, the register is local to the test sequence.
   NOTE(review): relies on regno_first_uid/regno_last_uid being current,
   per the function's "only safe immediately after regscan" contract.  */
2361 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2362 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2363 break;
2365 if (p != lastexit)
2367 /* We can do the replacement. Allocate reg_map if this is the
2368 first replacement we found. */
2369 if (reg_map == 0)
2371 reg_map = (rtx *) alloca (max_reg * sizeof (rtx))
2372 bzero ((char *) reg_map, max_reg * sizeof (rtx));
2375 REG_LOOP_TEST_P (reg) = 1;
2377 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2381 /* Now copy each insn. */
2382 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2383 switch (GET_CODE (insn))
2385 case BARRIER:
2386 copy = emit_barrier_before (loop_start);
2387 break;
2388 case NOTE:
2389 /* Only copy line-number notes. */
2390 if (NOTE_LINE_NUMBER (insn) >= 0)
2392 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2393 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2395 break;
2397 case INSN:
2398 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2399 if (reg_map)
2400 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2402 mark_jump_label (PATTERN (copy), copy, 0);
2404 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2405 make them. */
2406 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2407 if (REG_NOTE_KIND (link) != REG_LABEL)
2408 REG_NOTES (copy)
2409 = copy_rtx (gen_rtx (EXPR_LIST, REG_NOTE_KIND (link),
2410 XEXP (link, 0), REG_NOTES (copy)));
2411 if (reg_map && REG_NOTES (copy))
2412 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2413 break;
2415 case JUMP_INSN:
2416 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2417 if (reg_map)
2418 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2419 mark_jump_label (PATTERN (copy), copy, 0);
2420 if (REG_NOTES (insn))
2422 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
2423 if (reg_map)
2424 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2427 /* If this is a simple jump, add it to the jump chain. */
2429 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2430 && simplejump_p (copy))
2432 jump_chain[INSN_UID (copy)]
2433 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2434 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2436 break;
2438 default:
2439 abort ();
2442 /* Now clean up by emitting a jump to the end label and deleting the jump
2443 at the start of the loop. */
2444 if (! copy || GET_CODE (copy) != BARRIER)
2446 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2447 loop_start);
2448 mark_jump_label (PATTERN (copy), copy, 0);
/* Keep the jump chain up to date for the newly emitted jump.  */
2449 if (INSN_UID (copy) < max_jump_chain
2450 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2452 jump_chain[INSN_UID (copy)]
2453 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2454 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2456 emit_barrier_before (loop_start);
2459 /* Mark the exit code as the virtual top of the converted loop. */
2460 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2462 delete_insn (next_nonnote_insn (loop_start));
2464 return 1;
2467 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2468 loop-end notes between START and END out before START. Assume that
2469 END is not such a note. START may be such a note. Returns the value
2470 of the new starting insn, which may be different if the original start
2471 was such a note. */
2474 squeeze_notes (start, end)
2475 rtx start, end;
2477 rtx insn;
2478 rtx next;
/* Walk [START, END); NEXT is cached because INSN may be re-linked.  */
2480 for (insn = start; insn != end; insn = next)
2482 next = NEXT_INSN (insn);
2483 if (GET_CODE (insn) == NOTE
2484 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2485 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2486 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2487 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2488 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2489 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2491 if (insn == start)
2492 start = next;
2493 else
/* Unlink INSN from its current position and splice it in just before
   START.  The order of these six assignments matters: PREV_INSN (insn)
   is rewritten first and then used through the new links.  */
2495 rtx prev = PREV_INSN (insn);
2496 PREV_INSN (insn) = PREV_INSN (start);
2497 NEXT_INSN (insn) = start;
2498 NEXT_INSN (PREV_INSN (insn)) = insn;
2499 PREV_INSN (NEXT_INSN (insn)) = insn;
2500 NEXT_INSN (prev) = next;
2501 PREV_INSN (next) = prev;
2506 return start;
2509 /* Compare the instructions before insn E1 with those before E2
2510 to find an opportunity for cross jumping.
2511 (This means detecting identical sequences of insns followed by
2512 jumps to the same place, or followed by a label and a jump
2513 to that label, and replacing one with a jump to the other.)
2515 Assume E1 is a jump that jumps to label E2
2516 (that is not always true but it might as well be).
2517 Find the longest possible equivalent sequences
2518 and store the first insns of those sequences into *F1 and *F2.
2519 Store zero there if no equivalent preceding instructions are found.
2521 We give up if we find a label in stream 1.
2522 Actually we could transfer that label into stream 2. */
2524 static void
2525 find_cross_jump (e1, e2, minimum, f1, f2)
2526 rtx e1, e2;
2527 int minimum;
2528 rtx *f1, *f2;
2530 register rtx i1 = e1, i2 = e2;
2531 register rtx p1, p2;
2532 int lose = 0;
/* LAST1/LAST2: first insns of the sequences matched so far.
   AFTERLAST1/AFTERLAST2: their values one match earlier, so a trailing
   match can be backed off (e.g. the insn after an unshared compare).  */
2534 rtx last1 = 0, last2 = 0;
2535 rtx afterlast1 = 0, afterlast2 = 0;
2536 rtx prev1;
2538 *f1 = 0;
2539 *f2 = 0;
/* Walk both streams backward in lock step, extending the match.
   MINIMUM counts how many more matching insns are still required before
   the cross jump is considered worthwhile.  */
2541 while (1)
2543 i1 = prev_nonnote_insn (i1);
2545 i2 = PREV_INSN (i2);
2546 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2547 i2 = PREV_INSN (i2);
2549 if (i1 == 0)
2550 break;
2552 /* Don't allow the range of insns preceding E1 or E2
2553 to include the other (E2 or E1). */
2554 if (i2 == e1 || i1 == e2)
2555 break;
2557 /* If we will get to this code by jumping, those jumps will be
2558 tensioned to go directly to the new label (before I2),
2559 so this cross-jumping won't cost extra. So reduce the minimum. */
2560 if (GET_CODE (i1) == CODE_LABEL)
2562 --minimum;
2563 break;
2566 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2567 break;
2569 p1 = PATTERN (i1);
2570 p2 = PATTERN (i2);
2572 /* If this is a CALL_INSN, compare register usage information.
2573 If we don't check this on stack register machines, the two
2574 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2575 numbers of stack registers in the same basic block.
2576 If we don't check this on machines with delay slots, a delay slot may
2577 be filled that clobbers a parameter expected by the subroutine.
2579 ??? We take the simple route for now and assume that if they're
2580 equal, they were constructed identically. */
2582 if (GET_CODE (i1) == CALL_INSN
2583 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2584 CALL_INSN_FUNCTION_USAGE (i2)))
2585 lose = 1;
2587 #ifdef STACK_REGS
2588 /* If cross_jump_death_matters is not 0, the insn's mode
2589 indicates whether or not the insn contains any stack-like
2590 regs. */
2592 if (!lose && cross_jump_death_matters && GET_MODE (i1) == QImode)
2594 /* If register stack conversion has already been done, then
2595 death notes must also be compared before it is certain that
2596 the two instruction streams match. */
2598 rtx note;
2599 HARD_REG_SET i1_regset, i2_regset;
2601 CLEAR_HARD_REG_SET (i1_regset);
2602 CLEAR_HARD_REG_SET (i2_regset);
2604 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2605 if (REG_NOTE_KIND (note) == REG_DEAD
2606 && STACK_REG_P (XEXP (note, 0)))
2607 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2609 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2610 if (REG_NOTE_KIND (note) == REG_DEAD
2611 && STACK_REG_P (XEXP (note, 0)))
2612 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2614 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2616 lose = 1;
2618 done:
2621 #endif
2623 /* Don't allow old-style asm or volatile extended asms to be accepted
2624 for cross jumping purposes. It is conceptually correct to allow
2625 them, since cross-jumping preserves the dynamic instruction order
2626 even though it is changing the static instruction order. However,
2627 if an asm is being used to emit an assembler pseudo-op, such as
2628 the MIPS `.set reorder' pseudo-op, then the static instruction order
2629 matters and it must be preserved. */
2630 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
2631 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
2632 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
2633 lose = 1;
2635 if (lose || GET_CODE (p1) != GET_CODE (p2)
2636 || ! rtx_renumbered_equal_p (p1, p2))
2638 /* The following code helps take care of G++ cleanups. */
2639 rtx equiv1;
2640 rtx equiv2;
2642 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2643 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2644 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2645 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2646 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2647 /* If the equivalences are not to a constant, they may
2648 reference pseudos that no longer exist, so we can't
2649 use them. */
2650 && CONSTANT_P (XEXP (equiv1, 0))
2651 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2653 rtx s1 = single_set (i1);
2654 rtx s2 = single_set (i2);
2655 if (s1 != 0 && s2 != 0
2656 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
/* Try substituting the shared constant for both sources; if the
   patterns then match, accept them as equivalent.  */
2658 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2659 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2660 if (! rtx_renumbered_equal_p (p1, p2))
2661 cancel_changes (0);
2662 else if (apply_change_group ())
2663 goto win;
2667 /* Insns fail to match; cross jumping is limited to the following
2668 insns. */
2670 #ifdef HAVE_cc0
2671 /* Don't allow the insn after a compare to be shared by
2672 cross-jumping unless the compare is also shared.
2673 Here, if either of these non-matching insns is a compare,
2674 exclude the following insn from possible cross-jumping. */
2675 if (sets_cc0_p (p1) || sets_cc0_p (p2))
2676 last1 = afterlast1, last2 = afterlast2, ++minimum;
2677 #endif
2679 /* If cross-jumping here will feed a jump-around-jump
2680 optimization, this jump won't cost extra, so reduce
2681 the minimum. */
2682 if (GET_CODE (i1) == JUMP_INSN
2683 && JUMP_LABEL (i1)
2684 && prev_real_insn (JUMP_LABEL (i1)) == e1
2685 --minimum;
2686 break;
/* I1 and I2 match; extend the candidate sequences backward by one.  */
2689 win:
2690 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
2692 /* Ok, this insn is potentially includable in a cross-jump here. */
2693 afterlast1 = last1, afterlast2 = last2;
2694 last1 = i1, last2 = i2, --minimum;
/* Report a match only if it met MINIMUM and is non-empty.  */
2698 if (minimum <= 0 && last1 != 0 && last1 != e1)
2699 *f1 = last1, *f2 = last2;
/* Execute a cross jump found by find_cross_jump: make INSN jump to a
   label placed before NEWLPOS, then delete the now-duplicate insns from
   NEWJPOS up to (but not including) INSN.  */
2702 static void
2703 do_cross_jump (insn, newjpos, newlpos)
2704 rtx insn, newjpos, newlpos;
2706 /* Find an existing label at this point
2707 or make a new one if there is none. */
2708 register rtx label = get_label_before (newlpos);
2710 /* Make the same jump insn jump to the new point. */
2711 if (GET_CODE (PATTERN (insn)) == RETURN)
/* A RETURN cannot simply be redirected; rewrite it in place as an
   unconditional jump to LABEL and fix up all bookkeeping by hand.  */
2713 /* Remove from jump chain of returns. */
2714 delete_from_jump_chain (insn);
2715 /* Change the insn. */
2716 PATTERN (insn) = gen_jump (label);
2717 INSN_CODE (insn) = -1;
2718 JUMP_LABEL (insn) = label;
2719 LABEL_NUSES (label)++;
2720 /* Add to new the jump chain. */
2721 if (INSN_UID (label) < max_jump_chain
2722 && INSN_UID (insn) < max_jump_chain)
2724 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
2725 jump_chain[INSN_UID (label)] = insn;
2728 else
2729 redirect_jump (insn, label);
2731 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
2732 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
2733 the NEWJPOS stream. */
2735 while (newjpos != insn)
2737 rtx lnote;
2739 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
2740 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
2741 || REG_NOTE_KIND (lnote) == REG_EQUIV)
2742 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
2743 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
2744 remove_note (newlpos, lnote);
2746 delete_insn (newjpos);
2747 newjpos = next_real_insn (newjpos);
2748 newlpos = next_real_insn (newlpos);
2752 /* Return the label before INSN, or put a new label there. */
2755 get_label_before (insn)
2756 rtx insn;
2758 rtx label;
2760 /* Find an existing label at this point
2761 or make a new one if there is none. */
2762 label = prev_nonnote_insn (insn);
2764 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2766 rtx prev = PREV_INSN (insn);
2768 label = gen_label_rtx ();
2769 emit_label_after (label, prev);
2770 LABEL_NUSES (label) = 0;
2772 return label;
2775 /* Return the label after INSN, or put a new label there. */
2778 get_label_after (insn)
2779 rtx insn;
2781 rtx label;
2783 /* Find an existing label at this point
2784 or make a new one if there is none. */
2785 label = next_nonnote_insn (insn);
2787 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2789 label = gen_label_rtx ();
2790 emit_label_after (label, insn);
2791 LABEL_NUSES (label) = 0;
2793 return label;
2796 /* Return 1 if INSN is a jump that jumps to right after TARGET
2797 only on the condition that TARGET itself would drop through.
2798 Assumes that TARGET is a conditional jump. */
2800 static int
2801 jump_back_p (insn, target)
2802 rtx insn, target;
2804 rtx cinsn, ctarget;
2805 enum rtx_code codei, codet;
2807 if (simplejump_p (insn) || ! condjump_p (insn)
2808 || simplejump_p (target)
2809 || target != prev_real_insn (JUMP_LABEL (insn)))
2810 return 0;
2812 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
2813 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
2815 codei = GET_CODE (cinsn);
2816 codet = GET_CODE (ctarget);
2818 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
2820 if (! can_reverse_comparison_p (cinsn, insn))
2821 return 0;
2822 codei = reverse_condition (codei);
2825 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
2827 if (! can_reverse_comparison_p (ctarget, target))
2828 return 0;
2829 codet = reverse_condition (codet);
2832 return (codei == codet
2833 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
2834 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
2837 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
2838 return non-zero if it is safe to reverse this comparison. It is if our
2839 floating-point is not IEEE, if this is an NE or EQ comparison, or if
2840 this is known to be an integer comparison. */
2843 can_reverse_comparison_p (comparison, insn)
2844 rtx comparison;
2845 rtx insn;
2847 rtx arg0;
2849 /* If this is not actually a comparison, we can't reverse it. */
2850 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
2851 return 0;
2853 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
2854 /* If this is an NE comparison, it is safe to reverse it to an EQ
2855 comparison and vice versa, even for floating point. If no operands
2856 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
2857 always false and NE is always true, so the reversal is also valid. */
2858 || flag_fast_math
2859 || GET_CODE (comparison) == NE
2860 || GET_CODE (comparison) == EQ)
2861 return 1;
2863 arg0 = XEXP (comparison, 0);
2865 /* Make sure ARG0 is one of the actual objects being compared. If we
2866 can't do this, we can't be sure the comparison can be reversed.
2868 Handle cc0 and a MODE_CC register. */
2869 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
2870 #ifdef HAVE_cc0
2871 || arg0 == cc0_rtx
2872 #endif
2875 rtx prev = prev_nonnote_insn (insn);
2876 rtx set = single_set (prev);
2878 if (set == 0 || SET_DEST (set) != arg0)
2879 return 0;
2881 arg0 = SET_SRC (set);
2883 if (GET_CODE (arg0) == COMPARE)
2884 arg0 = XEXP (arg0, 0);
2887 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
2888 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
2889 return (GET_CODE (arg0) == CONST_INT
2890 || (GET_MODE (arg0) != VOIDmode
2891 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
2892 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
2895 /* Given an rtx-code for a comparison, return the code
2896 for the negated comparison.
2897 WATCH OUT! reverse_condition is not safe to use on a jump
2898 that might be acting on the results of an IEEE floating point comparison,
2899 because of the special treatment of non-signaling nans in comparisons.
2900 Use can_reverse_comparison_p to be sure. */
2902 enum rtx_code
2903 reverse_condition (code)
2904 enum rtx_code code;
2906 switch (code)
2908 case EQ:
2909 return NE;
2911 case NE:
2912 return EQ;
2914 case GT:
2915 return LE;
2917 case GE:
2918 return LT;
2920 case LT:
2921 return GE;
2923 case LE:
2924 return GT;
2926 case GTU:
2927 return LEU;
2929 case GEU:
2930 return LTU;
2932 case LTU:
2933 return GEU;
2935 case LEU:
2936 return GTU;
2938 default:
2939 abort ();
2940 return UNKNOWN;
2944 /* Similar, but return the code when two operands of a comparison are swapped.
2945 This IS safe for IEEE floating-point. */
2947 enum rtx_code
2948 swap_condition (code)
2949 enum rtx_code code;
2951 switch (code)
2953 case EQ:
2954 case NE:
2955 return code;
2957 case GT:
2958 return LT;
2960 case GE:
2961 return LE;
2963 case LT:
2964 return GT;
2966 case LE:
2967 return GE;
2969 case GTU:
2970 return LTU;
2972 case GEU:
2973 return LEU;
2975 case LTU:
2976 return GTU;
2978 case LEU:
2979 return GEU;
2981 default:
2982 abort ();
2983 return UNKNOWN;
2987 /* Given a comparison CODE, return the corresponding unsigned comparison.
2988 If CODE is an equality comparison or already an unsigned comparison,
2989 CODE is returned. */
2991 enum rtx_code
2992 unsigned_condition (code)
2993 enum rtx_code code;
2995 switch (code)
2997 case EQ:
2998 case NE:
2999 case GTU:
3000 case GEU:
3001 case LTU:
3002 case LEU:
3003 return code;
3005 case GT:
3006 return GTU;
3008 case GE:
3009 return GEU;
3011 case LT:
3012 return LTU;
3014 case LE:
3015 return LEU;
3017 default:
3018 abort ();
3022 /* Similarly, return the signed version of a comparison. */
3024 enum rtx_code
3025 signed_condition (code)
3026 enum rtx_code code;
3028 switch (code)
3030 case EQ:
3031 case NE:
3032 case GT:
3033 case GE:
3034 case LT:
3035 case LE:
3036 return code;
3038 case GTU:
3039 return GT;
3041 case GEU:
3042 return GE;
3044 case LTU:
3045 return LT;
3047 case LEU:
3048 return LE;
3050 default:
3051 abort ();
3055 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3056 truth of CODE1 implies the truth of CODE2. */
3059 comparison_dominates_p (code1, code2)
3060 enum rtx_code code1, code2;
3062 if (code1 == code2)
3063 return 1;
3065 switch (code1)
3067 case EQ:
3068 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3069 return 1;
3070 break;
3072 case LT:
3073 if (code2 == LE || code2 == NE)
3074 return 1;
3075 break;
3077 case GT:
3078 if (code2 == GE || code2 == NE)
3079 return 1;
3080 break;
3082 case LTU:
3083 if (code2 == LEU || code2 == NE)
3084 return 1;
3085 break;
3087 case GTU:
3088 if (code2 == GEU || code2 == NE)
3089 return 1;
3090 break;
3092 default:
3093 break;
3096 return 0;
3099 /* Return 1 if INSN is an unconditional jump and nothing else. */
3102 simplejump_p (insn)
3103 rtx insn;
3105 return (GET_CODE (insn) == JUMP_INSN
3106 && GET_CODE (PATTERN (insn)) == SET
3107 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3108 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
3111 /* Return nonzero if INSN is a (possibly) conditional jump
3112 and nothing more. */
3115 condjump_p (insn)
3116 rtx insn;
3118 register rtx x = PATTERN (insn);
3119 if (GET_CODE (x) != SET)
3120 return 0;
3121 if (GET_CODE (SET_DEST (x)) != PC)
3122 return 0;
3123 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3124 return 1;
3125 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3126 return 0;
3127 if (XEXP (SET_SRC (x), 2) == pc_rtx
3128 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3129 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3130 return 1;
3131 if (XEXP (SET_SRC (x), 1) == pc_rtx
3132 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3133 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3134 return 1;
3135 return 0;
3138 /* Return nonzero if INSN is a (possibly) conditional jump
3139 and nothing more. */
3142 condjump_in_parallel_p (insn)
3143 rtx insn;
3145 register rtx x = PATTERN (insn);
3147 if (GET_CODE (x) != PARALLEL)
3148 return 0;
3149 else
3150 x = XVECEXP (x, 0, 0);
3152 if (GET_CODE (x) != SET)
3153 return 0;
3154 if (GET_CODE (SET_DEST (x)) != PC)
3155 return 0;
3156 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
3157 return 1;
3158 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
3159 return 0;
3160 if (XEXP (SET_SRC (x), 2) == pc_rtx
3161 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3162 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3163 return 1;
3164 if (XEXP (SET_SRC (x), 1) == pc_rtx
3165 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3166 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3167 return 1;
3168 return 0;
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.
   Return 0 if X does not set cc0 at all.
   Only meaningful on machines with a cc0 register; aborts otherwise.  */

int
sets_cc0_p (x)
     rtx x;
{
#ifdef HAVE_cc0
  /* The simple case: a lone (set (cc0) ...).  */
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      /* Scan every element: note whether any SET targets cc0 and whether
	 any SET targets something else.  USEs and CLOBBERs don't count
	 as "other things".  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
#else
  /* This function should never be called on a machine without cc0.  */
  abort ();
#endif
}
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (label)
     rtx label;
{
  register rtx insn;
  register rtx next;
  register rtx value = label;
  register int depth;

  /* Chase at most 10 hops so we terminate even on pathological chains.
     Each hop requires that the first active insn after VALUE be an
     unconditional jump (or a RETURN) immediately followed by a BARRIER,
     i.e. that execution cannot fall through past it.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      /* Don't chain to a label that is immediately followed by a dispatch
	 table; jumping there would land in the table.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  /* Hitting the depth limit probably means a cycle the JUMP_LABEL check
     missed; play safe and report no change.  */
  if (depth == 10)
    return label;
  return value;
}
/* Assuming that field IDX of X is a vector of label_refs,
   replace each of them by the ultimate label reached by it.
   Return nonzero if a change is made.

   The chain-following is delegated to follow_jumps, which (before
   reload is complete) refuses to chain across a NOTE_INSN_LOOP_BEG.  */

static int
tension_vector_labels (x, idx)
     register rtx x;
     register int idx;
{
  int changed = 0;
  register int i;
  for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
    {
      register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
      register rtx nlabel = follow_jumps (olabel);
      if (nlabel && nlabel != olabel)
	{
	  /* Retarget this vector element, transfer the use count, and
	     delete the old label if this was its last use.  */
	  XEXP (XVECEXP (x, idx, i), 0) = nlabel;
	  ++LABEL_NUSES (nlabel);
	  if (--LABEL_NUSES (olabel) == 0)
	    delete_insn (olabel);
	  changed = 1;
	}
    }
  return changed;
}
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.

   Once reload has completed (CROSS_JUMP non-zero), we need not consider
   two labels distinct if they are separated by only USE or CLOBBER insns.  */

static void
mark_jump_label (x, insn, cross_jump)
     register rtx x;
     rtx insn;
     int cross_jump;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  switch (code)
    {
    /* These codes cannot contain a label reference; nothing to do.  */
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* If this is a constant-pool reference, see if it is a label.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);
	rtx olabel = label;
	rtx note;
	rtx next;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	/* If there are other labels following this one,
	   replace it with the last of the consecutive labels.  */
	for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
	  {
	    if (GET_CODE (next) == CODE_LABEL)
	      label = next;
	    /* After reload, USE/CLOBBER insns between labels don't keep
	       them distinct.  */
	    else if (cross_jump && GET_CODE (next) == INSN
		     && (GET_CODE (PATTERN (next)) == USE
			 || GET_CODE (PATTERN (next)) == CLOBBER))
	      continue;
	    else if (GET_CODE (next) != NOTE)
	      break;
	    /* Before cross-jumping, certain notes keep labels distinct
	       (see the function comment above).  */
	    else if (! cross_jump
		     && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
			 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
			 /* ??? Optional.  Disables some optimizations, but
			    makes gcov output more accurate with -O.  */
			 || (flag_test_coverage
			     && NOTE_LINE_NUMBER (next) > 0)))
	      break;
	  }

	XEXP (x, 0) = label;
	++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;

	    /* If we've changed OLABEL and we had a REG_LABEL note
	       for it, update it as well.  */
	    else if (label != olabel
		     && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
	      XEXP (note, 0) = label;

	    /* Otherwise, add a REG_LABEL note for LABEL unless there already
	       is one.  */
	    else if (! find_reg_note (insn, REG_LABEL, label))
	      {
		rtx next = next_real_insn (label);
		/* Don't record labels that refer to dispatch tables.
		   This is not necessary, since the tablejump
		   references the same label.
		   And if we did record them, flow.c would make worse code.  */
		if (next == 0
		    || ! (GET_CODE (next) == JUMP_INSN
			  && (GET_CODE (PATTERN (next)) == ADDR_VEC
			      || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)))
		  REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL, label,
					      REG_NOTES (insn));
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      {
	int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	for (i = 0; i < XVECLEN (x, eltnum); i++)
	  mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
	return;
      }

    default:
      break;
    }

  /* Recurse over all sub-expressions of any other code.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, cross_jump);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
	}
    }
}
3435 /* If all INSN does is set the pc, delete it,
3436 and delete the insn that set the condition codes for it
3437 if that's what the previous thing was. */
3439 void
3440 delete_jump (insn)
3441 rtx insn;
3443 register rtx set = single_set (insn);
3445 if (set && GET_CODE (SET_DEST (set)) == PC)
3446 delete_computation (insn);
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && !FIND_REG_INC_NOTE (prev, NULL_RTX))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_UNUSED,
					cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* For each register that dies in INSN, walk backwards looking for the
     insn that computed it; if that insn did nothing else, delete it too.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      rtx our_prev;

      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      for (our_prev = prev_nonnote_insn (insn);
	   our_prev && GET_CODE (our_prev) == INSN;
	   our_prev = prev_nonnote_insn (our_prev))
	{
	  /* If we reach a SEQUENCE, it is too complex to try to
	     do anything with it, so give up.  */
	  if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
	    break;

	  if (GET_CODE (PATTERN (our_prev)) == USE
	      && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
	    /* reorg creates USEs that look like this.  We leave them
	       alone because reorg needs them for its own purposes.  */
	    break;

	  if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
	    {
	      /* An insn with a side effect (auto-increment) can't be
		 deleted even if its main SET is dead.  */
	      if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
		break;

	      if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
		{
		  /* If we find a SET of something else, we can't
		     delete the insn.  */

		  int i;

		  for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
		    {
		      rtx part = XVECEXP (PATTERN (our_prev), 0, i);

		      if (GET_CODE (part) == SET
			  && SET_DEST (part) != XEXP (note, 0))
			break;
		    }

		  if (i == XVECLEN (PATTERN (our_prev), 0))
		    delete_computation (our_prev);
		}
	      else if (GET_CODE (PATTERN (our_prev)) == SET
		       && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
		delete_computation (our_prev);

	      break;
	    }

	  /* If OUR_PREV references the register that dies here, it is an
	     additional use.  Hence any prior SET isn't dead.  However, this
	     insn becomes the new place for the REG_DEAD note.  */
	  if (reg_overlap_mentioned_p (XEXP (note, 0),
				       PATTERN (our_prev)))
	    {
	      XEXP (note, 1) = REG_NOTES (our_prev);
	      REG_NOTES (our_prev) = note;
	      break;
	    }
	}
    }

  delete_insn (insn);
}
/* Delete insn INSN from the chain of insns and update label ref counts.
   May delete some following insns as a consequence; may even delete
   a label elsewhere and insns that follow it.

   Returns the first insn after INSN that was not deleted.  */

rtx
delete_insn (insn)
     register rtx insn;
{
  register rtx next = NEXT_INSN (insn);
  register rtx prev = PREV_INSN (insn);
  register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  register int dont_really_delete = 0;

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  /* Don't delete user-declared labels.  Convert them to special NOTEs
     instead.  */
  if (was_code_label && LABEL_NAME (insn) != 0
      && optimize && ! dont_really_delete)
    {
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
      NOTE_SOURCE_FILE (insn) = 0;
      /* The insn is kept in the chain (as a NOTE) below.  */
      dont_really_delete = 1;
    }
  else
    /* Mark this insn as deleted.  */
    INSN_DELETED_P (insn) = 1;

  /* If this is an unconditional jump, delete it from the jump chain.  */
  if (simplejump_p (insn))
    delete_from_jump_chain (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    {
      INSN_DELETED_P (next) = 1;
      next = NEXT_INSN (next);
    }

  /* Patch out INSN (and the barrier if any) */

  if (optimize && ! dont_really_delete)
    {
      if (prev)
	{
	  NEXT_INSN (prev) = next;
	  /* If PREV is a SEQUENCE, also patch the link out of its last
	     inner insn.  */
	  if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	    NEXT_INSN (XVECEXP (PATTERN (prev), 0,
				XVECLEN (PATTERN (prev), 0) - 1)) = next;
	}

      if (next)
	{
	  PREV_INSN (next) = prev;
	  /* Likewise patch the back-link of NEXT's first inner insn.  */
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	    PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
	}

      if (prev && NEXT_INSN (prev) == 0)
	set_last_insn (prev);
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
      {
	/* This can delete NEXT or PREV,
	   either directly if NEXT is JUMP_LABEL (INSN),
	   or indirectly through more levels of jumps.  */
	delete_insn (JUMP_LABEL (insn));
	/* I feel a little doubtful about this loop,
	   but I see no clean and sure alternative way
	   to find the first insn after INSN that is not now deleted.
	   I hope this works.  */
	while (next && INSN_DELETED_P (next))
	  next = NEXT_INSN (next);
	return next;
      }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Drop one use per table entry; delete labels that hit zero.  */
      for (i = 0; i < len; i++)
	if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_insn (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      register RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_insn (next);
	}
    }

  return next;
}
3716 /* Advance from INSN till reaching something not deleted
3717 then return that. May return INSN itself. */
3720 next_nondeleted_insn (insn)
3721 rtx insn;
3723 while (INSN_DELETED_P (insn))
3724 insn = NEXT_INSN (insn);
3725 return insn;
3728 /* Delete a range of insns from FROM to TO, inclusive.
3729 This is for the sake of peephole optimization, so assume
3730 that whatever these insns do will still be done by a new
3731 peephole insn that will replace them. */
3733 void
3734 delete_for_peephole (from, to)
3735 register rtx from, to;
3737 register rtx insn = from;
3739 while (1)
3741 register rtx next = NEXT_INSN (insn);
3742 register rtx prev = PREV_INSN (insn);
3744 if (GET_CODE (insn) != NOTE)
3746 INSN_DELETED_P (insn) = 1;
3748 /* Patch this insn out of the chain. */
3749 /* We don't do this all at once, because we
3750 must preserve all NOTEs. */
3751 if (prev)
3752 NEXT_INSN (prev) = next;
3754 if (next)
3755 PREV_INSN (next) = prev;
3758 if (insn == to)
3759 break;
3760 insn = next;
3763 /* Note that if TO is an unconditional jump
3764 we *do not* delete the BARRIER that follows,
3765 since the peephole that replaces this sequence
3766 is also an unconditional jump in that case. */
/* Invert the condition of the jump JUMP, and make it jump
   to label NLABEL instead of where it jumps now.
   Return 1 on success, 0 if the redirection failed (in which case
   JUMP is left exactly as it was).  */

int
invert_jump (jump, nlabel)
     rtx jump, nlabel;
{
  /* We have to either invert the condition and change the label or
     do neither.  Either operation could fail.  We first try to invert
     the jump. If that succeeds, we try changing the label.  If that fails,
     we invert the jump back to what it was.  */

  if (! invert_exp (PATTERN (jump), jump))
    return 0;

  if (redirect_jump (jump, nlabel))
    {
      if (flag_branch_probabilities)
	{
	  rtx note = find_reg_note (jump, REG_BR_PROB, 0);

	  /* An inverted jump means that a probability taken becomes a
	     probability not taken.  Subtract the branch probability from the
	     probability base to convert it back to a taken probability.
	     (We don't flip the probability on a branch that's never
	     taken.)  */
	  if (note && XINT (XEXP (note, 0), 0) >= 0)
	    XINT (XEXP (note, 0), 0)
	      = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
	}

      return 1;
    }

  /* Redirection failed; undo the inversion.  */
  if (! invert_exp (PATTERN (jump), jump))
    /* This should just be putting it back the way it was.  */
    abort ();

  return 0;
}
/* Invert the jump condition of rtx X contained in jump insn, INSN.

   Return 1 if we can do so, 0 if we cannot find a way to do so that
   matches a pattern.  */

int
invert_exp (x, insn)
     rtx x;
     rtx insn;
{
  register RTX_CODE code;
  register int i;
  register char *fmt;

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      register rtx comp = XEXP (x, 0);
      register rtx tem;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      if (can_reverse_comparison_p (comp, insn)
	  && validate_change (insn, &XEXP (x, 0),
			      gen_rtx (reverse_condition (GET_CODE (comp)),
				       GET_MODE (comp), XEXP (comp, 0),
				       XEXP (comp, 1)), 0))
	return 1;

      /* Queue the arm swap as a group so both changes are validated (and
	 applied or rejected) together.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return apply_change_group ();
    }

  /* Recurse: every contained IF_THEN_ELSE must be invertible.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! invert_exp (XEXP (x, i), insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (!invert_exp (XVECEXP (x, i, j), insn))
	      return 0;
	}
    }

  return 1;
}
/* Make jump JUMP jump to label NLABEL instead of where it jumps now.
   If the old jump target label is unused as a result,
   it and the code following it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't (this
   can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Nothing to do if the target is unchanged.  */
  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
    return 0;

  /* If this is an unconditional branch, delete it from the jump_chain of
     OLABEL and add it to the jump_chain of NLABEL (assuming both labels
     have UID's in range and JUMP_CHAIN is valid).  */
  if (jump_chain && (simplejump_p (jump)
		     || GET_CODE (PATTERN (jump)) == RETURN))
    {
      /* Chain index 0 is reserved for RETURN jumps.  */
      int label_index = nlabel ? INSN_UID (nlabel) : 0;

      delete_from_jump_chain (jump);
      if (label_index < max_jump_chain
	  && INSN_UID (jump) < max_jump_chain)
	{
	  jump_chain[INSN_UID (jump)] = jump_chain[label_index];
	  jump_chain[label_index] = jump;
	}
    }

  /* Transfer the use count from the old label to the new one, deleting
     the old label if this was its last use.  */
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0)
    delete_insn (olabel);

  return 1;
}
/* Delete the instruction JUMP from any jump chain it might be on.  */

static void
delete_from_jump_chain (jump)
     rtx jump;
{
  int index;
  rtx olabel = JUMP_LABEL (jump);

  /* Handle unconditional jumps.  */
  if (jump_chain && olabel != 0
      && INSN_UID (olabel) < max_jump_chain
      && simplejump_p (jump))
    index = INSN_UID (olabel);
  /* Handle return insns.  */
  else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
    index = 0;
  else
    return;

  /* Unlink JUMP from the singly-linked chain rooted at INDEX.  */
  if (jump_chain[index] == jump)
    jump_chain[index] = jump_chain[INSN_UID (jump)];
  else
    {
      rtx insn;

      for (insn = jump_chain[index];
	   insn != 0;
	   insn = jump_chain[INSN_UID (insn)])
	if (jump_chain[INSN_UID (insn)] == jump)
	  {
	    jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
	    break;
	  }
    }
}
/* If NLABEL is nonzero, throughout the rtx at LOC,
   alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL).  If OLABEL is
   zero, alter (RETURN) to (LABEL_REF NLABEL).

   If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
   validity with validate_change.  Convert (set (pc) (label_ref olabel))
   to (return).

   Return 0 if we found a change we would like to make but it is invalid.
   Otherwise, return 1.  */

int
redirect_exp (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  register rtx x = *loc;
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  if (nlabel)
	    XEXP (x, 0) = nlabel;
	  else
	    /* Turning a label into RETURN may not match a pattern, so
	       validate it.  */
	    return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
	  return 1;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Replace (return) by a jump to NLABEL; wrap it in a SET when it
	 is the whole pattern.  */
      x = gen_rtx (LABEL_REF, VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx (SET, VOIDmode, pc_rtx, x);
      return validate_change (insn, loc, x, 0);
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);

  /* Recurse into sub-expressions; all replacements must succeed.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
	  return 0;
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
	      return 0;
	}
    }

  return 1;
}
/* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.

   If the old jump target label (before the dispatch table) becomes unused,
   it and the dispatch table may be deleted.  In that case, find the insn
   before the jump references that label and delete it and logical successors
   too.  */

static void
redirect_tablejump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Add this jump to the jump_chain of NLABEL.  */
  if (jump_chain && INSN_UID (nlabel) < max_jump_chain
      && INSN_UID (jump) < max_jump_chain)
    {
      jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
      jump_chain[INSN_UID (nlabel)] = jump;
    }

  /* Rewrite the tablejump as a simple jump to NLABEL; resetting
     INSN_CODE forces re-recognition of the new pattern.  */
  PATTERN (jump) = gen_jump (nlabel);
  JUMP_LABEL (jump) = nlabel;
  ++LABEL_NUSES (nlabel);
  INSN_CODE (jump) = -1;

  if (--LABEL_NUSES (olabel) == 0)
    {
      /* The old label is dead; also remove the insn that computed the
	 dispatch-table index, then the label (and table) itself.  */
      delete_labelref_insn (jump, olabel, 0);
      delete_insn (olabel);
    }
}
/* Find the insn referencing LABEL that is a logical predecessor of INSN.
   If we found one, delete it and then delete this insn if DELETE_THIS is
   non-zero.  Return non-zero if INSN or a predecessor references LABEL.  */

static int
delete_labelref_insn (insn, label, delete_this)
     rtx insn, label;
     int delete_this;
{
  int deleted = 0;
  rtx link;

  /* Does INSN itself reference LABEL?  */
  if (GET_CODE (insn) != NOTE
      && reg_mentioned_p (label, PATTERN (insn)))
    {
      if (delete_this)
	{
	  delete_insn (insn);
	  deleted = 1;
	}
      else
	return 1;
    }

  /* Otherwise search INSN's logical predecessors (LOG_LINKS),
     deleting any that reference LABEL, and then INSN itself if asked.  */
  for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
    if (delete_labelref_insn (XEXP (link, 0), label, 1))
      {
	if (delete_this)
	  {
	    delete_insn (insn);
	    deleted = 1;
	  }
	else
	  return 1;
      }

  return deleted;
}
4086 /* Like rtx_equal_p except that it considers two REGs as equal
4087 if they renumber to the same value and considers two commutative
4088 operations to be the same if the order of the operands has been
4089 reversed. */
4092 rtx_renumbered_equal_p (x, y)
4093 rtx x, y;
4095 register int i;
4096 register RTX_CODE code = GET_CODE (x);
4097 register char *fmt;
4099 if (x == y)
4100 return 1;
4102 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
4103 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
4104 && GET_CODE (SUBREG_REG (y)) == REG)))
4106 int reg_x = -1, reg_y = -1;
4107 int word_x = 0, word_y = 0;
4109 if (GET_MODE (x) != GET_MODE (y))
4110 return 0;
4112 /* If we haven't done any renumbering, don't
4113 make any assumptions. */
4114 if (reg_renumber == 0)
4115 return rtx_equal_p (x, y);
4117 if (code == SUBREG)
4119 reg_x = REGNO (SUBREG_REG (x));
4120 word_x = SUBREG_WORD (x);
4122 if (reg_renumber[reg_x] >= 0)
4124 reg_x = reg_renumber[reg_x] + word_x;
4125 word_x = 0;
4129 else
4131 reg_x = REGNO (x);
4132 if (reg_renumber[reg_x] >= 0)
4133 reg_x = reg_renumber[reg_x];
4136 if (GET_CODE (y) == SUBREG)
4138 reg_y = REGNO (SUBREG_REG (y));
4139 word_y = SUBREG_WORD (y);
4141 if (reg_renumber[reg_y] >= 0)
4143 reg_y = reg_renumber[reg_y];
4144 word_y = 0;
4148 else
4150 reg_y = REGNO (y);
4151 if (reg_renumber[reg_y] >= 0)
4152 reg_y = reg_renumber[reg_y];
4155 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
4158 /* Now we have disposed of all the cases
4159 in which different rtx codes can match. */
4160 if (code != GET_CODE (y))
4161 return 0;
4163 switch (code)
4165 case PC:
4166 case CC0:
4167 case ADDR_VEC:
4168 case ADDR_DIFF_VEC:
4169 return 0;
4171 case CONST_INT:
4172 return INTVAL (x) == INTVAL (y);
4174 case LABEL_REF:
4175 /* We can't assume nonlocal labels have their following insns yet. */
4176 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
4177 return XEXP (x, 0) == XEXP (y, 0);
4179 /* Two label-refs are equivalent if they point at labels
4180 in the same position in the instruction stream. */
4181 return (next_real_insn (XEXP (x, 0))
4182 == next_real_insn (XEXP (y, 0)));
4184 case SYMBOL_REF:
4185 return XSTR (x, 0) == XSTR (y, 0);
4187 default:
4188 break;
4191 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4193 if (GET_MODE (x) != GET_MODE (y))
4194 return 0;
4196 /* For commutative operations, the RTX match if the operand match in any
4197 order. Also handle the simple binary and unary cases without a loop. */
4198 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4199 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4200 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4201 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4202 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4203 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4204 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4205 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4206 else if (GET_RTX_CLASS (code) == '1')
4207 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4209 /* Compare the elements. If any pair of corresponding elements
4210 fail to match, return 0 for the whole things. */
4212 fmt = GET_RTX_FORMAT (code);
4213 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4215 register int j;
4216 switch (fmt[i])
4218 case 'w':
4219 if (XWINT (x, i) != XWINT (y, i))
4220 return 0;
4221 break;
4223 case 'i':
4224 if (XINT (x, i) != XINT (y, i))
4225 return 0;
4226 break;
4228 case 's':
4229 if (strcmp (XSTR (x, i), XSTR (y, i)))
4230 return 0;
4231 break;
4233 case 'e':
4234 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4235 return 0;
4236 break;
4238 case 'u':
4239 if (XEXP (x, i) != XEXP (y, i))
4240 return 0;
4241 /* fall through. */
4242 case '0':
4243 break;
4245 case 'E':
4246 if (XVECLEN (x, i) != XVECLEN (y, i))
4247 return 0;
4248 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4249 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4250 return 0;
4251 break;
4253 default:
4254 abort ();
4257 return 1;
4260 /* If X is a hard register or equivalent to one or a subregister of one,
4261 return the hard register number. If X is a pseudo register that was not
4262 assigned a hard register, return the pseudo register number. Otherwise,
4263 return -1. Any rtx is valid for X. */
4266 true_regnum (x)
4267 rtx x;
4269 if (GET_CODE (x) == REG)
4271 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4272 return reg_renumber[REGNO (x)];
4273 return REGNO (x);
4275 if (GET_CODE (x) == SUBREG)
4277 int base = true_regnum (SUBREG_REG (x));
4278 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4279 return SUBREG_WORD (x) + base;
4281 return -1;
4284 /* Optimize code of the form:
4286 for (x = a[i]; x; ...)
4288 for (x = a[i]; x; ...)
4290 foo:
4292 Loop optimize will change the above code into
4294 if (x = a[i])
4295 for (;;)
4296 { ...; if (! (x = ...)) break; }
4297 if (x = a[i])
4298 for (;;)
4299 { ...; if (! (x = ...)) break; }
4300 foo:
4302 In general, if the first test fails, the program can branch
4303 directly to `foo' and skip the second try which is doomed to fail.
4304 We run this after loop optimization and before flow analysis. */
4306 /* When comparing the insn patterns, we track the fact that different
4307 pseudo-register numbers may have been used in each computation.
4308 The following array stores an equivalence -- same_regs[I] == J means
4309 that pseudo register I was used in the first set of tests in a context
4310 where J was used in the second set. We also count the number of such
4311 pending equivalences. If nonzero, the expressions really aren't the
4312 same. */
4314 static int *same_regs;
/* Number of entries of same_regs still pending (paired with a `Y'-side
   register rather than reset to -1).  Nonzero means the two expressions
   are not yet known to be equivalent.  */
4316 static int num_same_regs;
4318 /* Track any registers modified between the target of the first jump and
4319 the second jump. They never compare equal. */
4321 static char *modified_regs;
4323 /* Record if memory was modified. */
4325 static int modified_mem;
4327 /* Called via note_stores on each insn between the target of the first
4328 branch and the second branch. It marks any changed registers. */
4330 static void
4331 mark_modified_reg (dest, x)
4332 rtx dest;
4333 rtx x;
4335 int regno, i;
4337 if (GET_CODE (dest) == SUBREG)
4338 dest = SUBREG_REG (dest);
4340 if (GET_CODE (dest) == MEM)
4341 modified_mem = 1;
4343 if (GET_CODE (dest) != REG)
4344 return;
4346 regno = REGNO (dest);
4347 if (regno >= FIRST_PSEUDO_REGISTER)
4348 modified_regs[regno] = 1;
4349 else
4350 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4351 modified_regs[regno + i] = 1;
4354 /* F is the first insn in the chain of insns. */
4356 void
4357 thread_jumps (f, max_reg, flag_before_loop)
4358 rtx f;
4359 int max_reg;
4360 int flag_before_loop;
4362 /* Basic algorithm is to find a conditional branch,
4363 the label it may branch to, and the branch after
4364 that label. If the two branches test the same condition,
4365 walk back from both branch paths until the insn patterns
4366 differ, or code labels are hit. If we make it back to
4367 the target of the first branch, then we know that the first branch
4368 will either always succeed or always fail depending on the relative
4369 senses of the two branches. So adjust the first branch accordingly
4370 in this case. */
4372 rtx label, b1, b2, t1, t2;
4373 enum rtx_code code1, code2;
4374 rtx b1op0, b1op1, b2op0, b2op1;
4375 int changed = 1;
4376 int i;
4377 int *all_reset;
4379 /* Allocate register tables and quick-reset table. */
4380 modified_regs = (char *) alloca (max_reg * sizeof (char));
4381 same_regs = (int *) alloca (max_reg * sizeof (int));
4382 all_reset = (int *) alloca (max_reg * sizeof (int));
4383 for (i = 0; i < max_reg; i++)
4384 all_reset[i] = -1;
/* Re-scan the whole insn chain as long as the previous pass redirected
   some jump; one redirection can expose further opportunities.  */
4386 while (changed)
4388 changed = 0;
4390 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4392 /* Get to a candidate branch insn. */
4393 if (GET_CODE (b1) != JUMP_INSN
4394 || ! condjump_p (b1) || simplejump_p (b1)
4395 || JUMP_LABEL (b1) == 0)
4396 continue;
/* Reset the per-candidate tracking state: nothing modified yet, and
   no pending register equivalences (same_regs all -1).  */
4398 bzero (modified_regs, max_reg * sizeof (char));
4399 modified_mem = 0;
4401 bcopy ((char *) all_reset, (char *) same_regs,
4402 max_reg * sizeof (int));
4403 num_same_regs = 0;
4405 label = JUMP_LABEL (b1);
4407 /* Look for a branch after the target. Record any registers and
4408 memory modified between the target and the branch. Stop when we
4409 get to a label since we can't know what was changed there. */
4410 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4412 if (GET_CODE (b2) == CODE_LABEL)
4413 break;
4415 else if (GET_CODE (b2) == JUMP_INSN)
4417 /* If this is an unconditional jump and is the only use of
4418 its target label, we can follow it. */
4419 if (simplejump_p (b2)
4420 && JUMP_LABEL (b2) != 0
4421 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
4423 b2 = JUMP_LABEL (b2);
4424 continue;
4426 else
4427 break;
/* Anything that is neither a CALL_INSN nor an ordinary INSN cannot
   store into registers or memory; just keep scanning.  */
4430 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
4431 continue;
/* A call clobbers memory and every call-used, non-fixed register
   other than the stack/frame/arg pointers.  */
4433 if (GET_CODE (b2) == CALL_INSN)
4435 modified_mem = 1;
4436 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4437 if (call_used_regs[i] && ! fixed_regs[i]
4438 && i != STACK_POINTER_REGNUM
4439 && i != FRAME_POINTER_REGNUM
4440 && i != HARD_FRAME_POINTER_REGNUM
4441 && i != ARG_POINTER_REGNUM)
4442 modified_regs[i] = 1;
/* Record every register (and memory location) this insn stores to.  */
4445 note_stores (PATTERN (b2), mark_modified_reg);
4448 /* Check the next candidate branch insn from the label
4449 of the first. */
4450 if (b2 == 0
4451 || GET_CODE (b2) != JUMP_INSN
4452 || b2 == b1
4453 || ! condjump_p (b2)
4454 || simplejump_p (b2))
4455 continue;
4457 /* Get the comparison codes and operands, reversing the
4458 codes if appropriate. If we don't have comparison codes,
4459 we can't do anything. */
4460 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
4461 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
4462 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
/* When the IF_THEN_ELSE's first arm is pc (falls through on the
   condition), the branch is taken on the reverse condition.  */
4463 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
4464 code1 = reverse_condition (code1);
4466 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
4467 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
4468 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
4469 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
4470 code2 = reverse_condition (code2);
4472 /* If they test the same things and knowing that B1 branches
4473 tells us whether or not B2 branches, check if we
4474 can thread the branch. */
4475 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
4476 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
4477 && (comparison_dominates_p (code1, code2)
4478 || (comparison_dominates_p (code1, reverse_condition (code2))
4479 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
4481 b1))))
/* Walk backwards from both branches in lockstep, comparing the insn
   patterns on each path, until we reach LABEL or find a mismatch.  */
4483 t1 = prev_nonnote_insn (b1);
4484 t2 = prev_nonnote_insn (b2);
4486 while (t1 != 0 && t2 != 0)
4488 if (t2 == label)
4490 /* We have reached the target of the first branch.
4491 If there are no pending register equivalents,
4492 we know that this branch will either always
4493 succeed (if the senses of the two branches are
4494 the same) or always fail (if not). */
4495 rtx new_label;
4497 if (num_same_regs != 0)
4498 break;
4500 if (comparison_dominates_p (code1, code2))
4501 new_label = JUMP_LABEL (b2);
4502 else
4503 new_label = get_label_after (b2);
4505 if (JUMP_LABEL (b1) != new_label)
4507 rtx prev = PREV_INSN (new_label);
4509 if (flag_before_loop
4510 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
4512 /* Don't thread to the loop label. If a loop
4513 label is reused, loop optimization will
4514 be disabled for that loop. */
4515 new_label = gen_label_rtx ();
4516 emit_label_after (new_label, PREV_INSN (prev));
4518 changed |= redirect_jump (b1, new_label);
4520 break;
4523 /* If either of these is not a normal insn (it might be
4524 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4525 have already been skipped above.) Similarly, fail
4526 if the insns are different. */
4527 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4528 || recog_memoized (t1) != recog_memoized (t2)
4529 || ! rtx_equal_for_thread_p (PATTERN (t1),
4530 PATTERN (t2), t2))
4531 break;
4533 t1 = prev_nonnote_insn (t1);
4534 t2 = prev_nonnote_insn (t2);
4541 /* This is like RTX_EQUAL_P except that it knows about our handling of
4542 possibly equivalent registers and knows to consider volatile and
4543 modified objects as not equal.
4545 YINSN is the insn containing Y. */
4548 rtx_equal_for_thread_p (x, y, yinsn)
4549 rtx x, y;
4550 rtx yinsn;
/* NOTE: this predicate mutates the global same_regs/num_same_regs tables
   (pairing pseudos in X with pseudos in Y), so the order in which it is
   called over subexpressions matters.  */
4552 register int i;
4553 register int j;
4554 register enum rtx_code code;
4555 register char *fmt;
4557 code = GET_CODE (x);
4558 /* Rtx's of different codes cannot be equal. */
4559 if (code != GET_CODE (y))
4560 return 0;
4562 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4563 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4565 if (GET_MODE (x) != GET_MODE (y))
4566 return 0;
4568 /* For floating-point, consider everything unequal. This is a bit
4569 pessimistic, but this pass would only rarely do anything for FP
4570 anyway. */
4571 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
4572 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
4573 return 0;
4575 /* For commutative operations, the RTX match if the operand match in any
4576 order. Also handle the simple binary and unary cases without a loop. */
4577 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4578 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4579 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4580 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4581 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4582 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4583 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4584 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4585 else if (GET_RTX_CLASS (code) == '1')
4586 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4588 /* Handle special-cases first. */
4589 switch (code)
4591 case REG:
/* Identical, unmodified registers are trivially equal.  */
4592 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4593 return 1;
4595 /* If neither is user variable or hard register, check for possible
4596 equivalence. */
4597 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4598 || REGNO (x) < FIRST_PSEUDO_REGISTER
4599 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4600 return 0;
/* First time we see X's pseudo: tentatively pair it with Y's.  */
4602 if (same_regs[REGNO (x)] == -1)
4604 same_regs[REGNO (x)] = REGNO (y);
4605 num_same_regs++;
4607 /* If this is the first time we are seeing a register on the `Y'
4608 side, see if it is the last use. If not, we can't thread the
4609 jump, so mark it as not equivalent. */
4610 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
4611 return 0;
4613 return 1;
4615 else
4616 return (same_regs[REGNO (x)] == REGNO (y));
4618 break;
4620 case MEM:
4621 /* If memory modified or either volatile, not equivalent.
4622 Else, check address. */
4623 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4624 return 0;
4626 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4628 case ASM_INPUT:
4629 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4630 return 0;
4632 break;
4634 case SET:
4635 /* Cancel a pending `same_regs' if setting equivalenced registers.
4636 Then process source. */
4637 if (GET_CODE (SET_DEST (x)) == REG
4638 && GET_CODE (SET_DEST (y)) == REG)
4640 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
4642 same_regs[REGNO (SET_DEST (x))] = -1;
4643 num_same_regs--;
4645 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4646 return 0;
4648 else
4649 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4650 return 0;
4652 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4654 case LABEL_REF:
4655 return XEXP (x, 0) == XEXP (y, 0);
4657 case SYMBOL_REF:
4658 return XSTR (x, 0) == XSTR (y, 0);
4660 default:
4661 break;
/* No special case applied; fall back to a structural,
   element-by-element comparison driven by the rtx format string.  */
4664 if (x == y)
4665 return 1;
4667 fmt = GET_RTX_FORMAT (code);
4668 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4670 switch (fmt[i])
4672 case 'w':
4673 if (XWINT (x, i) != XWINT (y, i))
4674 return 0;
4675 break;
4677 case 'n':
4678 case 'i':
4679 if (XINT (x, i) != XINT (y, i))
4680 return 0;
4681 break;
4683 case 'V':
4684 case 'E':
4685 /* Two vectors must have the same length. */
4686 if (XVECLEN (x, i) != XVECLEN (y, i))
4687 return 0;
4689 /* And the corresponding elements must match. */
4690 for (j = 0; j < XVECLEN (x, i); j++)
4691 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4692 XVECEXP (y, i, j), yinsn) == 0)
4693 return 0;
4694 break;
4696 case 'e':
4697 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4698 return 0;
4699 break;
4701 case 'S':
4702 case 's':
4703 if (strcmp (XSTR (x, i), XSTR (y, i)))
4704 return 0;
4705 break;
4707 case 'u':
4708 /* These are just backpointers, so they don't matter. */
4709 break;
4711 case '0':
4712 break;
4714 /* It is believed that rtx's at this level will never
4715 contain anything but integers and other rtx's,
4716 except for within LABEL_REFs and SYMBOL_REFs. */
4717 default:
4718 abort ();
4721 return 1;
4725 /* Return the insn that NEW can be safely inserted in front of starting at
4726 the jump insn INSN. Return 0 if it is not safe to do this jump
4727 optimization. Note that NEW must contain a single set. */
4729 static rtx
4730 find_insert_position (insn, new)
4731 rtx insn;
4732 rtx new;
4734 int i;
4735 rtx prev;
4737 /* If NEW does not clobber, it is safe to insert NEW before INSN. */
4738 if (GET_CODE (PATTERN (new)) != PARALLEL)
4739 return insn;
4741 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
4742 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
4743 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
4744 insn))
4745 break;
4747 if (i < 0)
4748 return insn;
4750 /* There is a good chance that the previous insn PREV sets the thing
4751 being clobbered (often the CC in a hard reg). If PREV does not
4752 use what NEW sets, we can insert NEW before PREV. */
4754 prev = prev_active_insn (insn);
4755 for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
4756 if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
4757 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
4758 insn)
4759 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
4760 prev))
4761 return 0;
4763 return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;