(ASM_SPEC): Change {% to %{.
[official-gcc.git] / gcc / jump.c
blob b5e60fc3fc645fa95083fa58d533d9cb186763d3
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 88, 89, 91-95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
54 #include "config.h"
55 #include "rtl.h"
56 #include "flags.h"
57 #include "hard-reg-set.h"
58 #include "regs.h"
59 #include "insn-config.h"
60 #include "insn-flags.h"
61 #include "expr.h"
62 #include "real.h"
64 /* ??? Eventually must record somehow the labels used by jumps
65 from nested functions. */
66 /* Pre-record the next or previous real insn for each label?
67 No, this pass is very fast anyway. */
68 /* Condense consecutive labels?
69 This would make life analysis faster, maybe. */
70 /* Optimize jump y; x: ... y: jumpif... x?
71 Don't know if it is worth bothering with. */
72 /* Optimize two cases of conditional jump to conditional jump?
73 This can never delete any instruction or make anything dead,
74 or even change what is live at any point.
75 So perhaps let combiner do it. */
77 /* Vector indexed by uid.
78 For each CODE_LABEL, index by its uid to get first unconditional jump
79 that jumps to the label.
80 For each JUMP_INSN, index by its uid to get the next unconditional jump
81 that jumps to the same label.
82 Element 0 is the start of a chain of all return insns.
83 (It is safe to use element 0 because insn uid 0 is not used.) */
85 static rtx *jump_chain;
87 /* List of labels referred to from initializers.
88 These can never be deleted. */
89 rtx forced_labels;
91 /* Maximum index in jump_chain. */
93 static int max_jump_chain;
95 /* Set nonzero by jump_optimize if control can fall through
96 to the end of the function. */
97 int can_reach_end;
99 /* Indicates whether death notes are significant in cross jump analysis.
100 Normally they are not significant, because if A and B jump to C,
101 and R dies in A, it must die in B. But this might not be true after
102 stack register conversion, and we must compare death notes in that
103 case. */
105 static int cross_jump_death_matters = 0;
/* Forward declarations for the static helper functions defined later in
   this file.  (PROTO presumably expands to the parameter list only when
   the compiler supports ANSI prototypes -- defined in a project header;
   confirm against gansidecl.h.) */
107 static int duplicate_loop_exit_test PROTO((rtx));
108 static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
109 static void do_cross_jump PROTO((rtx, rtx, rtx));
110 static int jump_back_p PROTO((rtx, rtx));
111 static int tension_vector_labels PROTO((rtx, int));
112 static void mark_jump_label PROTO((rtx, rtx, int));
113 static void delete_computation PROTO((rtx));
114 static void delete_from_jump_chain PROTO((rtx));
115 static int delete_labelref_insn PROTO((rtx, rtx, int));
116 static void redirect_tablejump PROTO((rtx, rtx));
118 /* Delete no-op jumps and optimize jumps to jumps
119 and jumps around jumps.
120 Delete unused labels and unreachable code.
122 If CROSS_JUMP is 1, detect matching code
123 before a jump and its destination and unify them.
124 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
126 If NOOP_MOVES is nonzero, delete no-op move insns.
128 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
129 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
131 If `optimize' is zero, don't change any code,
132 just determine whether control drops off the end of the function.
133 This case occurs when we have -W and not -O.
134 It works because `delete_insn' checks the value of `optimize'
135 and refrains from actually deleting when that is 0. */
137 void
138 jump_optimize (f, cross_jump, noop_moves, after_regscan)
139 rtx f;
140 int cross_jump;
141 int noop_moves;
142 int after_regscan;
144 register rtx insn, next, note;
145 int changed;
146 int first = 1;
147 int max_uid = 0;
148 rtx last_insn;
150 cross_jump_death_matters = (cross_jump == 2);
152 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
153 notes whose labels don't occur in the insn any more. */
155 for (insn = f; insn; insn = NEXT_INSN (insn))
157 if (GET_CODE (insn) == CODE_LABEL)
158 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
159 else if (GET_CODE (insn) == JUMP_INSN)
160 JUMP_LABEL (insn) = 0;
161 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
162 for (note = REG_NOTES (insn); note; note = next)
164 next = XEXP (note, 1);
165 if (REG_NOTE_KIND (note) == REG_LABEL
166 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
167 remove_note (insn, note);
170 if (INSN_UID (insn) > max_uid)
171 max_uid = INSN_UID (insn);
174 max_uid++;
176 /* Delete insns following barriers, up to next label. */
178 for (insn = f; insn;)
180 if (GET_CODE (insn) == BARRIER)
182 insn = NEXT_INSN (insn);
183 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
185 if (GET_CODE (insn) == NOTE
186 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
187 insn = NEXT_INSN (insn);
188 else
189 insn = delete_insn (insn);
191 /* INSN is now the code_label. */
193 else
194 insn = NEXT_INSN (insn);
197 /* Leave some extra room for labels and duplicate exit test insns
198 we make. */
199 max_jump_chain = max_uid * 14 / 10;
200 jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
201 bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
203 /* Mark the label each jump jumps to.
204 Combine consecutive labels, and count uses of labels.
206 For each label, make a chain (using `jump_chain')
207 of all the *unconditional* jumps that jump to it;
208 also make a chain of all returns. */
210 for (insn = f; insn; insn = NEXT_INSN (insn))
211 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
212 && ! INSN_DELETED_P (insn))
214 mark_jump_label (PATTERN (insn), insn, cross_jump);
215 if (GET_CODE (insn) == JUMP_INSN)
217 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
219 jump_chain[INSN_UID (insn)]
220 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
221 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
223 if (GET_CODE (PATTERN (insn)) == RETURN)
225 jump_chain[INSN_UID (insn)] = jump_chain[0];
226 jump_chain[0] = insn;
231 /* Keep track of labels used from static data;
232 they cannot ever be deleted. */
234 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
235 LABEL_NUSES (XEXP (insn, 0))++;
237 /* Delete all labels already not referenced.
238 Also find the last insn. */
240 last_insn = 0;
241 for (insn = f; insn; )
243 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
244 insn = delete_insn (insn);
245 else
247 last_insn = insn;
248 insn = NEXT_INSN (insn);
252 if (!optimize)
254 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
255 If so record that this function can drop off the end. */
257 insn = last_insn;
259 int n_labels = 1;
260 while (insn
261 /* One label can follow the end-note: the return label. */
262 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
263 /* Ordinary insns can follow it if returning a structure. */
264 || GET_CODE (insn) == INSN
265 /* If machine uses explicit RETURN insns, no epilogue,
266 then one of them follows the note. */
267 || (GET_CODE (insn) == JUMP_INSN
268 && GET_CODE (PATTERN (insn)) == RETURN)
269 /* A barrier can follow the return insn. */
270 || GET_CODE (insn) == BARRIER
271 /* Other kinds of notes can follow also. */
272 || (GET_CODE (insn) == NOTE
273 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
274 insn = PREV_INSN (insn);
277 /* Report if control can fall through at the end of the function. */
278 if (insn && GET_CODE (insn) == NOTE
279 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
280 && ! INSN_DELETED_P (insn))
281 can_reach_end = 1;
283 /* Zero the "deleted" flag of all the "deleted" insns. */
284 for (insn = f; insn; insn = NEXT_INSN (insn))
285 INSN_DELETED_P (insn) = 0;
286 return;
289 #ifdef HAVE_return
290 if (HAVE_return)
292 /* If we fall through to the epilogue, see if we can insert a RETURN insn
293 in front of it. If the machine allows it at this point (we might be
294 after reload for a leaf routine), it will improve optimization for it
295 to be there. */
296 insn = get_last_insn ();
297 while (insn && GET_CODE (insn) == NOTE)
298 insn = PREV_INSN (insn);
300 if (insn && GET_CODE (insn) != BARRIER)
302 emit_jump_insn (gen_return ());
303 emit_barrier ();
306 #endif
308 if (noop_moves)
309 for (insn = f; insn; )
311 next = NEXT_INSN (insn);
313 if (GET_CODE (insn) == INSN)
315 register rtx body = PATTERN (insn);
317 /* Combine stack_adjusts with following push_insns. */
318 #ifdef PUSH_ROUNDING
319 if (GET_CODE (body) == SET
320 && SET_DEST (body) == stack_pointer_rtx
321 && GET_CODE (SET_SRC (body)) == PLUS
322 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
323 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
324 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
326 rtx p;
327 rtx stack_adjust_insn = insn;
328 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
329 int total_pushed = 0;
330 int pushes = 0;
332 /* Find all successive push insns. */
333 p = insn;
334 /* Don't convert more than three pushes;
335 that starts adding too many displaced addresses
336 and the whole thing starts becoming a losing
337 proposition. */
338 while (pushes < 3)
340 rtx pbody, dest;
341 p = next_nonnote_insn (p);
342 if (p == 0 || GET_CODE (p) != INSN)
343 break;
344 pbody = PATTERN (p);
345 if (GET_CODE (pbody) != SET)
346 break;
347 dest = SET_DEST (pbody);
348 /* Allow a no-op move between the adjust and the push. */
349 if (GET_CODE (dest) == REG
350 && GET_CODE (SET_SRC (pbody)) == REG
351 && REGNO (dest) == REGNO (SET_SRC (pbody)))
352 continue;
353 if (! (GET_CODE (dest) == MEM
354 && GET_CODE (XEXP (dest, 0)) == POST_INC
355 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
356 break;
357 pushes++;
358 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
359 > stack_adjust_amount)
360 break;
361 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
364 /* Discard the amount pushed from the stack adjust;
365 maybe eliminate it entirely. */
366 if (total_pushed >= stack_adjust_amount)
368 delete_computation (stack_adjust_insn);
369 total_pushed = stack_adjust_amount;
371 else
372 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
373 = GEN_INT (stack_adjust_amount - total_pushed);
375 /* Change the appropriate push insns to ordinary stores. */
376 p = insn;
377 while (total_pushed > 0)
379 rtx pbody, dest;
380 p = next_nonnote_insn (p);
381 if (GET_CODE (p) != INSN)
382 break;
383 pbody = PATTERN (p);
384 if (GET_CODE (pbody) == SET)
385 break;
386 dest = SET_DEST (pbody);
387 if (! (GET_CODE (dest) == MEM
388 && GET_CODE (XEXP (dest, 0)) == POST_INC
389 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
390 break;
391 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
392 /* If this push doesn't fully fit in the space
393 of the stack adjust that we deleted,
394 make another stack adjust here for what we
395 didn't use up. There should be peepholes
396 to recognize the resulting sequence of insns. */
397 if (total_pushed < 0)
399 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
400 GEN_INT (- total_pushed)),
402 break;
404 XEXP (dest, 0)
405 = plus_constant (stack_pointer_rtx, total_pushed);
408 #endif
410 /* Detect and delete no-op move instructions
411 resulting from not allocating a parameter in a register. */
413 if (GET_CODE (body) == SET
414 && (SET_DEST (body) == SET_SRC (body)
415 || (GET_CODE (SET_DEST (body)) == MEM
416 && GET_CODE (SET_SRC (body)) == MEM
417 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
418 && ! (GET_CODE (SET_DEST (body)) == MEM
419 && MEM_VOLATILE_P (SET_DEST (body)))
420 && ! (GET_CODE (SET_SRC (body)) == MEM
421 && MEM_VOLATILE_P (SET_SRC (body))))
422 delete_computation (insn);
424 /* Detect and ignore no-op move instructions
425 resulting from smart or fortuitous register allocation. */
427 else if (GET_CODE (body) == SET)
429 int sreg = true_regnum (SET_SRC (body));
430 int dreg = true_regnum (SET_DEST (body));
432 if (sreg == dreg && sreg >= 0)
433 delete_insn (insn);
434 else if (sreg >= 0 && dreg >= 0)
436 rtx trial;
437 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
438 sreg, NULL_PTR, dreg,
439 GET_MODE (SET_SRC (body)));
441 #ifdef PRESERVE_DEATH_INFO_REGNO_P
442 /* Deleting insn could lose a death-note for SREG or DREG
443 so don't do it if final needs accurate death-notes. */
444 if (! PRESERVE_DEATH_INFO_REGNO_P (sreg)
445 && ! PRESERVE_DEATH_INFO_REGNO_P (dreg))
446 #endif
448 /* DREG may have been the target of a REG_DEAD note in
449 the insn which makes INSN redundant. If so, reorg
450 would still think it is dead. So search for such a
451 note and delete it if we find it. */
452 for (trial = prev_nonnote_insn (insn);
453 trial && GET_CODE (trial) != CODE_LABEL;
454 trial = prev_nonnote_insn (trial))
455 if (find_regno_note (trial, REG_DEAD, dreg))
457 remove_death (dreg, trial);
458 break;
461 if (tem != 0
462 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
463 delete_insn (insn);
466 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
467 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
468 NULL_PTR, 0,
469 GET_MODE (SET_DEST (body))))
471 /* This handles the case where we have two consecutive
472 assignments of the same constant to pseudos that didn't
473 get a hard reg. Each SET from the constant will be
474 converted into a SET of the spill register and an
475 output reload will be made following it. This produces
476 two loads of the same constant into the same spill
477 register. */
479 rtx in_insn = insn;
481 /* Look back for a death note for the first reg.
482 If there is one, it is no longer accurate. */
483 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
485 if ((GET_CODE (in_insn) == INSN
486 || GET_CODE (in_insn) == JUMP_INSN)
487 && find_regno_note (in_insn, REG_DEAD, dreg))
489 remove_death (dreg, in_insn);
490 break;
492 in_insn = PREV_INSN (in_insn);
495 /* Delete the second load of the value. */
496 delete_insn (insn);
499 else if (GET_CODE (body) == PARALLEL)
501 /* If each part is a set between two identical registers or
502 a USE or CLOBBER, delete the insn. */
503 int i, sreg, dreg;
504 rtx tem;
506 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
508 tem = XVECEXP (body, 0, i);
509 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
510 continue;
512 if (GET_CODE (tem) != SET
513 || (sreg = true_regnum (SET_SRC (tem))) < 0
514 || (dreg = true_regnum (SET_DEST (tem))) < 0
515 || dreg != sreg)
516 break;
519 if (i < 0)
520 delete_insn (insn);
522 /* Also delete insns to store bit fields if they are no-ops. */
523 /* Not worth the hair to detect this in the big-endian case. */
524 else if (! BYTES_BIG_ENDIAN
525 && GET_CODE (body) == SET
526 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
527 && XEXP (SET_DEST (body), 2) == const0_rtx
528 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
529 && ! (GET_CODE (SET_SRC (body)) == MEM
530 && MEM_VOLATILE_P (SET_SRC (body))))
531 delete_insn (insn);
533 insn = next;
536 /* If we haven't yet gotten to reload and we have just run regscan,
537 delete any insn that sets a register that isn't used elsewhere.
538 This helps some of the optimizations below by having less insns
539 being jumped around. */
541 if (! reload_completed && after_regscan)
542 for (insn = f; insn; insn = next)
544 rtx set = single_set (insn);
546 next = NEXT_INSN (insn);
548 if (set && GET_CODE (SET_DEST (set)) == REG
549 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
550 && regno_first_uid[REGNO (SET_DEST (set))] == INSN_UID (insn)
551 /* We use regno_last_note_uid so as not to delete the setting
552 of a reg that's used in notes. A subsequent optimization
553 might arrange to use that reg for real. */
554 && regno_last_note_uid[REGNO (SET_DEST (set))] == INSN_UID (insn)
555 && ! side_effects_p (SET_SRC (set))
556 && ! find_reg_note (insn, REG_RETVAL, 0))
557 delete_insn (insn);
560 /* Now iterate optimizing jumps until nothing changes over one pass. */
561 changed = 1;
562 while (changed)
564 changed = 0;
566 for (insn = f; insn; insn = next)
568 rtx reallabelprev;
569 rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
570 rtx nlabel;
571 int this_is_simplejump, this_is_condjump, reversep;
572 int this_is_condjump_in_parallel;
573 #if 0
574 /* If NOT the first iteration, if this is the last jump pass
575 (just before final), do the special peephole optimizations.
576 Avoiding the first iteration gives ordinary jump opts
577 a chance to work before peephole opts. */
579 if (reload_completed && !first && !flag_no_peephole)
580 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN)
581 peephole (insn);
582 #endif
584 /* That could have deleted some insns after INSN, so check now
585 what the following insn is. */
587 next = NEXT_INSN (insn);
589 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
590 jump. Try to optimize by duplicating the loop exit test if so.
591 This is only safe immediately after regscan, because it uses
592 the values of regno_first_uid and regno_last_uid. */
593 if (after_regscan && GET_CODE (insn) == NOTE
594 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
595 && (temp1 = next_nonnote_insn (insn)) != 0
596 && simplejump_p (temp1))
598 temp = PREV_INSN (insn);
599 if (duplicate_loop_exit_test (insn))
601 changed = 1;
602 next = NEXT_INSN (temp);
603 continue;
607 if (GET_CODE (insn) != JUMP_INSN)
608 continue;
610 this_is_simplejump = simplejump_p (insn);
611 this_is_condjump = condjump_p (insn);
612 this_is_condjump_in_parallel = condjump_in_parallel_p (insn);
614 /* Tension the labels in dispatch tables. */
616 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
617 changed |= tension_vector_labels (PATTERN (insn), 0);
618 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
619 changed |= tension_vector_labels (PATTERN (insn), 1);
621 /* If a dispatch table always goes to the same place,
622 get rid of it and replace the insn that uses it. */
624 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
625 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
627 int i;
628 rtx pat = PATTERN (insn);
629 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
630 int len = XVECLEN (pat, diff_vec_p);
631 rtx dispatch = prev_real_insn (insn);
633 for (i = 0; i < len; i++)
634 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
635 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
636 break;
637 if (i == len
638 && dispatch != 0
639 && GET_CODE (dispatch) == JUMP_INSN
640 && JUMP_LABEL (dispatch) != 0
641 /* Don't mess with a casesi insn. */
642 && !(GET_CODE (PATTERN (dispatch)) == SET
643 && (GET_CODE (SET_SRC (PATTERN (dispatch)))
644 == IF_THEN_ELSE))
645 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
647 redirect_tablejump (dispatch,
648 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
649 changed = 1;
653 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
655 /* If a jump references the end of the function, try to turn
656 it into a RETURN insn, possibly a conditional one. */
657 if (JUMP_LABEL (insn)
658 && (next_active_insn (JUMP_LABEL (insn)) == 0
659 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
660 == RETURN))
661 changed |= redirect_jump (insn, NULL_RTX);
663 /* Detect jump to following insn. */
664 if (reallabelprev == insn && condjump_p (insn))
666 next = next_real_insn (JUMP_LABEL (insn));
667 delete_jump (insn);
668 changed = 1;
669 continue;
672 /* If we have an unconditional jump preceded by a USE, try to put
673 the USE before the target and jump there. This simplifies many
674 of the optimizations below since we don't have to worry about
675 dealing with these USE insns. We only do this if the label
676 being branch to already has the identical USE or if code
677 never falls through to that label. */
679 if (this_is_simplejump
680 && (temp = prev_nonnote_insn (insn)) != 0
681 && GET_CODE (temp) == INSN && GET_CODE (PATTERN (temp)) == USE
682 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
683 && (GET_CODE (temp1) == BARRIER
684 || (GET_CODE (temp1) == INSN
685 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
686 /* Don't do this optimization if we have a loop containing only
687 the USE instruction, and the loop start label has a usage
688 count of 1. This is because we will redo this optimization
689 everytime through the outer loop, and jump opt will never
690 exit. */
691 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
692 && temp2 == JUMP_LABEL (insn)
693 && LABEL_NUSES (temp2) == 1))
695 if (GET_CODE (temp1) == BARRIER)
697 emit_insn_after (PATTERN (temp), temp1);
698 temp1 = NEXT_INSN (temp1);
701 delete_insn (temp);
702 redirect_jump (insn, get_label_before (temp1));
703 reallabelprev = prev_real_insn (temp1);
704 changed = 1;
707 /* Simplify if (...) x = a; else x = b; by converting it
708 to x = b; if (...) x = a;
709 if B is sufficiently simple, the test doesn't involve X,
710 and nothing in the test modifies B or X.
712 If we have small register classes, we also can't do this if X
713 is a hard register.
715 If the "x = b;" insn has any REG_NOTES, we don't do this because
716 of the possibility that we are running after CSE and there is a
717 REG_EQUAL note that is only valid if the branch has already been
718 taken. If we move the insn with the REG_EQUAL note, we may
719 fold the comparison to always be false in a later CSE pass.
720 (We could also delete the REG_NOTES when moving the insn, but it
721 seems simpler to not move it.) An exception is that we can move
722 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
723 value is the same as "b".
725 INSN is the branch over the `else' part.
727 We set:
729 TEMP to the jump insn preceding "x = a;"
730 TEMP1 to X
731 TEMP2 to the insn that sets "x = b;"
732 TEMP3 to the insn that sets "x = a;"
733 TEMP4 to the set of "x = b"; */
735 if (this_is_simplejump
736 && (temp3 = prev_active_insn (insn)) != 0
737 && GET_CODE (temp3) == INSN
738 && (temp4 = single_set (temp3)) != 0
739 && GET_CODE (temp1 = SET_DEST (temp4)) == REG
740 #ifdef SMALL_REGISTER_CLASSES
741 && REGNO (temp1) >= FIRST_PSEUDO_REGISTER
742 #endif
743 && (temp2 = next_active_insn (insn)) != 0
744 && GET_CODE (temp2) == INSN
745 && (temp4 = single_set (temp2)) != 0
746 && rtx_equal_p (SET_DEST (temp4), temp1)
747 && (GET_CODE (SET_SRC (temp4)) == REG
748 || GET_CODE (SET_SRC (temp4)) == SUBREG
749 || CONSTANT_P (SET_SRC (temp4)))
750 && (REG_NOTES (temp2) == 0
751 || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
752 || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
753 && XEXP (REG_NOTES (temp2), 1) == 0
754 && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
755 SET_SRC (temp4))))
756 && (temp = prev_active_insn (temp3)) != 0
757 && condjump_p (temp) && ! simplejump_p (temp)
758 /* TEMP must skip over the "x = a;" insn */
759 && prev_real_insn (JUMP_LABEL (temp)) == insn
760 && no_labels_between_p (insn, JUMP_LABEL (temp))
761 /* There must be no other entries to the "x = b;" insn. */
762 && no_labels_between_p (JUMP_LABEL (temp), temp2)
763 /* INSN must either branch to the insn after TEMP2 or the insn
764 after TEMP2 must branch to the same place as INSN. */
765 && (reallabelprev == temp2
766 || ((temp5 = next_active_insn (temp2)) != 0
767 && simplejump_p (temp5)
768 && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
770 /* The test expression, X, may be a complicated test with
771 multiple branches. See if we can find all the uses of
772 the label that TEMP branches to without hitting a CALL_INSN
773 or a jump to somewhere else. */
774 rtx target = JUMP_LABEL (temp);
775 int nuses = LABEL_NUSES (target);
776 rtx p, q;
778 /* Set P to the first jump insn that goes around "x = a;". */
779 for (p = temp; nuses && p; p = prev_nonnote_insn (p))
781 if (GET_CODE (p) == JUMP_INSN)
783 if (condjump_p (p) && ! simplejump_p (p)
784 && JUMP_LABEL (p) == target)
786 nuses--;
787 if (nuses == 0)
788 break;
790 else
791 break;
793 else if (GET_CODE (p) == CALL_INSN)
794 break;
797 #ifdef HAVE_cc0
798 /* We cannot insert anything between a set of cc and its use
799 so if P uses cc0, we must back up to the previous insn. */
800 q = prev_nonnote_insn (p);
801 if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
802 && sets_cc0_p (PATTERN (q)))
803 p = q;
804 #endif
806 if (p)
807 p = PREV_INSN (p);
809 /* If we found all the uses and there was no data conflict, we
810 can move the assignment unless we can branch into the middle
811 from somewhere. */
812 if (nuses == 0 && p
813 && no_labels_between_p (p, insn)
814 && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
815 && ! reg_set_between_p (temp1, p, temp3)
816 && (GET_CODE (SET_SRC (temp4)) == CONST_INT
817 || ! reg_set_between_p (SET_SRC (temp4), p, temp2)))
819 emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
820 delete_insn (temp2);
822 /* Set NEXT to an insn that we know won't go away. */
823 next = next_active_insn (insn);
825 /* Delete the jump around the set. Note that we must do
826 this before we redirect the test jumps so that it won't
827 delete the code immediately following the assignment
828 we moved (which might be a jump). */
830 delete_insn (insn);
832 /* We either have two consecutive labels or a jump to
833 a jump, so adjust all the JUMP_INSNs to branch to where
834 INSN branches to. */
835 for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
836 if (GET_CODE (p) == JUMP_INSN)
837 redirect_jump (p, target);
839 changed = 1;
840 continue;
844 #ifndef HAVE_cc0
845 /* If we have if (...) x = exp; and branches are expensive,
846 EXP is a single insn, does not have any side effects, cannot
847 trap, and is not too costly, convert this to
848 t = exp; if (...) x = t;
850 Don't do this when we have CC0 because it is unlikely to help
851 and we'd need to worry about where to place the new insn and
852 the potential for conflicts. We also can't do this when we have
853 notes on the insn for the same reason as above.
855 We set:
857 TEMP to the "x = exp;" insn.
858 TEMP1 to the single set in the "x = exp; insn.
859 TEMP2 to "x". */
861 if (! reload_completed
862 && this_is_condjump && ! this_is_simplejump
863 && BRANCH_COST >= 3
864 && (temp = next_nonnote_insn (insn)) != 0
865 && GET_CODE (temp) == INSN
866 && REG_NOTES (temp) == 0
867 && (reallabelprev == temp
868 || ((temp2 = next_active_insn (temp)) != 0
869 && simplejump_p (temp2)
870 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
871 && (temp1 = single_set (temp)) != 0
872 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
873 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
874 #ifdef SMALL_REGISTER_CLASSES
875 && REGNO (temp2) >= FIRST_PSEUDO_REGISTER
876 #endif
877 && GET_CODE (SET_SRC (temp1)) != REG
878 && GET_CODE (SET_SRC (temp1)) != SUBREG
879 && GET_CODE (SET_SRC (temp1)) != CONST_INT
880 && ! side_effects_p (SET_SRC (temp1))
881 && ! may_trap_p (SET_SRC (temp1))
882 && rtx_cost (SET_SRC (temp1), SET) < 10)
884 rtx new = gen_reg_rtx (GET_MODE (temp2));
886 if (validate_change (temp, &SET_DEST (temp1), new, 0))
888 next = emit_insn_after (gen_move_insn (temp2, new), insn);
889 emit_insn_after_with_line_notes (PATTERN (temp),
890 PREV_INSN (insn), temp);
891 delete_insn (temp);
892 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
896 /* Similarly, if it takes two insns to compute EXP but they
897 have the same destination. Here TEMP3 will be the second
898 insn and TEMP4 the SET from that insn. */
900 if (! reload_completed
901 && this_is_condjump && ! this_is_simplejump
902 && BRANCH_COST >= 4
903 && (temp = next_nonnote_insn (insn)) != 0
904 && GET_CODE (temp) == INSN
905 && REG_NOTES (temp) == 0
906 && (temp3 = next_nonnote_insn (temp)) != 0
907 && GET_CODE (temp3) == INSN
908 && REG_NOTES (temp3) == 0
909 && (reallabelprev == temp3
910 || ((temp2 = next_active_insn (temp3)) != 0
911 && simplejump_p (temp2)
912 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
913 && (temp1 = single_set (temp)) != 0
914 && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
915 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
916 #ifdef SMALL_REGISTER_CLASSES
917 && REGNO (temp2) >= FIRST_PSEUDO_REGISTER
918 #endif
919 && ! side_effects_p (SET_SRC (temp1))
920 && ! may_trap_p (SET_SRC (temp1))
921 && rtx_cost (SET_SRC (temp1), SET) < 10
922 && (temp4 = single_set (temp3)) != 0
923 && rtx_equal_p (SET_DEST (temp4), temp2)
924 && ! side_effects_p (SET_SRC (temp4))
925 && ! may_trap_p (SET_SRC (temp4))
926 && rtx_cost (SET_SRC (temp4), SET) < 10)
928 rtx new = gen_reg_rtx (GET_MODE (temp2));
930 if (validate_change (temp, &SET_DEST (temp1), new, 0))
932 next = emit_insn_after (gen_move_insn (temp2, new), insn);
933 emit_insn_after_with_line_notes (PATTERN (temp),
934 PREV_INSN (insn), temp);
935 emit_insn_after_with_line_notes
936 (replace_rtx (PATTERN (temp3), temp2, new),
937 PREV_INSN (insn), temp3);
938 delete_insn (temp);
939 delete_insn (temp3);
940 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
944 /* Finally, handle the case where two insns are used to
945 compute EXP but a temporary register is used. Here we must
946 ensure that the temporary register is not used anywhere else. */
948 if (! reload_completed
949 && after_regscan
950 && this_is_condjump && ! this_is_simplejump
951 && BRANCH_COST >= 4
952 && (temp = next_nonnote_insn (insn)) != 0
953 && GET_CODE (temp) == INSN
954 && REG_NOTES (temp) == 0
955 && (temp3 = next_nonnote_insn (temp)) != 0
956 && GET_CODE (temp3) == INSN
957 && REG_NOTES (temp3) == 0
958 && (reallabelprev == temp3
959 || ((temp2 = next_active_insn (temp3)) != 0
960 && simplejump_p (temp2)
961 && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
962 && (temp1 = single_set (temp)) != 0
963 && (temp5 = SET_DEST (temp1),
964 (GET_CODE (temp5) == REG
965 || (GET_CODE (temp5) == SUBREG
966 && (temp5 = SUBREG_REG (temp5),
967 GET_CODE (temp5) == REG))))
968 && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
969 && regno_first_uid[REGNO (temp5)] == INSN_UID (temp)
970 && regno_last_uid[REGNO (temp5)] == INSN_UID (temp3)
971 && ! side_effects_p (SET_SRC (temp1))
972 && ! may_trap_p (SET_SRC (temp1))
973 && rtx_cost (SET_SRC (temp1), SET) < 10
974 && (temp4 = single_set (temp3)) != 0
975 && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
976 && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
977 #ifdef SMALL_REGISTER_CLASSES
978 && REGNO (temp2) >= FIRST_PSEUDO_REGISTER
979 #endif
980 && rtx_equal_p (SET_DEST (temp4), temp2)
981 && ! side_effects_p (SET_SRC (temp4))
982 && ! may_trap_p (SET_SRC (temp4))
983 && rtx_cost (SET_SRC (temp4), SET) < 10)
985 rtx new = gen_reg_rtx (GET_MODE (temp2));
987 if (validate_change (temp3, &SET_DEST (temp4), new, 0))
989 next = emit_insn_after (gen_move_insn (temp2, new), insn);
990 emit_insn_after_with_line_notes (PATTERN (temp),
991 PREV_INSN (insn), temp);
992 emit_insn_after_with_line_notes (PATTERN (temp3),
993 PREV_INSN (insn), temp3);
994 delete_insn (temp);
995 delete_insn (temp3);
996 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
999 #endif /* HAVE_cc0 */
1001 /* Try to use a conditional move (if the target has them), or a
1002 store-flag insn. The general case is:
1004 1) x = a; if (...) x = b; and
1005 2) if (...) x = b;
1007 If the jump would be faster, the machine should not have defined
1008 the movcc or scc insns! These cases are often made by the
1009 previous optimization.
1011 The second case is treated as x = x; if (...) x = b;.
1013 INSN here is the jump around the store. We set:
1015 TEMP to the "x = b;" insn.
1016 TEMP1 to X.
1017 TEMP2 to B.
1018 TEMP3 to A (X in the second case).
1019 TEMP4 to the condition being tested.
1020 TEMP5 to the earliest insn used to find the condition. */
1022 if (/* We can't do this after reload has completed. */
1023 ! reload_completed
1024 && this_is_condjump && ! this_is_simplejump
1025 /* Set TEMP to the "x = b;" insn. */
1026 && (temp = next_nonnote_insn (insn)) != 0
1027 && GET_CODE (temp) == INSN
1028 && GET_CODE (PATTERN (temp)) == SET
1029 && GET_CODE (temp1 = SET_DEST (PATTERN (temp))) == REG
1030 #ifdef SMALL_REGISTER_CLASSES
1031 && REGNO (temp1) >= FIRST_PSEUDO_REGISTER
1032 #endif
1033 && (GET_CODE (temp2 = SET_SRC (PATTERN (temp))) == REG
1034 || GET_CODE (temp2) == SUBREG
1035 /* ??? How about floating point constants? */
1036 || GET_CODE (temp2) == CONST_INT)
1037 /* Allow either form, but prefer the former if both apply.
1038 There is no point in using the old value of TEMP1 if
1039 it is a register, since cse will alias them. It can
1040 lose if the old value were a hard register since CSE
1041 won't replace hard registers. */
1042 && (((temp3 = reg_set_last (temp1, insn)) != 0)
1043 /* Make the latter case look like x = x; if (...) x = b; */
1044 || (temp3 = temp1, 1))
1045 /* INSN must either branch to the insn after TEMP or the insn
1046 after TEMP must branch to the same place as INSN. */
1047 && (reallabelprev == temp
1048 || ((temp4 = next_active_insn (temp)) != 0
1049 && simplejump_p (temp4)
1050 && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
1051 && (temp4 = get_condition (insn, &temp5)) != 0
1052 /* We must be comparing objects whose modes imply the size.
1053 We could handle BLKmode if (1) emit_store_flag could
1054 and (2) we could find the size reliably. */
1055 && GET_MODE (XEXP (temp4, 0)) != BLKmode
1056 /* No point in doing any of this if branches are cheap or we
1057 don't have conditional moves. */
1058 && (BRANCH_COST >= 2
1059 #ifdef HAVE_conditional_move
1060 || 1
1061 #endif
1063 #ifdef HAVE_cc0
1064 /* If the previous insn sets CC0 and something else, we can't
1065 do this since we are going to delete that insn. */
1067 && ! ((temp6 = prev_nonnote_insn (insn)) != 0
1068 && GET_CODE (temp6) == INSN
1069 && (sets_cc0_p (PATTERN (temp6)) == -1
1070 || (sets_cc0_p (PATTERN (temp6)) == 1
1071 && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
1072 #endif
1075 #ifdef HAVE_conditional_move
1076 /* First try a conditional move. */
1078 enum rtx_code code = GET_CODE (temp4);
1079 rtx var = temp1;
1080 rtx cond0, cond1, aval, bval;
1081 rtx target;
1083 /* Copy the compared variables into cond0 and cond1, so that
1084 any side effects performed in or after the old comparison,
1085 will not affect our compare which will come later. */
1086 /* ??? Is it possible to just use the comparison in the jump
1087 insn? After all, we're going to delete it. We'd have
1088 to modify emit_conditional_move to take a comparison rtx
1089 instead or write a new function. */
1090 cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
1091 /* We want the target to be able to simplify comparisons with
1092 zero (and maybe other constants as well), so don't create
1093 pseudos for them. There's no need to either. */
1094 if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
1095 || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
1096 cond1 = XEXP (temp4, 1);
1097 else
1098 cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));
1100 aval = temp3;
1101 bval = temp2;
1103 start_sequence ();
1104 target = emit_conditional_move (var, code,
1105 cond0, cond1, VOIDmode,
1106 aval, bval, GET_MODE (var),
1107 (code == LTU || code == GEU
1108 || code == LEU || code == GTU));
1110 if (target)
1112 rtx seq1,seq2;
1114 /* Save the conditional move sequence but don't emit it
1115 yet. On some machines, like the alpha, it is possible
1116 that temp5 == insn, so next generate the sequence that
1117 saves the compared values and then emit both
1118 sequences ensuring seq1 occurs before seq2. */
1119 seq2 = get_insns ();
1120 end_sequence ();
1122 /* Now that we can't fail, generate the copy insns that
1123 preserve the compared values. */
1124 start_sequence ();
1125 emit_move_insn (cond0, XEXP (temp4, 0));
1126 if (cond1 != XEXP (temp4, 1))
1127 emit_move_insn (cond1, XEXP (temp4, 1));
1128 seq1 = get_insns ();
1129 end_sequence ();
1131 emit_insns_before (seq1, temp5);
1132 emit_insns_before (seq2, insn);
1134 /* ??? We can also delete the insn that sets X to A.
1135 Flow will do it too though. */
1136 delete_insn (temp);
1137 next = NEXT_INSN (insn);
1138 delete_jump (insn);
1139 changed = 1;
1140 continue;
1142 else
1143 end_sequence ();
1145 #endif
1147 /* That didn't work, try a store-flag insn.
1149 We further divide the cases into:
1151 1) x = a; if (...) x = b; and either A or B is zero,
1152 2) if (...) x = 0; and jumps are expensive,
1153 3) x = a; if (...) x = b; and A and B are constants where all
1154 the set bits in A are also set in B and jumps are expensive,
1155 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1156 more expensive, and
1157 5) if (...) x = b; if jumps are even more expensive. */
1159 if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
1160 && ((GET_CODE (temp3) == CONST_INT)
1161 /* Make the latter case look like
1162 x = x; if (...) x = 0; */
1163 || (temp3 = temp1,
1164 ((BRANCH_COST >= 2
1165 && temp2 == const0_rtx)
1166 || BRANCH_COST >= 3)))
1167 /* If B is zero, OK; if A is zero, can only do (1) if we
1168 can reverse the condition. See if (3) applies possibly
1169 by reversing the condition. Prefer reversing to (4) when
1170 branches are very expensive. */
1171 && ((reversep = 0, temp2 == const0_rtx)
1172 || (temp3 == const0_rtx
1173 && (reversep = can_reverse_comparison_p (temp4, insn)))
1174 || (BRANCH_COST >= 2
1175 && GET_CODE (temp2) == CONST_INT
1176 && GET_CODE (temp3) == CONST_INT
1177 && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
1178 || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
1179 && (reversep = can_reverse_comparison_p (temp4,
1180 insn)))))
1181 || BRANCH_COST >= 3)
1184 enum rtx_code code = GET_CODE (temp4);
1185 rtx uval, cval, var = temp1;
1186 int normalizep;
1187 rtx target;
1189 /* If necessary, reverse the condition. */
1190 if (reversep)
1191 code = reverse_condition (code), uval = temp2, cval = temp3;
1192 else
1193 uval = temp3, cval = temp2;
1195 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1196 is the constant 1, it is best to just compute the result
1197 directly. If UVAL is constant and STORE_FLAG_VALUE
1198 includes all of its bits, it is best to compute the flag
1199 value unnormalized and `and' it with UVAL. Otherwise,
1200 normalize to -1 and `and' with UVAL. */
1201 normalizep = (cval != const0_rtx ? -1
1202 : (uval == const1_rtx ? 1
1203 : (GET_CODE (uval) == CONST_INT
1204 && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
1205 ? 0 : -1));
1207 /* We will be putting the store-flag insn immediately in
1208 front of the comparison that was originally being done,
1209 so we know all the variables in TEMP4 will be valid.
1210 However, this might be in front of the assignment of
1211 A to VAR. If it is, it would clobber the store-flag
1212 we will be emitting.
1214 Therefore, emit into a temporary which will be copied to
1215 VAR immediately after TEMP. */
1217 start_sequence ();
1218 target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
1219 XEXP (temp4, 0), XEXP (temp4, 1),
1220 VOIDmode,
1221 (code == LTU || code == LEU
1222 || code == GEU || code == GTU),
1223 normalizep);
1224 if (target)
1226 rtx seq;
1227 rtx before = insn;
1229 seq = get_insns ();
1230 end_sequence ();
1232 /* Put the store-flag insns in front of the first insn
1233 used to compute the condition to ensure that we
1234 use the same values of them as the current
1235 comparison. However, the remainder of the insns we
1236 generate will be placed directly in front of the
1237 jump insn, in case any of the pseudos we use
1238 are modified earlier. */
1240 emit_insns_before (seq, temp5);
1242 start_sequence ();
1244 /* Both CVAL and UVAL are non-zero. */
1245 if (cval != const0_rtx && uval != const0_rtx)
1247 rtx tem1, tem2;
1249 tem1 = expand_and (uval, target, NULL_RTX);
1250 if (GET_CODE (cval) == CONST_INT
1251 && GET_CODE (uval) == CONST_INT
1252 && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
1253 tem2 = cval;
1254 else
1256 tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
1257 target, NULL_RTX, 0);
1258 tem2 = expand_and (cval, tem2,
1259 (GET_CODE (tem2) == REG
1260 ? tem2 : 0));
1263 /* If we usually make new pseudos, do so here. This
1264 turns out to help machines that have conditional
1265 move insns. */
1266 /* ??? Conditional moves have already been handled.
1267 This may be obsolete. */
1269 if (flag_expensive_optimizations)
1270 target = 0;
1272 target = expand_binop (GET_MODE (var), ior_optab,
1273 tem1, tem2, target,
1274 1, OPTAB_WIDEN);
1276 else if (normalizep != 1)
1278 /* We know that either CVAL or UVAL is zero. If
1279 UVAL is zero, negate TARGET and `and' with CVAL.
1280 Otherwise, `and' with UVAL. */
1281 if (uval == const0_rtx)
1283 target = expand_unop (GET_MODE (var), one_cmpl_optab,
1284 target, NULL_RTX, 0);
1285 uval = cval;
1288 target = expand_and (uval, target,
1289 (GET_CODE (target) == REG
1290 && ! preserve_subexpressions_p ()
1291 ? target : NULL_RTX));
1294 emit_move_insn (var, target);
1295 seq = get_insns ();
1296 end_sequence ();
1297 #ifdef HAVE_cc0
1298 /* If INSN uses CC0, we must not separate it from the
1299 insn that sets cc0. */
1300 if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
1301 before = prev_nonnote_insn (before);
1302 #endif
1303 emit_insns_before (seq, before);
1305 delete_insn (temp);
1306 next = NEXT_INSN (insn);
1307 delete_jump (insn);
1308 changed = 1;
1309 continue;
1311 else
1312 end_sequence ();
1316 /* If branches are expensive, convert
1317 if (foo) bar++; to bar += (foo != 0);
1318 and similarly for "bar--;"
1320 INSN is the conditional branch around the arithmetic. We set:
1322 TEMP is the arithmetic insn.
1323 TEMP1 is the SET doing the arithmetic.
1324 TEMP2 is the operand being incremented or decremented.
1325 TEMP3 to the condition being tested.
1326 TEMP4 to the earliest insn used to find the condition. */
1328 if ((BRANCH_COST >= 2
1329 #ifdef HAVE_incscc
1330 || HAVE_incscc
1331 #endif
1332 #ifdef HAVE_decscc
1333 || HAVE_decscc
1334 #endif
1336 && ! reload_completed
1337 && this_is_condjump && ! this_is_simplejump
1338 && (temp = next_nonnote_insn (insn)) != 0
1339 && (temp1 = single_set (temp)) != 0
1340 && (temp2 = SET_DEST (temp1),
1341 GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
1342 && GET_CODE (SET_SRC (temp1)) == PLUS
1343 && (XEXP (SET_SRC (temp1), 1) == const1_rtx
1344 || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
1345 && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
1346 && ! side_effects_p (temp2)
1347 && ! may_trap_p (temp2)
1348 /* INSN must either branch to the insn after TEMP or the insn
1349 after TEMP must branch to the same place as INSN. */
1350 && (reallabelprev == temp
1351 || ((temp3 = next_active_insn (temp)) != 0
1352 && simplejump_p (temp3)
1353 && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
1354 && (temp3 = get_condition (insn, &temp4)) != 0
1355 /* We must be comparing objects whose modes imply the size.
1356 We could handle BLKmode if (1) emit_store_flag could
1357 and (2) we could find the size reliably. */
1358 && GET_MODE (XEXP (temp3, 0)) != BLKmode
1359 && can_reverse_comparison_p (temp3, insn))
1361 rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
1362 enum rtx_code code = reverse_condition (GET_CODE (temp3));
1364 start_sequence ();
1366 /* It must be the case that TEMP2 is not modified in the range
1367 [TEMP4, INSN). The one exception we make is if the insn
1368 before INSN sets TEMP2 to something which is also unchanged
1369 in that range. In that case, we can move the initialization
1370 into our sequence. */
1372 if ((temp5 = prev_active_insn (insn)) != 0
1373 && GET_CODE (temp5) == INSN
1374 && (temp6 = single_set (temp5)) != 0
1375 && rtx_equal_p (temp2, SET_DEST (temp6))
1376 && (CONSTANT_P (SET_SRC (temp6))
1377 || GET_CODE (SET_SRC (temp6)) == REG
1378 || GET_CODE (SET_SRC (temp6)) == SUBREG))
1380 emit_insn (PATTERN (temp5));
1381 init_insn = temp5;
1382 init = SET_SRC (temp6);
1385 if (CONSTANT_P (init)
1386 || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
1387 target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
1388 XEXP (temp3, 0), XEXP (temp3, 1),
1389 VOIDmode,
1390 (code == LTU || code == LEU
1391 || code == GTU || code == GEU), 1);
1393 /* If we can do the store-flag, do the addition or
1394 subtraction. */
1396 if (target)
1397 target = expand_binop (GET_MODE (temp2),
1398 (XEXP (SET_SRC (temp1), 1) == const1_rtx
1399 ? add_optab : sub_optab),
1400 temp2, target, temp2, 0, OPTAB_WIDEN);
1402 if (target != 0)
1404 /* Put the result back in temp2 in case it isn't already.
1405 Then replace the jump, possibly a CC0-setting insn in
1406 front of the jump, and TEMP, with the sequence we have
1407 made. */
1409 if (target != temp2)
1410 emit_move_insn (temp2, target);
1412 seq = get_insns ();
1413 end_sequence ();
1415 emit_insns_before (seq, temp4);
1416 delete_insn (temp);
1418 if (init_insn)
1419 delete_insn (init_insn);
1421 next = NEXT_INSN (insn);
1422 #ifdef HAVE_cc0
1423 delete_insn (prev_nonnote_insn (insn));
1424 #endif
1425 delete_insn (insn);
1426 changed = 1;
1427 continue;
1429 else
1430 end_sequence ();
1433 /* Simplify if (...) x = 1; else {...} if (x) ...
1434 We recognize this case scanning backwards as well.
1436 TEMP is the assignment to x;
1437 TEMP1 is the label at the head of the second if. */
1438 /* ?? This should call get_condition to find the values being
1439 compared, instead of looking for a COMPARE insn when HAVE_cc0
1440 is not defined. This would allow it to work on the m88k. */
1441 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1442 is not defined and the condition is tested by a separate compare
1443 insn. This is because the code below assumes that the result
1444 of the compare dies in the following branch.
1446 Not only that, but there might be other insns between the
1447 compare and branch whose results are live. Those insns need
1448 to be executed.
1450 A way to fix this is to move the insns at JUMP_LABEL (insn)
1451 to before INSN. If we are running before flow, they will
1452 be deleted if they aren't needed. But this doesn't work
1453 well after flow.
1455 This is really a special-case of jump threading, anyway. The
1456 right thing to do is to replace this and jump threading with
1457 much simpler code in cse.
1459 This code has been turned off in the non-cc0 case in the
1460 meantime. */
1462 #ifdef HAVE_cc0
1463 else if (this_is_simplejump
1464 /* Safe to skip USE and CLOBBER insns here
1465 since they will not be deleted. */
1466 && (temp = prev_active_insn (insn))
1467 && no_labels_between_p (temp, insn)
1468 && GET_CODE (temp) == INSN
1469 && GET_CODE (PATTERN (temp)) == SET
1470 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1471 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1472 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1473 /* If we find that the next value tested is `x'
1474 (TEMP1 is the insn where this happens), win. */
1475 && GET_CODE (temp1) == INSN
1476 && GET_CODE (PATTERN (temp1)) == SET
1477 #ifdef HAVE_cc0
1478 /* Does temp1 `tst' the value of x? */
1479 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1480 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1481 && (temp1 = next_nonnote_insn (temp1))
1482 #else
1483 /* Does temp1 compare the value of x against zero? */
1484 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1485 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1486 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1487 == SET_DEST (PATTERN (temp)))
1488 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1489 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1490 #endif
1491 && condjump_p (temp1))
1493 /* Get the if_then_else from the condjump. */
1494 rtx choice = SET_SRC (PATTERN (temp1));
1495 if (GET_CODE (choice) == IF_THEN_ELSE)
1497 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1498 rtx val = SET_SRC (PATTERN (temp));
1499 rtx cond
1500 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1501 val, const0_rtx);
1502 rtx ultimate;
1504 if (cond == const_true_rtx)
1505 ultimate = XEXP (choice, 1);
1506 else if (cond == const0_rtx)
1507 ultimate = XEXP (choice, 2);
1508 else
1509 ultimate = 0;
1511 if (ultimate == pc_rtx)
1512 ultimate = get_label_after (temp1);
1513 else if (ultimate && GET_CODE (ultimate) != RETURN)
1514 ultimate = XEXP (ultimate, 0);
1516 if (ultimate && JUMP_LABEL(insn) != ultimate)
1517 changed |= redirect_jump (insn, ultimate);
1520 #endif
1522 #if 0
1523 /* @@ This needs a bit of work before it will be right.
1525 Any type of comparison can be accepted for the first and
1526 second compare. When rewriting the first jump, we must
1527 compute what conditions can reach label3, and use the
1528 appropriate code. We cannot simply reverse/swap the code
1529 of the first jump. In some cases, the second jump must be
1530 rewritten also.
1532 For example,
1533 < == converts to > ==
1534 < != converts to == >
1535 etc.
1537 If the code is written to only accept an '==' test for the second
1538 compare, then all that needs to be done is to swap the condition
1539 of the first branch.
1541 It is questionable whether we want this optimization anyways,
1542 since if the user wrote code like this because he/she knew that
1543 the jump to label1 is taken most of the time, then rewriting
1544 this gives slower code. */
1545 /* @@ This should call get_condition to find the values being
1546 compared, instead of looking for a COMPARE insn when HAVE_cc0
1547 is not defined. This would allow it to work on the m88k. */
1548 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1549 is not defined and the condition is tested by a separate compare
1550 insn. This is because the code below assumes that the result
1551 of the compare dies in the following branch. */
1553 /* Simplify test a ~= b
1554 condjump label1;
1555 test a == b
1556 condjump label2;
1557 jump label3;
1558 label1:
1560 rewriting as
1561 test a ~~= b
1562 condjump label3
1563 test a == b
1564 condjump label2
1565 label1:
1567 where ~= is an inequality, e.g. >, and ~~= is the swapped
1568 inequality, e.g. <.
1570 We recognize this case scanning backwards.
1572 TEMP is the conditional jump to `label2';
1573 TEMP1 is the test for `a == b';
1574 TEMP2 is the conditional jump to `label1';
1575 TEMP3 is the test for `a ~= b'. */
1576 else if (this_is_simplejump
1577 && (temp = prev_active_insn (insn))
1578 && no_labels_between_p (temp, insn)
1579 && condjump_p (temp)
1580 && (temp1 = prev_active_insn (temp))
1581 && no_labels_between_p (temp1, temp)
1582 && GET_CODE (temp1) == INSN
1583 && GET_CODE (PATTERN (temp1)) == SET
1584 #ifdef HAVE_cc0
1585 && sets_cc0_p (PATTERN (temp1)) == 1
1586 #else
1587 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1588 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1589 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1590 #endif
1591 && (temp2 = prev_active_insn (temp1))
1592 && no_labels_between_p (temp2, temp1)
1593 && condjump_p (temp2)
1594 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1595 && (temp3 = prev_active_insn (temp2))
1596 && no_labels_between_p (temp3, temp2)
1597 && GET_CODE (PATTERN (temp3)) == SET
1598 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1599 SET_DEST (PATTERN (temp1)))
1600 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1601 SET_SRC (PATTERN (temp3)))
1602 && ! inequality_comparisons_p (PATTERN (temp))
1603 && inequality_comparisons_p (PATTERN (temp2)))
1605 rtx fallthrough_label = JUMP_LABEL (temp2);
1607 ++LABEL_NUSES (fallthrough_label);
1608 if (swap_jump (temp2, JUMP_LABEL (insn)))
1610 delete_insn (insn);
1611 changed = 1;
1614 if (--LABEL_NUSES (fallthrough_label) == 0)
1615 delete_insn (fallthrough_label);
1617 #endif
1618 /* Simplify if (...) {... x = 1;} if (x) ...
1620 We recognize this case backwards.
1622 TEMP is the test of `x';
1623 TEMP1 is the assignment to `x' at the end of the
1624 previous statement. */
1625 /* @@ This should call get_condition to find the values being
1626 compared, instead of looking for a COMPARE insn when HAVE_cc0
1627 is not defined. This would allow it to work on the m88k. */
1628 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1629 is not defined and the condition is tested by a separate compare
1630 insn. This is because the code below assumes that the result
1631 of the compare dies in the following branch. */
1633 /* ??? This has to be turned off. The problem is that the
1634 unconditional jump might indirectly end up branching to the
1635 label between TEMP1 and TEMP. We can't detect this, in general,
1636 since it may become a jump to there after further optimizations.
1637 If that jump is done, it will be deleted, so we will retry
1638 this optimization in the next pass, thus an infinite loop.
1640 The present code prevents this by putting the jump after the
1641 label, but this is not logically correct. */
1642 #if 0
1643 else if (this_is_condjump
1644 /* Safe to skip USE and CLOBBER insns here
1645 since they will not be deleted. */
1646 && (temp = prev_active_insn (insn))
1647 && no_labels_between_p (temp, insn)
1648 && GET_CODE (temp) == INSN
1649 && GET_CODE (PATTERN (temp)) == SET
1650 #ifdef HAVE_cc0
1651 && sets_cc0_p (PATTERN (temp)) == 1
1652 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1653 #else
1654 /* Temp must be a compare insn, we can not accept a register
1655 to register move here, since it may not be simply a
1656 tst insn. */
1657 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1658 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1659 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1660 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1661 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1662 #endif
1663 /* May skip USE or CLOBBER insns here
1664 for checking for opportunity, since we
1665 take care of them later. */
1666 && (temp1 = prev_active_insn (temp))
1667 && GET_CODE (temp1) == INSN
1668 && GET_CODE (PATTERN (temp1)) == SET
1669 #ifdef HAVE_cc0
1670 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1671 #else
1672 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1673 == SET_DEST (PATTERN (temp1)))
1674 #endif
1675 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1676 /* If this isn't true, cse will do the job. */
1677 && ! no_labels_between_p (temp1, temp))
1679 /* Get the if_then_else from the condjump. */
1680 rtx choice = SET_SRC (PATTERN (insn));
1681 if (GET_CODE (choice) == IF_THEN_ELSE
1682 && (GET_CODE (XEXP (choice, 0)) == EQ
1683 || GET_CODE (XEXP (choice, 0)) == NE))
1685 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1686 rtx last_insn;
1687 rtx ultimate;
1688 rtx p;
1690 /* Get the place that condjump will jump to
1691 if it is reached from here. */
1692 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1693 == want_nonzero)
1694 ultimate = XEXP (choice, 1);
1695 else
1696 ultimate = XEXP (choice, 2);
1697 /* Get it as a CODE_LABEL. */
1698 if (ultimate == pc_rtx)
1699 ultimate = get_label_after (insn);
1700 else
1701 /* Get the label out of the LABEL_REF. */
1702 ultimate = XEXP (ultimate, 0);
1704 /* Insert the jump immediately before TEMP, specifically
1705 after the label that is between TEMP1 and TEMP. */
1706 last_insn = PREV_INSN (temp);
1708 /* If we would be branching to the next insn, the jump
1709 would immediately be deleted and the re-inserted in
1710 a subsequent pass over the code. So don't do anything
1711 in that case. */
1712 if (next_active_insn (last_insn)
1713 != next_active_insn (ultimate))
1715 emit_barrier_after (last_insn);
1716 p = emit_jump_insn_after (gen_jump (ultimate),
1717 last_insn);
1718 JUMP_LABEL (p) = ultimate;
1719 ++LABEL_NUSES (ultimate);
1720 if (INSN_UID (ultimate) < max_jump_chain
1721 && INSN_CODE (p) < max_jump_chain)
1723 jump_chain[INSN_UID (p)]
1724 = jump_chain[INSN_UID (ultimate)];
1725 jump_chain[INSN_UID (ultimate)] = p;
1727 changed = 1;
1728 continue;
1732 #endif
1733 /* Detect a conditional jump going to the same place
1734 as an immediately following unconditional jump. */
1735 else if (this_is_condjump
1736 && (temp = next_active_insn (insn)) != 0
1737 && simplejump_p (temp)
1738 && (next_active_insn (JUMP_LABEL (insn))
1739 == next_active_insn (JUMP_LABEL (temp))))
1741 delete_jump (insn);
1742 changed = 1;
1743 continue;
1745 /* Detect a conditional jump jumping over an unconditional jump. */
1747 else if ((this_is_condjump || this_is_condjump_in_parallel)
1748 && ! this_is_simplejump
1749 && reallabelprev != 0
1750 && GET_CODE (reallabelprev) == JUMP_INSN
1751 && prev_active_insn (reallabelprev) == insn
1752 && no_labels_between_p (insn, reallabelprev)
1753 && simplejump_p (reallabelprev))
1755 /* When we invert the unconditional jump, we will be
1756 decrementing the usage count of its old label.
1757 Make sure that we don't delete it now because that
1758 might cause the following code to be deleted. */
1759 rtx prev_uses = prev_nonnote_insn (reallabelprev);
1760 rtx prev_label = JUMP_LABEL (insn);
1762 if (prev_label)
1763 ++LABEL_NUSES (prev_label);
1765 if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
1767 /* It is very likely that if there are USE insns before
1768 this jump, they hold REG_DEAD notes. These REG_DEAD
1769 notes are no longer valid due to this optimization,
1770 and will cause the life-analysis that following passes
1771 (notably delayed-branch scheduling) to think that
1772 these registers are dead when they are not.
1774 To prevent this trouble, we just remove the USE insns
1775 from the insn chain. */
1777 while (prev_uses && GET_CODE (prev_uses) == INSN
1778 && GET_CODE (PATTERN (prev_uses)) == USE)
1780 rtx useless = prev_uses;
1781 prev_uses = prev_nonnote_insn (prev_uses);
1782 delete_insn (useless);
1785 delete_insn (reallabelprev);
1786 next = insn;
1787 changed = 1;
1790 /* We can now safely delete the label if it is unreferenced
1791 since the delete_insn above has deleted the BARRIER. */
1792 if (prev_label && --LABEL_NUSES (prev_label) == 0)
1793 delete_insn (prev_label);
1794 continue;
1796 else
1798 /* Detect a jump to a jump. */
1800 nlabel = follow_jumps (JUMP_LABEL (insn));
1801 if (nlabel != JUMP_LABEL (insn)
1802 && redirect_jump (insn, nlabel))
1804 changed = 1;
1805 next = insn;
1808 /* Look for if (foo) bar; else break; */
1809 /* The insns look like this:
1810 insn = condjump label1;
1811 ...range1 (some insns)...
1812 jump label2;
1813 label1:
1814 ...range2 (some insns)...
1815 jump somewhere unconditionally
1816 label2: */
1818 rtx label1 = next_label (insn);
1819 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1820 /* Don't do this optimization on the first round, so that
1821 jump-around-a-jump gets simplified before we ask here
1822 whether a jump is unconditional.
1824 Also don't do it when we are called after reload since
1825 it will confuse reorg. */
1826 if (! first
1827 && (reload_completed ? ! flag_delayed_branch : 1)
1828 /* Make sure INSN is something we can invert. */
1829 && condjump_p (insn)
1830 && label1 != 0
1831 && JUMP_LABEL (insn) == label1
1832 && LABEL_NUSES (label1) == 1
1833 && GET_CODE (range1end) == JUMP_INSN
1834 && simplejump_p (range1end))
1836 rtx label2 = next_label (label1);
1837 rtx range2end = label2 ? prev_active_insn (label2) : 0;
1838 if (range1end != range2end
1839 && JUMP_LABEL (range1end) == label2
1840 && GET_CODE (range2end) == JUMP_INSN
1841 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
1842 /* Invert the jump condition, so we
1843 still execute the same insns in each case. */
1844 && invert_jump (insn, label1))
1846 rtx range1beg = next_active_insn (insn);
1847 rtx range2beg = next_active_insn (label1);
1848 rtx range1after, range2after;
1849 rtx range1before, range2before;
1850 rtx rangenext;
1852 /* Include in each range any notes before it, to be
1853 sure that we get the line number note if any, even
1854 if there are other notes here. */
1855 while (PREV_INSN (range1beg)
1856 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
1857 range1beg = PREV_INSN (range1beg);
1859 while (PREV_INSN (range2beg)
1860 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
1861 range2beg = PREV_INSN (range2beg);
1863 /* Don't move NOTEs for blocks or loops; shift them
1864 outside the ranges, where they'll stay put. */
1865 range1beg = squeeze_notes (range1beg, range1end);
1866 range2beg = squeeze_notes (range2beg, range2end);
1868 /* Get current surrounds of the 2 ranges. */
1869 range1before = PREV_INSN (range1beg);
1870 range2before = PREV_INSN (range2beg);
1871 range1after = NEXT_INSN (range1end);
1872 range2after = NEXT_INSN (range2end);
1874 /* Splice range2 where range1 was. */
1875 NEXT_INSN (range1before) = range2beg;
1876 PREV_INSN (range2beg) = range1before;
1877 NEXT_INSN (range2end) = range1after;
1878 PREV_INSN (range1after) = range2end;
1879 /* Splice range1 where range2 was. */
1880 NEXT_INSN (range2before) = range1beg;
1881 PREV_INSN (range1beg) = range2before;
1882 NEXT_INSN (range1end) = range2after;
1883 PREV_INSN (range2after) = range1end;
1885 /* Check for a loop end note between the end of
1886 range2, and the next code label. If there is one,
1887 then what we have really seen is
1888 if (foo) break; end_of_loop;
1889 and moved the break sequence outside the loop.
1890 We must move the LOOP_END note to where the
1891 loop really ends now, or we will confuse loop
1892 optimization. Stop if we find a LOOP_BEG note
1893 first, since we don't want to move the LOOP_END
1894 note in that case. */
1895 for (;range2after != label2; range2after = rangenext)
1897 rangenext = NEXT_INSN (range2after);
1898 if (GET_CODE (range2after) == NOTE)
1900 if (NOTE_LINE_NUMBER (range2after)
1901 == NOTE_INSN_LOOP_END)
1903 NEXT_INSN (PREV_INSN (range2after))
1904 = rangenext;
1905 PREV_INSN (rangenext)
1906 = PREV_INSN (range2after);
1907 PREV_INSN (range2after)
1908 = PREV_INSN (range1beg);
1909 NEXT_INSN (range2after) = range1beg;
1910 NEXT_INSN (PREV_INSN (range1beg))
1911 = range2after;
1912 PREV_INSN (range1beg) = range2after;
1914 else if (NOTE_LINE_NUMBER (range2after)
1915 == NOTE_INSN_LOOP_BEG)
1916 break;
1919 changed = 1;
1920 continue;
1925 /* Now that the jump has been tensioned,
1926 try cross jumping: check for identical code
1927 before the jump and before its target label. */
1929 /* First, cross jumping of conditional jumps: */
1931 if (cross_jump && condjump_p (insn))
1933 rtx newjpos, newlpos;
1934 rtx x = prev_real_insn (JUMP_LABEL (insn));
1936 /* A conditional jump may be crossjumped
1937 only if the place it jumps to follows
1938 an opposing jump that comes back here. */
1940 if (x != 0 && ! jump_back_p (x, insn))
1941 /* We have no opposing jump;
1942 cannot cross jump this insn. */
1943 x = 0;
1945 newjpos = 0;
1946 /* TARGET is nonzero if it is ok to cross jump
1947 to code before TARGET. If so, see if matches. */
1948 if (x != 0)
1949 find_cross_jump (insn, x, 2,
1950 &newjpos, &newlpos);
1952 if (newjpos != 0)
1954 do_cross_jump (insn, newjpos, newlpos);
1955 /* Make the old conditional jump
1956 into an unconditional one. */
1957 SET_SRC (PATTERN (insn))
1958 = gen_rtx (LABEL_REF, VOIDmode, JUMP_LABEL (insn));
1959 INSN_CODE (insn) = -1;
1960 emit_barrier_after (insn);
1961 /* Add to jump_chain unless this is a new label
1962 whose UID is too large. */
1963 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
1965 jump_chain[INSN_UID (insn)]
1966 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1967 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
1969 changed = 1;
1970 next = insn;
1974 /* Cross jumping of unconditional jumps:
1975 a few differences. */
1977 if (cross_jump && simplejump_p (insn))
1979 rtx newjpos, newlpos;
1980 rtx target;
1982 newjpos = 0;
1984 /* TARGET is nonzero if it is ok to cross jump
1985 to code before TARGET. If so, see if matches. */
1986 find_cross_jump (insn, JUMP_LABEL (insn), 1,
1987 &newjpos, &newlpos);
1989 /* If cannot cross jump to code before the label,
1990 see if we can cross jump to another jump to
1991 the same label. */
1992 /* Try each other jump to this label. */
1993 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
1994 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
1995 target != 0 && newjpos == 0;
1996 target = jump_chain[INSN_UID (target)])
1997 if (target != insn
1998 && JUMP_LABEL (target) == JUMP_LABEL (insn)
1999 /* Ignore TARGET if it's deleted. */
2000 && ! INSN_DELETED_P (target))
2001 find_cross_jump (insn, target, 2,
2002 &newjpos, &newlpos);
2004 if (newjpos != 0)
2006 do_cross_jump (insn, newjpos, newlpos);
2007 changed = 1;
2008 next = insn;
2012 /* This code was dead in the previous jump.c! */
2013 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2015 /* Return insns all "jump to the same place"
2016 so we can cross-jump between any two of them. */
2018 rtx newjpos, newlpos, target;
2020 newjpos = 0;
2022 /* If cannot cross jump to code before the label,
2023 see if we can cross jump to another jump to
2024 the same label. */
2025 /* Try each other jump to this label. */
2026 for (target = jump_chain[0];
2027 target != 0 && newjpos == 0;
2028 target = jump_chain[INSN_UID (target)])
2029 if (target != insn
2030 && ! INSN_DELETED_P (target)
2031 && GET_CODE (PATTERN (target)) == RETURN)
2032 find_cross_jump (insn, target, 2,
2033 &newjpos, &newlpos);
2035 if (newjpos != 0)
2037 do_cross_jump (insn, newjpos, newlpos);
2038 changed = 1;
2039 next = insn;
2045 first = 0;
2048 /* Delete extraneous line number notes.
2049 Note that two consecutive notes for different lines are not really
2050 extraneous. There should be some indication where that line belonged,
2051 even if it became empty. */
2054 rtx last_note = 0;
2056 for (insn = f; insn; insn = NEXT_INSN (insn))
2057 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2059 /* Delete this note if it is identical to previous note. */
2060 if (last_note
2061 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2062 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2064 delete_insn (insn);
2065 continue;
2068 last_note = insn;
2072 #ifdef HAVE_return
2073 if (HAVE_return)
2075 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2076 in front of it. If the machine allows it at this point (we might be
2077 after reload for a leaf routine), it will improve optimization for it
2078 to be there. We do this both here and at the start of this pass since
2079 the RETURN might have been deleted by some of our optimizations. */
2080 insn = get_last_insn ();
2081 while (insn && GET_CODE (insn) == NOTE)
2082 insn = PREV_INSN (insn);
2084 if (insn && GET_CODE (insn) != BARRIER)
2086 emit_jump_insn (gen_return ());
2087 emit_barrier ();
2090 #endif
2092 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2093 If so, delete it, and record that this function can drop off the end. */
2095 insn = last_insn;
2097 int n_labels = 1;
2098 while (insn
2099 /* One label can follow the end-note: the return label. */
2100 && ((GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2101 /* Ordinary insns can follow it if returning a structure. */
2102 || GET_CODE (insn) == INSN
2103 /* If machine uses explicit RETURN insns, no epilogue,
2104 then one of them follows the note. */
2105 || (GET_CODE (insn) == JUMP_INSN
2106 && GET_CODE (PATTERN (insn)) == RETURN)
2107 /* A barrier can follow the return insn. */
2108 || GET_CODE (insn) == BARRIER
2109 /* Other kinds of notes can follow also. */
2110 || (GET_CODE (insn) == NOTE
2111 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)))
2112 insn = PREV_INSN (insn);
2115 /* Report if control can fall through at the end of the function. */
2116 if (insn && GET_CODE (insn) == NOTE
2117 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END)
2119 can_reach_end = 1;
2120 delete_insn (insn);
2123 /* Show JUMP_CHAIN no longer valid. */
2124 jump_chain = 0;
2127 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2128 jump. Assume that this unconditional jump is to the exit test code. If
2129 the code is sufficiently simple, make a copy of it before INSN,
2130 followed by a jump to the exit of the loop. Then delete the unconditional
2131 jump after INSN.
2133 Return 1 if we made the change, else 0.
2135 This is only safe immediately after a regscan pass because it uses the
2136 values of regno_first_uid and regno_last_uid. */
2138 static int
2139 duplicate_loop_exit_test (loop_start)
2140 rtx loop_start;
2142 rtx insn, set, reg, p, link;
2143 rtx copy = 0;
2144 int num_insns = 0;
/* EXITCODE is the first insn of the exit-test sequence: the insn just
   after the label targeted by the jump that follows LOOP_START.  */
2145 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2146 rtx lastexit;
2147 int max_reg = max_reg_num ();
2148 rtx *reg_map = 0;
2150 /* Scan the exit code. We do not perform this optimization if any insn:
2152 is a CALL_INSN
2153 is a CODE_LABEL
2154 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2155 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2156 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2157 are not valid
2159 Also, don't do this if the exit code is more than 20 insns. */
2161 for (insn = exitcode;
2162 insn
2163 && ! (GET_CODE (insn) == NOTE
2164 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2165 insn = NEXT_INSN (insn))
2167 switch (GET_CODE (insn))
2169 case CODE_LABEL:
2170 case CALL_INSN:
2171 return 0;
2172 case NOTE:
2173 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2174 a jump immediately after the loop start that branches outside
2175 the loop but within an outer loop, near the exit test.
2176 If we copied this exit test and created a phony
2177 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2178 before the exit test look like these could be safely moved
2179 out of the loop even if they actually may be never executed.
2180 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2182 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2183 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2184 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2185 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2186 return 0;
2187 break;
2188 case JUMP_INSN:
2189 case INSN:
/* Bail out when the exit sequence grows past 20 real insns, or when an
   insn is part of a libcall sequence (hard to duplicate correctly).  */
2190 if (++num_insns > 20
2191 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2192 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2193 return 0;
2194 break;
2198 /* Unless INSN is zero, we can do the optimization. */
2199 if (insn == 0)
2200 return 0;
2202 lastexit = insn;
2204 /* See if any insn sets a register only used in the loop exit code and
2205 not a user variable. If so, replace it with a new register. */
2206 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2207 if (GET_CODE (insn) == INSN
2208 && (set = single_set (insn)) != 0
2209 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2210 || (GET_CODE (reg) == SUBREG
2211 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2212 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2213 && regno_first_uid[REGNO (reg)] == INSN_UID (insn))
/* REG's first use is this insn; check whether its last use is also
   within the exit sequence (before LASTEXIT).  */
2215 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2216 if (regno_last_uid[REGNO (reg)] == INSN_UID (p))
2217 break;
2219 if (p != lastexit)
2221 /* We can do the replacement. Allocate reg_map if this is the
2222 first replacement we found. */
2223 if (reg_map == 0)
2225 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
2226 bzero ((char *) reg_map, max_reg * sizeof (rtx));
2229 REG_LOOP_TEST_P (reg) = 1;
2231 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2235 /* Now copy each insn. */
2236 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2237 switch (GET_CODE (insn))
2239 case BARRIER:
2240 copy = emit_barrier_before (loop_start);
2241 break;
2242 case NOTE:
2243 /* Only copy line-number notes. */
2244 if (NOTE_LINE_NUMBER (insn) >= 0)
2246 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2247 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2249 break;
2251 case INSN:
2252 copy = emit_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2253 if (reg_map)
2254 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2256 mark_jump_label (PATTERN (copy), copy, 0);
2258 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2259 make them. */
2260 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2261 if (REG_NOTE_KIND (link) != REG_LABEL)
2262 REG_NOTES (copy)
2263 = copy_rtx (gen_rtx (EXPR_LIST, REG_NOTE_KIND (link),
2264 XEXP (link, 0), REG_NOTES (copy)));
2265 if (reg_map && REG_NOTES (copy))
2266 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2267 break;
2269 case JUMP_INSN:
2270 copy = emit_jump_insn_before (copy_rtx (PATTERN (insn)), loop_start);
2271 if (reg_map)
2272 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2273 mark_jump_label (PATTERN (copy), copy, 0);
2274 if (REG_NOTES (insn))
2276 REG_NOTES (copy) = copy_rtx (REG_NOTES (insn));
2277 if (reg_map)
2278 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2281 /* If this is a simple jump, add it to the jump chain. */
2283 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2284 && simplejump_p (copy))
2286 jump_chain[INSN_UID (copy)]
2287 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2288 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2290 break;
2292 default:
2293 abort ();
2296 /* Now clean up by emitting a jump to the end label and deleting the jump
2297 at the start of the loop. */
2298 if (! copy || GET_CODE (copy) != BARRIER)
/* The copied sequence fell through; INSN here is LASTEXIT's note, so
   get_label_after gives the label at the real loop exit.  */
2300 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2301 loop_start);
2302 mark_jump_label (PATTERN (copy), copy, 0);
2303 if (INSN_UID (copy) < max_jump_chain
2304 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2306 jump_chain[INSN_UID (copy)]
2307 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2308 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2310 emit_barrier_before (loop_start);
2313 /* Mark the exit code as the virtual top of the converted loop. */
2314 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2316 delete_insn (next_nonnote_insn (loop_start));
2318 return 1;
2321 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2322 loop-end notes between START and END out before START. Assume that
2323 END is not such a note. START may be such a note. Returns the value
2324 of the new starting insn, which may be different if the original start
2325 was such a note. */
2328 squeeze_notes (start, end)
2329 rtx start, end;
2331 rtx insn;
2332 rtx next;
2334 for (insn = start; insn != end; insn = next)
2336 next = NEXT_INSN (insn);
2337 if (GET_CODE (insn) == NOTE
2338 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2339 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2340 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2341 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2342 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2343 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* If the note is the current START, simply advance START past it;
   the note is then outside the range by definition.  */
2345 if (insn == start)
2346 start = next;
2347 else
/* Unlink INSN from its current position and relink it immediately
   before START.  The order of pointer updates matters: INSN's new
   links are set first, then its old neighbors are joined together.  */
2349 rtx prev = PREV_INSN (insn);
2350 PREV_INSN (insn) = PREV_INSN (start);
2351 NEXT_INSN (insn) = start;
2352 NEXT_INSN (PREV_INSN (insn)) = insn;
2353 PREV_INSN (NEXT_INSN (insn)) = insn;
2354 NEXT_INSN (prev) = next;
2355 PREV_INSN (next) = prev;
2360 return start;
2363 /* Compare the instructions before insn E1 with those before E2
2364 to find an opportunity for cross jumping.
2365 (This means detecting identical sequences of insns followed by
2366 jumps to the same place, or followed by a label and a jump
2367 to that label, and replacing one with a jump to the other.)
2369 Assume E1 is a jump that jumps to label E2
2370 (that is not always true but it might as well be).
2371 Find the longest possible equivalent sequences
2372 and store the first insns of those sequences into *F1 and *F2.
2373 Store zero there if no equivalent preceding instructions are found.
2375 We give up if we find a label in stream 1.
2376 Actually we could transfer that label into stream 2. */
2378 static void
2379 find_cross_jump (e1, e2, minimum, f1, f2)
2380 rtx e1, e2;
2381 int minimum;
2382 rtx *f1, *f2;
2384 register rtx i1 = e1, i2 = e2;
2385 register rtx p1, p2;
2386 int lose = 0;
2388 rtx last1 = 0, last2 = 0;
2389 rtx afterlast1 = 0, afterlast2 = 0;
2390 rtx prev1;
/* Assume failure until a sufficiently long matching run is found.  */
2392 *f1 = 0;
2393 *f2 = 0;
2395 while (1)
/* Walk both streams backwards: stream 1 skips notes, stream 2 skips
   both notes and code labels.  */
2397 i1 = prev_nonnote_insn (i1);
2399 i2 = PREV_INSN (i2);
2400 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
2401 i2 = PREV_INSN (i2);
2403 if (i1 == 0)
2404 break;
2406 /* Don't allow the range of insns preceding E1 or E2
2407 to include the other (E2 or E1). */
2408 if (i2 == e1 || i1 == e2)
2409 break;
2411 /* If we will get to this code by jumping, those jumps will be
2412 tensioned to go directly to the new label (before I2),
2413 so this cross-jumping won't cost extra. So reduce the minimum. */
2414 if (GET_CODE (i1) == CODE_LABEL)
2416 --minimum;
2417 break;
2420 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
2421 break;
2423 p1 = PATTERN (i1);
2424 p2 = PATTERN (i2);
2426 /* If this is a CALL_INSN, compare register usage information.
2427 If we don't check this on stack register machines, the two
2428 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2429 numbers of stack registers in the same basic block.
2430 If we don't check this on machines with delay slots, a delay slot may
2431 be filled that clobbers a parameter expected by the subroutine.
2433 ??? We take the simple route for now and assume that if they're
2434 equal, they were constructed identically. */
2436 if (GET_CODE (i1) == CALL_INSN
2437 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
2438 CALL_INSN_FUNCTION_USAGE (i2)))
2439 lose = 1;
2441 #ifdef STACK_REGS
2442 /* If cross_jump_death_matters is not 0, the insn's mode
2443 indicates whether or not the insn contains any stack-like
2444 regs. */
2446 if (!lose && cross_jump_death_matters && GET_MODE (i1) == QImode)
2448 /* If register stack conversion has already been done, then
2449 death notes must also be compared before it is certain that
2450 the two instruction streams match. */
2452 rtx note;
2453 HARD_REG_SET i1_regset, i2_regset;
2455 CLEAR_HARD_REG_SET (i1_regset);
2456 CLEAR_HARD_REG_SET (i2_regset);
/* Collect the stack registers that die in each insn; the two sets
   must be identical for the insns to be considered equivalent.  */
2458 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
2459 if (REG_NOTE_KIND (note) == REG_DEAD
2460 && STACK_REG_P (XEXP (note, 0)))
2461 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
2463 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
2464 if (REG_NOTE_KIND (note) == REG_DEAD
2465 && STACK_REG_P (XEXP (note, 0)))
2466 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
2468 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
2470 lose = 1;
2472 done:
2475 #endif
2477 if (lose || GET_CODE (p1) != GET_CODE (p2)
2478 || ! rtx_renumbered_equal_p (p1, p2))
2480 /* The following code helps take care of G++ cleanups. */
2481 rtx equiv1;
2482 rtx equiv2;
2484 if (!lose && GET_CODE (p1) == GET_CODE (p2)
2485 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
2486 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
2487 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
2488 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
2489 /* If the equivalences are not to a constant, they may
2490 reference pseudos that no longer exist, so we can't
2491 use them. */
2492 && CONSTANT_P (XEXP (equiv1, 0))
2493 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
2495 rtx s1 = single_set (i1);
2496 rtx s2 = single_set (i2);
2497 if (s1 != 0 && s2 != 0
2498 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
/* Try substituting the common constant for both sources; keep the
   change only if the full patterns then match.  */
2500 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
2501 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
2502 if (! rtx_renumbered_equal_p (p1, p2))
2503 cancel_changes (0);
2504 else if (apply_change_group ())
2505 goto win;
2509 /* Insns fail to match; cross jumping is limited to the following
2510 insns. */
2512 #ifdef HAVE_cc0
2513 /* Don't allow the insn after a compare to be shared by
2514 cross-jumping unless the compare is also shared.
2515 Here, if either of these non-matching insns is a compare,
2516 exclude the following insn from possible cross-jumping. */
2517 if (sets_cc0_p (p1) || sets_cc0_p (p2))
2518 last1 = afterlast1, last2 = afterlast2, ++minimum;
2519 #endif
2521 /* If cross-jumping here will feed a jump-around-jump
2522 optimization, this jump won't cost extra, so reduce
2523 the minimum. */
2524 if (GET_CODE (i1) == JUMP_INSN
2525 && JUMP_LABEL (i1)
2526 && prev_real_insn (JUMP_LABEL (i1)) == e1
2527 --minimum;
2528 break;
2531 win:
2532 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
2534 /* Ok, this insn is potentially includable in a cross-jump here. */
2535 afterlast1 = last1, afterlast2 = last2;
2536 last1 = i1, last2 = i2, --minimum;
/* Report the matched run only if enough insns matched (MINIMUM is
   decremented per match and must have reached zero) and the run is
   nonempty and does not start at E1 itself.  */
2540 if (minimum <= 0 && last1 != 0 && last1 != e1)
2541 *f1 = last1, *f2 = last2;
/* Carry out a cross jump found by find_cross_jump: redirect INSN (a jump,
   or an insn whose pattern is RETURN) to a label placed before NEWLPOS,
   then delete the insns from NEWJPOS up to INSN that were found to match
   the NEWLPOS stream.  */
2544 static void
2545 do_cross_jump (insn, newjpos, newlpos)
2546 rtx insn, newjpos, newlpos;
2548 /* Find an existing label at this point
2549 or make a new one if there is none. */
2550 register rtx label = get_label_before (newlpos);
2552 /* Make the same jump insn jump to the new point. */
2553 if (GET_CODE (PATTERN (insn)) == RETURN)
2555 /* Remove from jump chain of returns. */
2556 delete_from_jump_chain (insn);
2557 /* Change the insn. */
2558 PATTERN (insn) = gen_jump (label);
2559 INSN_CODE (insn) = -1;
2560 JUMP_LABEL (insn) = label;
2561 LABEL_NUSES (label)++;
2562 /* Add to new the jump chain. */
2563 if (INSN_UID (label) < max_jump_chain
2564 && INSN_UID (insn) < max_jump_chain)
2566 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
2567 jump_chain[INSN_UID (label)] = insn;
2570 else
2571 redirect_jump (insn, label);
2573 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
2574 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
2575 the NEWJPOS stream. */
2577 while (newjpos != insn)
2579 rtx lnote;
/* A REG_EQUAL/REG_EQUIV note kept on the surviving (NEWLPOS) copy is
   only valid if the deleted (NEWJPOS) copy carried the same note.  */
2581 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
2582 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
2583 || REG_NOTE_KIND (lnote) == REG_EQUIV)
2584 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
2585 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
2586 remove_note (newlpos, lnote);
2588 delete_insn (newjpos);
2589 newjpos = next_real_insn (newjpos);
2590 newlpos = next_real_insn (newlpos);
2594 /* Return the label before INSN, or put a new label there. */
2597 get_label_before (insn)
2598 rtx insn;
2600 rtx label;
2602 /* Find an existing label at this point
2603 or make a new one if there is none. */
2604 label = prev_nonnote_insn (insn);
2606 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2608 rtx prev = PREV_INSN (insn);
2610 label = gen_label_rtx ();
2611 emit_label_after (label, prev);
2612 LABEL_NUSES (label) = 0;
2614 return label;
2617 /* Return the label after INSN, or put a new label there. */
2620 get_label_after (insn)
2621 rtx insn;
2623 rtx label;
2625 /* Find an existing label at this point
2626 or make a new one if there is none. */
2627 label = next_nonnote_insn (insn);
2629 if (label == 0 || GET_CODE (label) != CODE_LABEL)
2631 label = gen_label_rtx ();
2632 emit_label_after (label, insn);
2633 LABEL_NUSES (label) = 0;
2635 return label;
2638 /* Return 1 if INSN is a jump that jumps to right after TARGET
2639 only on the condition that TARGET itself would drop through.
2640 Assumes that TARGET is a conditional jump. */
2642 static int
2643 jump_back_p (insn, target)
2644 rtx insn, target;
2646 rtx cinsn, ctarget;
2647 enum rtx_code codei, codet;
/* Both insns must be conditional jumps, and INSN's label must sit
   immediately after TARGET.  */
2649 if (simplejump_p (insn) || ! condjump_p (insn)
2650 || simplejump_p (target)
2651 || target != prev_real_insn (JUMP_LABEL (insn)))
2652 return 0;
/* Extract the comparison rtx from each jump's IF_THEN_ELSE.  */
2654 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
2655 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
2657 codei = GET_CODE (cinsn);
2658 codet = GET_CODE (ctarget);
/* Normalize INSN's condition: if its then-arm is pc (the branch is in
   the else-arm), reverse the comparison so both codes describe the
   branch-taken sense.  Reversal must be proven safe first.  */
2660 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
2662 if (! can_reverse_comparison_p (cinsn, insn))
2663 return 0;
2664 codei = reverse_condition (codei);
/* Likewise normalize TARGET's condition when its else-arm is pc.  */
2667 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
2669 if (! can_reverse_comparison_p (ctarget, target))
2670 return 0;
2671 codet = reverse_condition (codet);
/* The two jumps oppose each other exactly when the normalized codes and
   both comparison operands agree (modulo register renumbering).  */
2674 return (codei == codet
2675 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
2676 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
2679 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
2680 return non-zero if it is safe to reverse this comparison. It is if our
2681 floating-point is not IEEE, if this is an NE or EQ comparison, or if
2682 this is known to be an integer comparison. */
2685 can_reverse_comparison_p (comparison, insn)
2686 rtx comparison;
2687 rtx insn;
2689 rtx arg0;
2691 /* If this is not actually a comparison, we can't reverse it. */
2692 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
2693 return 0;
2695 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
2696 /* If this is an NE comparison, it is safe to reverse it to an EQ
2697 comparison and vice versa, even for floating point. If no operands
2698 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
2699 always false and NE is always true, so the reversal is also valid. */
2700 || flag_fast_math
2701 || GET_CODE (comparison) == NE
2702 || GET_CODE (comparison) == EQ)
2703 return 1;
2705 arg0 = XEXP (comparison, 0);
2707 /* Make sure ARG0 is one of the actual objects being compared. If we
2708 can't do this, we can't be sure the comparison can be reversed.
2710 Handle cc0 and a MODE_CC register. */
2711 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
2712 #ifdef HAVE_cc0
2713 || arg0 == cc0_rtx
2714 #endif
/* ARG0 is a condition-code register (or cc0): look back at the
   previous real insn, which must be the one that set it, to find the
   operand actually being compared.  */
2717 rtx prev = prev_nonnote_insn (insn);
2718 rtx set = single_set (prev);
2720 if (set == 0 || SET_DEST (set) != arg0)
2721 return 0;
2723 arg0 = SET_SRC (set);
2725 if (GET_CODE (arg0) == COMPARE)
2726 arg0 = XEXP (arg0, 0);
2729 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
2730 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
2731 return (GET_CODE (arg0) == CONST_INT
2732 || (GET_MODE (arg0) != VOIDmode
2733 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
2734 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
2737 /* Given an rtx-code for a comparison, return the code
2738 for the negated comparison.
2739 WATCH OUT! reverse_condition is not safe to use on a jump
2740 that might be acting on the results of an IEEE floating point comparison,
2741 because of the special treatment of non-signaling nans in comparisons.
2742 Use can_reverse_comparison_p to be sure. */
2744 enum rtx_code
2745 reverse_condition (code)
2746 enum rtx_code code;
2748 switch (code)
2750 case EQ:
2751 return NE;
2753 case NE:
2754 return EQ;
2756 case GT:
2757 return LE;
2759 case GE:
2760 return LT;
2762 case LT:
2763 return GE;
2765 case LE:
2766 return GT;
2768 case GTU:
2769 return LEU;
2771 case GEU:
2772 return LTU;
2774 case LTU:
2775 return GEU;
2777 case LEU:
2778 return GTU;
2780 default:
2781 abort ();
2782 return UNKNOWN;
2786 /* Similar, but return the code when two operands of a comparison are swapped.
2787 This IS safe for IEEE floating-point. */
2789 enum rtx_code
2790 swap_condition (code)
2791 enum rtx_code code;
2793 switch (code)
2795 case EQ:
2796 case NE:
2797 return code;
2799 case GT:
2800 return LT;
2802 case GE:
2803 return LE;
2805 case LT:
2806 return GT;
2808 case LE:
2809 return GE;
2811 case GTU:
2812 return LTU;
2814 case GEU:
2815 return LEU;
2817 case LTU:
2818 return GTU;
2820 case LEU:
2821 return GEU;
2823 default:
2824 abort ();
2825 return UNKNOWN;
2829 /* Given a comparison CODE, return the corresponding unsigned comparison.
2830 If CODE is an equality comparison or already an unsigned comparison,
2831 CODE is returned. */
2833 enum rtx_code
2834 unsigned_condition (code)
2835 enum rtx_code code;
2837 switch (code)
2839 case EQ:
2840 case NE:
2841 case GTU:
2842 case GEU:
2843 case LTU:
2844 case LEU:
2845 return code;
2847 case GT:
2848 return GTU;
2850 case GE:
2851 return GEU;
2853 case LT:
2854 return LTU;
2856 case LE:
2857 return LEU;
2859 default:
2860 abort ();
2864 /* Similarly, return the signed version of a comparison. */
2866 enum rtx_code
2867 signed_condition (code)
2868 enum rtx_code code;
2870 switch (code)
2872 case EQ:
2873 case NE:
2874 case GT:
2875 case GE:
2876 case LT:
2877 case LE:
2878 return code;
2880 case GTU:
2881 return GT;
2883 case GEU:
2884 return GE;
2886 case LTU:
2887 return LT;
2889 case LEU:
2890 return LE;
2892 default:
2893 abort ();
2897 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2898 truth of CODE1 implies the truth of CODE2. */
2901 comparison_dominates_p (code1, code2)
2902 enum rtx_code code1, code2;
2904 if (code1 == code2)
2905 return 1;
2907 switch (code1)
2909 case EQ:
2910 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
2911 return 1;
2912 break;
2914 case LT:
2915 if (code2 == LE || code2 == NE)
2916 return 1;
2917 break;
2919 case GT:
2920 if (code2 == GE || code2 == NE)
2921 return 1;
2922 break;
2924 case LTU:
2925 if (code2 == LEU || code2 == NE)
2926 return 1;
2927 break;
2929 case GTU:
2930 if (code2 == GEU || code2 == NE)
2931 return 1;
2932 break;
2935 return 0;
2938 /* Return 1 if INSN is an unconditional jump and nothing else. */
2941 simplejump_p (insn)
2942 rtx insn;
2944 return (GET_CODE (insn) == JUMP_INSN
2945 && GET_CODE (PATTERN (insn)) == SET
2946 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2947 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2950 /* Return nonzero if INSN is a (possibly) conditional jump
2951 and nothing more. */
2954 condjump_p (insn)
2955 rtx insn;
2957 register rtx x = PATTERN (insn);
2958 if (GET_CODE (x) != SET)
2959 return 0;
2960 if (GET_CODE (SET_DEST (x)) != PC)
2961 return 0;
2962 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2963 return 1;
2964 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2965 return 0;
2966 if (XEXP (SET_SRC (x), 2) == pc_rtx
2967 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2968 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2969 return 1;
2970 if (XEXP (SET_SRC (x), 1) == pc_rtx
2971 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2972 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2973 return 1;
2974 return 0;
2977 /* Return nonzero if INSN is a (possibly) conditional jump
2978 and nothing more. */
2981 condjump_in_parallel_p (insn)
2982 rtx insn;
2984 register rtx x = PATTERN (insn);
2986 if (GET_CODE (x) != PARALLEL)
2987 return 0;
2988 else
2989 x = XVECEXP (x, 0, 0);
2991 if (GET_CODE (x) != SET)
2992 return 0;
2993 if (GET_CODE (SET_DEST (x)) != PC)
2994 return 0;
2995 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2996 return 1;
2997 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2998 return 0;
2999 if (XEXP (SET_SRC (x), 2) == pc_rtx
3000 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
3001 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
3002 return 1;
3003 if (XEXP (SET_SRC (x), 1) == pc_rtx
3004 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
3005 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
3006 return 1;
3007 return 0;
3010 /* Return 1 if X is an RTX that does nothing but set the condition codes
3011 and CLOBBER or USE registers.
3012 Return -1 if X does explicitly set the condition codes,
3013 but also does other things. */
3016 sets_cc0_p (x)
3017 rtx x;
3019 #ifdef HAVE_cc0
3020 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
3021 return 1;
3022 if (GET_CODE (x) == PARALLEL)
3024 int i;
3025 int sets_cc0 = 0;
3026 int other_things = 0;
3027 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3029 if (GET_CODE (XVECEXP (x, 0, i)) == SET
3030 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
3031 sets_cc0 = 1;
3032 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
3033 other_things = 1;
3035 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
3037 return 0;
3038 #else
3039 abort ();
3040 #endif
3043 /* Follow any unconditional jump at LABEL;
3044 return the ultimate label reached by any such chain of jumps.
3045 If LABEL is not followed by a jump, return LABEL.
3046 If the chain loops or we can't find end, return LABEL,
3047 since that tells caller to avoid changing the insn.
3049 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3050 a USE or CLOBBER. */
3053 follow_jumps (label)
3054 rtx label;
3056 register rtx insn;
3057 register rtx next;
3058 register rtx value = label;
3059 register int depth;
/* Follow at most 10 links; hitting the cap is treated below as "chain
   too long or cyclic" and the original LABEL is returned.  */
3061 for (depth = 0;
3062 (depth < 10
3063 && (insn = next_active_insn (value)) != 0
3064 && GET_CODE (insn) == JUMP_INSN
3065 && ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
3066 || GET_CODE (PATTERN (insn)) == RETURN)
3067 && (next = NEXT_INSN (insn))
3068 && GET_CODE (next) == BARRIER);
3069 depth++)
3071 /* Don't chain through the insn that jumps into a loop
3072 from outside the loop,
3073 since that would create multiple loop entry jumps
3074 and prevent loop optimization. */
3075 rtx tem;
3076 if (!reload_completed)
3077 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
3078 if (GET_CODE (tem) == NOTE
3079 && NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG)
3080 return value;
3082 /* If we have found a cycle, make the insn jump to itself. */
3083 if (JUMP_LABEL (insn) == label)
3084 return label;
/* Don't chain into a dispatch table: a label immediately followed by
   an ADDR_VEC/ADDR_DIFF_VEC must not be bypassed.  */
3086 tem = next_active_insn (JUMP_LABEL (insn));
3087 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
3088 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
3089 break;
3091 value = JUMP_LABEL (insn);
3093 if (depth == 10)
3094 return label;
3095 return value;
3098 /* Assuming that field IDX of X is a vector of label_refs,
3099 replace each of them by the ultimate label reached by it.
3100 Return nonzero if a change is made.
3101 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
3103 static int
3104 tension_vector_labels (x, idx)
3105 register rtx x;
3106 register int idx;
3108 int changed = 0;
3109 register int i;
3110 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
3112 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
3113 register rtx nlabel = follow_jumps (olabel);
3114 if (nlabel && nlabel != olabel)
3116 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
3117 ++LABEL_NUSES (nlabel);
3118 if (--LABEL_NUSES (olabel) == 0)
3119 delete_insn (olabel);
3120 changed = 1;
3123 return changed;
3126 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3127 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3128 in INSN, then store one of them in JUMP_LABEL (INSN).
3129 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3130 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3131 Also, when there are consecutive labels, canonicalize on the last of them.
3133 Note that two labels separated by a loop-beginning note
3134 must be kept distinct if we have not yet done loop-optimization,
3135 because the gap between them is where loop-optimize
3136 will want to move invariant code to. CROSS_JUMP tells us
3137 that loop-optimization is done with.
3139 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3140 two labels distinct if they are separated by only USE or CLOBBER insns. */
3142 static void
3143 mark_jump_label (x, insn, cross_jump)
3144 register rtx x;
3145 rtx insn;
3146 int cross_jump;
3148 register RTX_CODE code = GET_CODE (x);
3149 register int i;
3150 register char *fmt;
3152 switch (code)
3154 case PC:
3155 case CC0:
3156 case REG:
3157 case SUBREG:
3158 case CONST_INT:
3159 case SYMBOL_REF:
3160 case CONST_DOUBLE:
3161 case CLOBBER:
3162 case CALL:
3163 return;
3165 case MEM:
3166 /* If this is a constant-pool reference, see if it is a label. */
3167 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
3168 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
3169 mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
3170 break;
3172 case LABEL_REF:
3174 rtx label = XEXP (x, 0);
3175 rtx olabel = label;
3176 rtx note;
3177 rtx next;
3179 if (GET_CODE (label) != CODE_LABEL)
3180 abort ();
3182 /* Ignore references to labels of containing functions. */
3183 if (LABEL_REF_NONLOCAL_P (x))
3184 break;
3186 /* If there are other labels following this one,
3187 replace it with the last of the consecutive labels. */
3188 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
3190 if (GET_CODE (next) == CODE_LABEL)
3191 label = next;
3192 else if (cross_jump && GET_CODE (next) == INSN
3193 && (GET_CODE (PATTERN (next)) == USE
3194 || GET_CODE (PATTERN (next)) == CLOBBER))
3195 continue;
3196 else if (GET_CODE (next) != NOTE)
3197 break;
3198 else if (! cross_jump
3199 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
3200 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END))
3201 break;
3204 XEXP (x, 0) = label;
3205 ++LABEL_NUSES (label);
3207 if (insn)
3209 if (GET_CODE (insn) == JUMP_INSN)
3210 JUMP_LABEL (insn) = label;
3212 /* If we've changed OLABEL and we had a REG_LABEL note
3213 for it, update it as well. */
3214 else if (label != olabel
3215 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
3216 XEXP (note, 0) = label;
3218 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3219 is one. */
3220 else if (! find_reg_note (insn, REG_LABEL, label))
3222 rtx next = next_real_insn (label);
3223 /* Don't record labels that refer to dispatch tables.
3224 This is not necessary, since the tablejump
3225 references the same label.
3226 And if we did record them, flow.c would make worse code. */
3227 if (next == 0
3228 || ! (GET_CODE (next) == JUMP_INSN
3229 && (GET_CODE (PATTERN (next)) == ADDR_VEC
3230 || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)))
3231 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_LABEL, label,
3232 REG_NOTES (insn));
3235 return;
3238 /* Do walk the labels in a vector, but not the first operand of an
3239 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3240 case ADDR_VEC:
3241 case ADDR_DIFF_VEC:
3243 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
3245 for (i = 0; i < XVECLEN (x, eltnum); i++)
3246 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
3247 return;
3251 fmt = GET_RTX_FORMAT (code);
3252 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3254 if (fmt[i] == 'e')
3255 mark_jump_label (XEXP (x, i), insn, cross_jump);
3256 else if (fmt[i] == 'E')
3258 register int j;
3259 for (j = 0; j < XVECLEN (x, i); j++)
3260 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
3265 /* If all INSN does is set the pc, delete it,
3266 and delete the insn that set the condition codes for it
3267 if that's what the previous thing was. */
3269 void
3270 delete_jump (insn)
3271 rtx insn;
3273 register rtx set = single_set (insn);
3275 if (set && GET_CODE (SET_DEST (set)) == PC)
3276 delete_computation (insn);
3279 /* Delete INSN and recursively delete insns that compute values used only
3280 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3281 If we are running before flow.c, we need do nothing since flow.c will
3282 delete dead code. We also can't know if the registers being used are
3283 dead or not at this point.
3285 Otherwise, look at all our REG_DEAD notes. If a previous insn does
3286 nothing other than set a register that dies in this insn, we can delete
3287 that insn as well.
3289 On machines with CC0, if CC0 is used in this insn, we may be able to
3290 delete the insn that set it. */
3292 static void
3293 delete_computation (insn)
3294 rtx insn;
3296 rtx note, next;
3298 #ifdef HAVE_cc0
3299 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3301 rtx prev = prev_nonnote_insn (insn);
3302 /* We assume that at this stage
3303 CC's are always set explicitly
3304 and always immediately before the jump that
3305 will use them. So if the previous insn
3306 exists to set the CC's, delete it
3307 (unless it performs auto-increments, etc.). */
3308 if (prev && GET_CODE (prev) == INSN
3309 && sets_cc0_p (PATTERN (prev)))
3311 if (sets_cc0_p (PATTERN (prev)) > 0
3312 && !FIND_REG_INC_NOTE (prev, NULL_RTX))
3313 delete_computation (prev);
3314 else
3315 /* Otherwise, show that cc0 won't be used. */
3316 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_UNUSED,
3317 cc0_rtx, REG_NOTES (prev));
3320 #endif
3322 for (note = REG_NOTES (insn); note; note = next)
3324 rtx our_prev;
3326 next = XEXP (note, 1);
3328 if (REG_NOTE_KIND (note) != REG_DEAD
3329 /* Verify that the REG_NOTE is legitimate. */
3330 || GET_CODE (XEXP (note, 0)) != REG)
3331 continue;
3333 for (our_prev = prev_nonnote_insn (insn);
3334 our_prev && GET_CODE (our_prev) == INSN;
3335 our_prev = prev_nonnote_insn (our_prev))
3337 /* If we reach a SEQUENCE, it is too complex to try to
3338 do anything with it, so give up. */
3339 if (GET_CODE (PATTERN (our_prev)) == SEQUENCE)
3340 break;
3342 if (GET_CODE (PATTERN (our_prev)) == USE
3343 && GET_CODE (XEXP (PATTERN (our_prev), 0)) == INSN)
3344 /* reorg creates USEs that look like this. We leave them
3345 alone because reorg needs them for its own purposes. */
3346 break;
3348 if (reg_set_p (XEXP (note, 0), PATTERN (our_prev)))
3350 if (FIND_REG_INC_NOTE (our_prev, NULL_RTX))
3351 break;
3353 if (GET_CODE (PATTERN (our_prev)) == PARALLEL)
3355 /* If we find a SET of something else, we can't
3356 delete the insn. */
3358 int i;
3360 for (i = 0; i < XVECLEN (PATTERN (our_prev), 0); i++)
3362 rtx part = XVECEXP (PATTERN (our_prev), 0, i);
3364 if (GET_CODE (part) == SET
3365 && SET_DEST (part) != XEXP (note, 0))
3366 break;
3369 if (i == XVECLEN (PATTERN (our_prev), 0))
3370 delete_computation (our_prev);
3372 else if (GET_CODE (PATTERN (our_prev)) == SET
3373 && SET_DEST (PATTERN (our_prev)) == XEXP (note, 0))
3374 delete_computation (our_prev);
3376 break;
3379 /* If OUR_PREV references the register that dies here, it is an
3380 additional use. Hence any prior SET isn't dead. However, this
3381 insn becomes the new place for the REG_DEAD note. */
3382 if (reg_overlap_mentioned_p (XEXP (note, 0),
3383 PATTERN (our_prev)))
3385 XEXP (note, 1) = REG_NOTES (our_prev);
3386 REG_NOTES (our_prev) = note;
3387 break;
3392 delete_insn (insn);
3395 /* Delete insn INSN from the chain of insns and update label ref counts.
3396 May delete some following insns as a consequence; may even delete
3397 a label elsewhere and insns that follow it.
3399 Returns the first insn after INSN that was not deleted. */
3402 delete_insn (insn)
3403 register rtx insn;
3405 register rtx next = NEXT_INSN (insn);
3406 register rtx prev = PREV_INSN (insn);
3407 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
3408 register int dont_really_delete = 0;
3410 while (next && INSN_DELETED_P (next))
3411 next = NEXT_INSN (next);
3413 /* This insn is already deleted => return first following nondeleted. */
3414 if (INSN_DELETED_P (insn))
3415 return next;
3417 /* Don't delete user-declared labels. Convert them to special NOTEs
3418 instead. */
3419 if (was_code_label && LABEL_NAME (insn) != 0
3420 && optimize && ! dont_really_delete)
3422 PUT_CODE (insn, NOTE);
3423 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
3424 NOTE_SOURCE_FILE (insn) = 0;
3425 dont_really_delete = 1;
3427 else
3428 /* Mark this insn as deleted. */
3429 INSN_DELETED_P (insn) = 1;
3431 /* If this is an unconditional jump, delete it from the jump chain. */
3432 if (simplejump_p (insn))
3433 delete_from_jump_chain (insn);
3435 /* If instruction is followed by a barrier,
3436 delete the barrier too. */
3438 if (next != 0 && GET_CODE (next) == BARRIER)
3440 INSN_DELETED_P (next) = 1;
3441 next = NEXT_INSN (next);
3444 /* Patch out INSN (and the barrier if any) */
3446 if (optimize && ! dont_really_delete)
3448 if (prev)
3450 NEXT_INSN (prev) = next;
3451 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3452 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
3453 XVECLEN (PATTERN (prev), 0) - 1)) = next;
3456 if (next)
3458 PREV_INSN (next) = prev;
3459 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3460 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3463 if (prev && NEXT_INSN (prev) == 0)
3464 set_last_insn (prev);
3467 /* If deleting a jump, decrement the count of the label,
3468 and delete the label if it is now unused. */
3470 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
3471 if (--LABEL_NUSES (JUMP_LABEL (insn)) == 0)
3473 /* This can delete NEXT or PREV,
3474 either directly if NEXT is JUMP_LABEL (INSN),
3475 or indirectly through more levels of jumps. */
3476 delete_insn (JUMP_LABEL (insn));
3477 /* I feel a little doubtful about this loop,
3478 but I see no clean and sure alternative way
3479 to find the first insn after INSN that is not now deleted.
3480 I hope this works. */
3481 while (next && INSN_DELETED_P (next))
3482 next = NEXT_INSN (next);
3483 return next;
3486 /* Likewise if we're deleting a dispatch table. */
3488 if (GET_CODE (insn) == JUMP_INSN
3489 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
3490 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
3492 rtx pat = PATTERN (insn);
3493 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
3494 int len = XVECLEN (pat, diff_vec_p);
3496 for (i = 0; i < len; i++)
3497 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
3498 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
3499 while (next && INSN_DELETED_P (next))
3500 next = NEXT_INSN (next);
3501 return next;
3504 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
3505 prev = PREV_INSN (prev);
3507 /* If INSN was a label and a dispatch table follows it,
3508 delete the dispatch table. The tablejump must have gone already.
3509 It isn't useful to fall through into a table. */
3511 if (was_code_label
3512 && NEXT_INSN (insn) != 0
3513 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
3514 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
3515 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
3516 next = delete_insn (NEXT_INSN (insn));
3518 /* If INSN was a label, delete insns following it if now unreachable. */
3520 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
3522 register RTX_CODE code;
3523 while (next != 0
3524 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
3525 || code == NOTE || code == BARRIER
3526 || (code == CODE_LABEL && INSN_DELETED_P (next))))
3528 if (code == NOTE
3529 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
3530 next = NEXT_INSN (next);
3531 /* Keep going past other deleted labels to delete what follows. */
3532 else if (code == CODE_LABEL && INSN_DELETED_P (next))
3533 next = NEXT_INSN (next);
3534 else
3535 /* Note: if this deletes a jump, it can cause more
3536 deletion of unreachable code, after a different label.
3537 As long as the value from this recursive call is correct,
3538 this invocation functions correctly. */
3539 next = delete_insn (next);
3543 return next;
3546 /* Advance from INSN till reaching something not deleted
3547 then return that. May return INSN itself. */
3550 next_nondeleted_insn (insn)
3551 rtx insn;
3553 while (INSN_DELETED_P (insn))
3554 insn = NEXT_INSN (insn);
3555 return insn;
3558 /* Delete a range of insns from FROM to TO, inclusive.
3559 This is for the sake of peephole optimization, so assume
3560 that whatever these insns do will still be done by a new
3561 peephole insn that will replace them. */
3563 void
3564 delete_for_peephole (from, to)
3565 register rtx from, to;
3567 register rtx insn = from;
3569 while (1)
3571 register rtx next = NEXT_INSN (insn);
3572 register rtx prev = PREV_INSN (insn);
3574 if (GET_CODE (insn) != NOTE)
3576 INSN_DELETED_P (insn) = 1;
3578 /* Patch this insn out of the chain. */
3579 /* We don't do this all at once, because we
3580 must preserve all NOTEs. */
3581 if (prev)
3582 NEXT_INSN (prev) = next;
3584 if (next)
3585 PREV_INSN (next) = prev;
3588 if (insn == to)
3589 break;
3590 insn = next;
3593 /* Note that if TO is an unconditional jump
3594 we *do not* delete the BARRIER that follows,
3595 since the peephole that replaces this sequence
3596 is also an unconditional jump in that case. */
3599 /* Invert the condition of the jump JUMP, and make it jump
3600 to label NLABEL instead of where it jumps now. */
3603 invert_jump (jump, nlabel)
3604 rtx jump, nlabel;
3606 /* We have to either invert the condition and change the label or
3607 do neither. Either operation could fail. We first try to invert
3608 the jump. If that succeeds, we try changing the label. If that fails,
3609 we invert the jump back to what it was. */
3611 if (! invert_exp (PATTERN (jump), jump))
3612 return 0;
3614 if (redirect_jump (jump, nlabel))
3615 return 1;
3617 if (! invert_exp (PATTERN (jump), jump))
3618 /* This should just be putting it back the way it was. */
3619 abort ();
3621 return 0;
3624 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3626 Return 1 if we can do so, 0 if we cannot find a way to do so that
3627 matches a pattern. */
3630 invert_exp (x, insn)
3631 rtx x;
3632 rtx insn;
3634 register RTX_CODE code;
3635 register int i;
3636 register char *fmt;
3638 code = GET_CODE (x);
3640 if (code == IF_THEN_ELSE)
3642 register rtx comp = XEXP (x, 0);
3643 register rtx tem;
3645 /* We can do this in two ways: The preferable way, which can only
3646 be done if this is not an integer comparison, is to reverse
3647 the comparison code. Otherwise, swap the THEN-part and ELSE-part
3648 of the IF_THEN_ELSE. If we can't do either, fail. */
3650 if (can_reverse_comparison_p (comp, insn)
3651 && validate_change (insn, &XEXP (x, 0),
3652 gen_rtx (reverse_condition (GET_CODE (comp)),
3653 GET_MODE (comp), XEXP (comp, 0),
3654 XEXP (comp, 1)), 0))
3655 return 1;
3657 tem = XEXP (x, 1);
3658 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3659 validate_change (insn, &XEXP (x, 2), tem, 1);
3660 return apply_change_group ();
3663 fmt = GET_RTX_FORMAT (code);
3664 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3666 if (fmt[i] == 'e')
3667 if (! invert_exp (XEXP (x, i), insn))
3668 return 0;
3669 if (fmt[i] == 'E')
3671 register int j;
3672 for (j = 0; j < XVECLEN (x, i); j++)
3673 if (!invert_exp (XVECEXP (x, i, j), insn))
3674 return 0;
3678 return 1;
3681 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
3682 If the old jump target label is unused as a result,
3683 it and the code following it may be deleted.
3685 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3686 RETURN insn.
3688 The return value will be 1 if the change was made, 0 if it wasn't (this
3689 can only occur for NLABEL == 0). */
3692 redirect_jump (jump, nlabel)
3693 rtx jump, nlabel;
3695 register rtx olabel = JUMP_LABEL (jump);
3697 if (nlabel == olabel)
3698 return 1;
3700 if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
3701 return 0;
3703 /* If this is an unconditional branch, delete it from the jump_chain of
3704 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3705 have UID's in range and JUMP_CHAIN is valid). */
3706 if (jump_chain && (simplejump_p (jump)
3707 || GET_CODE (PATTERN (jump)) == RETURN))
3709 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3711 delete_from_jump_chain (jump);
3712 if (label_index < max_jump_chain
3713 && INSN_UID (jump) < max_jump_chain)
3715 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3716 jump_chain[label_index] = jump;
3720 JUMP_LABEL (jump) = nlabel;
3721 if (nlabel)
3722 ++LABEL_NUSES (nlabel);
3724 if (olabel && --LABEL_NUSES (olabel) == 0)
3725 delete_insn (olabel);
3727 return 1;
3730 /* Delete the instruction JUMP from any jump chain it might be on. */
3732 static void
3733 delete_from_jump_chain (jump)
3734 rtx jump;
3736 int index;
3737 rtx olabel = JUMP_LABEL (jump);
3739 /* Handle unconditional jumps. */
3740 if (jump_chain && olabel != 0
3741 && INSN_UID (olabel) < max_jump_chain
3742 && simplejump_p (jump))
3743 index = INSN_UID (olabel);
3744 /* Handle return insns. */
3745 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3746 index = 0;
3747 else return;
3749 if (jump_chain[index] == jump)
3750 jump_chain[index] = jump_chain[INSN_UID (jump)];
3751 else
3753 rtx insn;
3755 for (insn = jump_chain[index];
3756 insn != 0;
3757 insn = jump_chain[INSN_UID (insn)])
3758 if (jump_chain[INSN_UID (insn)] == jump)
3760 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3761 break;
3766 /* If NLABEL is nonzero, throughout the rtx at LOC,
3767 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
3768 zero, alter (RETURN) to (LABEL_REF NLABEL).
3770 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
3771 validity with validate_change. Convert (set (pc) (label_ref olabel))
3772 to (return).
3774 Return 0 if we found a change we would like to make but it is invalid.
3775 Otherwise, return 1. */
3778 redirect_exp (loc, olabel, nlabel, insn)
3779 rtx *loc;
3780 rtx olabel, nlabel;
3781 rtx insn;
3783 register rtx x = *loc;
3784 register RTX_CODE code = GET_CODE (x);
3785 register int i;
3786 register char *fmt;
3788 if (code == LABEL_REF)
3790 if (XEXP (x, 0) == olabel)
3792 if (nlabel)
3793 XEXP (x, 0) = nlabel;
3794 else
3795 return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
3796 return 1;
3799 else if (code == RETURN && olabel == 0)
3801 x = gen_rtx (LABEL_REF, VOIDmode, nlabel);
3802 if (loc == &PATTERN (insn))
3803 x = gen_rtx (SET, VOIDmode, pc_rtx, x);
3804 return validate_change (insn, loc, x, 0);
3807 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3808 && GET_CODE (SET_SRC (x)) == LABEL_REF
3809 && XEXP (SET_SRC (x), 0) == olabel)
3810 return validate_change (insn, loc, gen_rtx (RETURN, VOIDmode), 0);
3812 fmt = GET_RTX_FORMAT (code);
3813 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3815 if (fmt[i] == 'e')
3816 if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
3817 return 0;
3818 if (fmt[i] == 'E')
3820 register int j;
3821 for (j = 0; j < XVECLEN (x, i); j++)
3822 if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
3823 return 0;
3827 return 1;
3830 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3832 If the old jump target label (before the dispatch table) becomes unused,
3833 it and the dispatch table may be deleted. In that case, find the insn
3834 before the jump references that label and delete it and logical successors
3835 too. */
3837 static void
3838 redirect_tablejump (jump, nlabel)
3839 rtx jump, nlabel;
3841 register rtx olabel = JUMP_LABEL (jump);
3843 /* Add this jump to the jump_chain of NLABEL. */
3844 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3845 && INSN_UID (jump) < max_jump_chain)
3847 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3848 jump_chain[INSN_UID (nlabel)] = jump;
3851 PATTERN (jump) = gen_jump (nlabel);
3852 JUMP_LABEL (jump) = nlabel;
3853 ++LABEL_NUSES (nlabel);
3854 INSN_CODE (jump) = -1;
3856 if (--LABEL_NUSES (olabel) == 0)
3858 delete_labelref_insn (jump, olabel, 0);
3859 delete_insn (olabel);
3863 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3864 If we found one, delete it and then delete this insn if DELETE_THIS is
3865 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3867 static int
3868 delete_labelref_insn (insn, label, delete_this)
3869 rtx insn, label;
3870 int delete_this;
3872 int deleted = 0;
3873 rtx link;
3875 if (GET_CODE (insn) != NOTE
3876 && reg_mentioned_p (label, PATTERN (insn)))
3878 if (delete_this)
3880 delete_insn (insn);
3881 deleted = 1;
3883 else
3884 return 1;
3887 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3888 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3890 if (delete_this)
3892 delete_insn (insn);
3893 deleted = 1;
3895 else
3896 return 1;
3899 return deleted;
3902 /* Like rtx_equal_p except that it considers two REGs as equal
3903 if they renumber to the same value and considers two commutative
3904 operations to be the same if the order of the operands has been
3905 reversed. */
3908 rtx_renumbered_equal_p (x, y)
3909 rtx x, y;
3911 register int i;
3912 register RTX_CODE code = GET_CODE (x);
3913 register char *fmt;
3915 if (x == y)
3916 return 1;
3918 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3919 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3920 && GET_CODE (SUBREG_REG (y)) == REG)))
3922 int reg_x = -1, reg_y = -1;
3923 int word_x = 0, word_y = 0;
3925 if (GET_MODE (x) != GET_MODE (y))
3926 return 0;
3928 /* If we haven't done any renumbering, don't
3929 make any assumptions. */
3930 if (reg_renumber == 0)
3931 return rtx_equal_p (x, y);
3933 if (code == SUBREG)
3935 reg_x = REGNO (SUBREG_REG (x));
3936 word_x = SUBREG_WORD (x);
3938 if (reg_renumber[reg_x] >= 0)
3940 reg_x = reg_renumber[reg_x] + word_x;
3941 word_x = 0;
3945 else
3947 reg_x = REGNO (x);
3948 if (reg_renumber[reg_x] >= 0)
3949 reg_x = reg_renumber[reg_x];
3952 if (GET_CODE (y) == SUBREG)
3954 reg_y = REGNO (SUBREG_REG (y));
3955 word_y = SUBREG_WORD (y);
3957 if (reg_renumber[reg_y] >= 0)
3959 reg_y = reg_renumber[reg_y];
3960 word_y = 0;
3964 else
3966 reg_y = REGNO (y);
3967 if (reg_renumber[reg_y] >= 0)
3968 reg_y = reg_renumber[reg_y];
3971 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
3974 /* Now we have disposed of all the cases
3975 in which different rtx codes can match. */
3976 if (code != GET_CODE (y))
3977 return 0;
3979 switch (code)
3981 case PC:
3982 case CC0:
3983 case ADDR_VEC:
3984 case ADDR_DIFF_VEC:
3985 return 0;
3987 case CONST_INT:
3988 return INTVAL (x) == INTVAL (y);
3990 case LABEL_REF:
3991 /* We can't assume nonlocal labels have their following insns yet. */
3992 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3993 return XEXP (x, 0) == XEXP (y, 0);
3995 /* Two label-refs are equivalent if they point at labels
3996 in the same position in the instruction stream. */
3997 return (next_real_insn (XEXP (x, 0))
3998 == next_real_insn (XEXP (y, 0)));
4000 case SYMBOL_REF:
4001 return XSTR (x, 0) == XSTR (y, 0);
4004 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4006 if (GET_MODE (x) != GET_MODE (y))
4007 return 0;
4009 /* For commutative operations, the RTX match if the operand match in any
4010 order. Also handle the simple binary and unary cases without a loop. */
4011 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4012 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4013 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
4014 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
4015 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
4016 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4017 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
4018 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
4019 else if (GET_RTX_CLASS (code) == '1')
4020 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
4022 /* Compare the elements. If any pair of corresponding elements
4023 fail to match, return 0 for the whole things. */
4025 fmt = GET_RTX_FORMAT (code);
4026 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4028 register int j;
4029 switch (fmt[i])
4031 case 'w':
4032 if (XWINT (x, i) != XWINT (y, i))
4033 return 0;
4034 break;
4036 case 'i':
4037 if (XINT (x, i) != XINT (y, i))
4038 return 0;
4039 break;
4041 case 's':
4042 if (strcmp (XSTR (x, i), XSTR (y, i)))
4043 return 0;
4044 break;
4046 case 'e':
4047 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
4048 return 0;
4049 break;
4051 case 'u':
4052 if (XEXP (x, i) != XEXP (y, i))
4053 return 0;
4054 /* fall through. */
4055 case '0':
4056 break;
4058 case 'E':
4059 if (XVECLEN (x, i) != XVECLEN (y, i))
4060 return 0;
4061 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4062 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
4063 return 0;
4064 break;
4066 default:
4067 abort ();
4070 return 1;
4073 /* If X is a hard register or equivalent to one or a subregister of one,
4074 return the hard register number. If X is a pseudo register that was not
4075 assigned a hard register, return the pseudo register number. Otherwise,
4076 return -1. Any rtx is valid for X. */
4079 true_regnum (x)
4080 rtx x;
4082 if (GET_CODE (x) == REG)
4084 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
4085 return reg_renumber[REGNO (x)];
4086 return REGNO (x);
4088 if (GET_CODE (x) == SUBREG)
4090 int base = true_regnum (SUBREG_REG (x));
4091 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
4092 return SUBREG_WORD (x) + base;
4094 return -1;
/* Optimize code of the form:

	for (x = a[i]; x; ...)
	  ...
	for (x = a[i]; x; ...)
	  ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

static int *same_regs;

static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
4140 /* Called via note_stores on each insn between the target of the first
4141 branch and the second branch. It marks any changed registers. */
4143 static void
4144 mark_modified_reg (dest, x)
4145 rtx dest;
4146 rtx x;
4148 int regno, i;
4150 if (GET_CODE (dest) == SUBREG)
4151 dest = SUBREG_REG (dest);
4153 if (GET_CODE (dest) == MEM)
4154 modified_mem = 1;
4156 if (GET_CODE (dest) != REG)
4157 return;
4159 regno = REGNO (dest);
4160 if (regno >= FIRST_PSEUDO_REGISTER)
4161 modified_regs[regno] = 1;
4162 else
4163 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
4164 modified_regs[regno + i] = 1;
4167 /* F is the first insn in the chain of insns. */
4169 void
4170 thread_jumps (f, max_reg, flag_before_loop)
4171 rtx f;
4172 int max_reg;
4173 int flag_before_loop;
4175 /* Basic algorithm is to find a conditional branch,
4176 the label it may branch to, and the branch after
4177 that label. If the two branches test the same condition,
4178 walk back from both branch paths until the insn patterns
4179 differ, or code labels are hit. If we make it back to
4180 the target of the first branch, then we know that the first branch
4181 will either always succeed or always fail depending on the relative
4182 senses of the two branches. So adjust the first branch accordingly
4183 in this case. */
4185 rtx label, b1, b2, t1, t2;
4186 enum rtx_code code1, code2;
4187 rtx b1op0, b1op1, b2op0, b2op1;
4188 int changed = 1;
4189 int i;
4190 int *all_reset;
4192 /* Allocate register tables and quick-reset table. */
4193 modified_regs = (char *) alloca (max_reg * sizeof (char));
4194 same_regs = (int *) alloca (max_reg * sizeof (int));
4195 all_reset = (int *) alloca (max_reg * sizeof (int));
4196 for (i = 0; i < max_reg; i++)
4197 all_reset[i] = -1;
4199 while (changed)
4201 changed = 0;
4203 for (b1 = f; b1; b1 = NEXT_INSN (b1))
4205 /* Get to a candidate branch insn. */
4206 if (GET_CODE (b1) != JUMP_INSN
4207 || ! condjump_p (b1) || simplejump_p (b1)
4208 || JUMP_LABEL (b1) == 0)
4209 continue;
4211 bzero (modified_regs, max_reg * sizeof (char));
4212 modified_mem = 0;
4214 bcopy ((char *) all_reset, (char *) same_regs,
4215 max_reg * sizeof (int));
4216 num_same_regs = 0;
4218 label = JUMP_LABEL (b1);
4220 /* Look for a branch after the target. Record any registers and
4221 memory modified between the target and the branch. Stop when we
4222 get to a label since we can't know what was changed there. */
4223 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
4225 if (GET_CODE (b2) == CODE_LABEL)
4226 break;
4228 else if (GET_CODE (b2) == JUMP_INSN)
4230 /* If this is an unconditional jump and is the only use of
4231 its target label, we can follow it. */
4232 if (simplejump_p (b2)
4233 && JUMP_LABEL (b2) != 0
4234 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
4236 b2 = JUMP_LABEL (b2);
4237 continue;
4239 else
4240 break;
4243 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
4244 continue;
4246 if (GET_CODE (b2) == CALL_INSN)
4248 modified_mem = 1;
4249 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4250 if (call_used_regs[i] && ! fixed_regs[i]
4251 && i != STACK_POINTER_REGNUM
4252 && i != FRAME_POINTER_REGNUM
4253 && i != HARD_FRAME_POINTER_REGNUM
4254 && i != ARG_POINTER_REGNUM)
4255 modified_regs[i] = 1;
4258 note_stores (PATTERN (b2), mark_modified_reg);
4261 /* Check the next candidate branch insn from the label
4262 of the first. */
4263 if (b2 == 0
4264 || GET_CODE (b2) != JUMP_INSN
4265 || b2 == b1
4266 || ! condjump_p (b2)
4267 || simplejump_p (b2))
4268 continue;
4270 /* Get the comparison codes and operands, reversing the
4271 codes if appropriate. If we don't have comparison codes,
4272 we can't do anything. */
4273 b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
4274 b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
4275 code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
4276 if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
4277 code1 = reverse_condition (code1);
4279 b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
4280 b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
4281 code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
4282 if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
4283 code2 = reverse_condition (code2);
4285 /* If they test the same things and knowing that B1 branches
4286 tells us whether or not B2 branches, check if we
4287 can thread the branch. */
4288 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
4289 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
4290 && (comparison_dominates_p (code1, code2)
4291 || comparison_dominates_p (code1, reverse_condition (code2))))
4293 t1 = prev_nonnote_insn (b1);
4294 t2 = prev_nonnote_insn (b2);
4296 while (t1 != 0 && t2 != 0)
4298 if (t2 == label)
4300 /* We have reached the target of the first branch.
4301 If there are no pending register equivalents,
4302 we know that this branch will either always
4303 succeed (if the senses of the two branches are
4304 the same) or always fail (if not). */
4305 rtx new_label;
4307 if (num_same_regs != 0)
4308 break;
4310 if (comparison_dominates_p (code1, code2))
4311 new_label = JUMP_LABEL (b2);
4312 else
4313 new_label = get_label_after (b2);
4315 if (JUMP_LABEL (b1) != new_label)
4317 rtx prev = PREV_INSN (new_label);
4319 if (flag_before_loop
4320 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
4322 /* Don't thread to the loop label. If a loop
4323 label is reused, loop optimization will
4324 be disabled for that loop. */
4325 new_label = gen_label_rtx ();
4326 emit_label_after (new_label, PREV_INSN (prev));
4328 changed |= redirect_jump (b1, new_label);
4330 break;
4333 /* If either of these is not a normal insn (it might be
4334 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4335 have already been skipped above.) Similarly, fail
4336 if the insns are different. */
4337 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4338 || recog_memoized (t1) != recog_memoized (t2)
4339 || ! rtx_equal_for_thread_p (PATTERN (t1),
4340 PATTERN (t2), t2))
4341 break;
4343 t1 = prev_nonnote_insn (t1);
4344 t2 = prev_nonnote_insn (t2);
4351 /* This is like RTX_EQUAL_P except that it knows about our handling of
4352 possibly equivalent registers and knows to consider volatile and
4353 modified objects as not equal.
4355 YINSN is the insn containing Y. */
4358 rtx_equal_for_thread_p (x, y, yinsn)
4359 rtx x, y;
4360 rtx yinsn;
4362 register int i;
4363 register int j;
4364 register enum rtx_code code;
4365 register char *fmt;
4367 code = GET_CODE (x);
4368 /* Rtx's of different codes cannot be equal. */
4369 if (code != GET_CODE (y))
4370 return 0;
4372 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4373 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4375 if (GET_MODE (x) != GET_MODE (y))
4376 return 0;
4378 /* For commutative operations, the RTX match if the operand match in any
4379 order. Also handle the simple binary and unary cases without a loop. */
4380 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4381 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4382 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4383 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4384 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4385 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4386 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4387 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4388 else if (GET_RTX_CLASS (code) == '1')
4389 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4391 /* Handle special-cases first. */
4392 switch (code)
4394 case REG:
4395 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4396 return 1;
4398 /* If neither is user variable or hard register, check for possible
4399 equivalence. */
4400 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4401 || REGNO (x) < FIRST_PSEUDO_REGISTER
4402 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4403 return 0;
4405 if (same_regs[REGNO (x)] == -1)
4407 same_regs[REGNO (x)] = REGNO (y);
4408 num_same_regs++;
4410 /* If this is the first time we are seeing a register on the `Y'
4411 side, see if it is the last use. If not, we can't thread the
4412 jump, so mark it as not equivalent. */
4413 if (regno_last_uid[REGNO (y)] != INSN_UID (yinsn))
4414 return 0;
4416 return 1;
4418 else
4419 return (same_regs[REGNO (x)] == REGNO (y));
4421 break;
4423 case MEM:
4424 /* If memory modified or either volatile, not equivalent.
4425 Else, check address. */
4426 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4427 return 0;
4429 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4431 case ASM_INPUT:
4432 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4433 return 0;
4435 break;
4437 case SET:
4438 /* Cancel a pending `same_regs' if setting equivalenced registers.
4439 Then process source. */
4440 if (GET_CODE (SET_DEST (x)) == REG
4441 && GET_CODE (SET_DEST (y)) == REG)
4443 if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
4445 same_regs[REGNO (SET_DEST (x))] = -1;
4446 num_same_regs--;
4448 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4449 return 0;
4451 else
4452 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4453 return 0;
4455 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4457 case LABEL_REF:
4458 return XEXP (x, 0) == XEXP (y, 0);
4460 case SYMBOL_REF:
4461 return XSTR (x, 0) == XSTR (y, 0);
4464 if (x == y)
4465 return 1;
4467 fmt = GET_RTX_FORMAT (code);
4468 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4470 switch (fmt[i])
4472 case 'w':
4473 if (XWINT (x, i) != XWINT (y, i))
4474 return 0;
4475 break;
4477 case 'n':
4478 case 'i':
4479 if (XINT (x, i) != XINT (y, i))
4480 return 0;
4481 break;
4483 case 'V':
4484 case 'E':
4485 /* Two vectors must have the same length. */
4486 if (XVECLEN (x, i) != XVECLEN (y, i))
4487 return 0;
4489 /* And the corresponding elements must match. */
4490 for (j = 0; j < XVECLEN (x, i); j++)
4491 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4492 XVECEXP (y, i, j), yinsn) == 0)
4493 return 0;
4494 break;
4496 case 'e':
4497 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4498 return 0;
4499 break;
4501 case 'S':
4502 case 's':
4503 if (strcmp (XSTR (x, i), XSTR (y, i)))
4504 return 0;
4505 break;
4507 case 'u':
4508 /* These are just backpointers, so they don't matter. */
4509 break;
4511 case '0':
4512 break;
4514 /* It is believed that rtx's at this level will never
4515 contain anything but integers and other rtx's,
4516 except for within LABEL_REFs and SYMBOL_REFs. */
4517 default:
4518 abort ();
4521 return 1;