* config/i386/i386.md (clrstrsi): Call ix86_set_move_mem_attrs.
[official-gcc.git] / gcc / jump.c
blobf6f524e5e33d3563aa9259a515c0edac931afa91
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
54 #include "config.h"
55 #include "system.h"
56 #include "rtl.h"
57 #include "tm_p.h"
58 #include "flags.h"
59 #include "hard-reg-set.h"
60 #include "regs.h"
61 #include "insn-config.h"
62 #include "insn-flags.h"
63 #include "insn-attr.h"
64 #include "recog.h"
65 #include "function.h"
66 #include "expr.h"
67 #include "real.h"
68 #include "except.h"
69 #include "toplev.h"
71 /* ??? Eventually must record somehow the labels used by jumps
72 from nested functions. */
73 /* Pre-record the next or previous real insn for each label?
74 No, this pass is very fast anyway. */
75 /* Condense consecutive labels?
76 This would make life analysis faster, maybe. */
77 /* Optimize jump y; x: ... y: jumpif... x?
78 Don't know if it is worth bothering with. */
79 /* Optimize two cases of conditional jump to conditional jump?
80 This can never delete any instruction or make anything dead,
81 or even change what is live at any point.
82 So perhaps let combiner do it. */
84 /* Vector indexed by uid.
85 For each CODE_LABEL, index by its uid to get first unconditional jump
86 that jumps to the label.
87 For each JUMP_INSN, index by its uid to get the next unconditional jump
88 that jumps to the same label.
89 Element 0 is the start of a chain of all return insns.
90 (It is safe to use element 0 because insn uid 0 is not used.) */
92 static rtx *jump_chain;
94 /* Maximum index in jump_chain. */
96 static int max_jump_chain;
98 /* Indicates whether death notes are significant in cross jump analysis.
99 Normally they are not significant, because if A and B jump to C,
100 and R dies in A, it must die in B. But this might not be true after
101 stack register conversion, and we must compare death notes in that
102 case. */
104 static int cross_jump_death_matters = 0;
/* Forward declarations for the static subroutines defined below.  */
106 static int init_label_info PARAMS ((rtx));
107 static void delete_barrier_successors PARAMS ((rtx));
108 static void mark_all_labels PARAMS ((rtx, int));
109 static rtx delete_unreferenced_labels PARAMS ((rtx));
110 static void delete_noop_moves PARAMS ((rtx));
111 static int duplicate_loop_exit_test PARAMS ((rtx));
112 static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
113 static void do_cross_jump PARAMS ((rtx, rtx, rtx));
114 static int jump_back_p PARAMS ((rtx, rtx));
115 static int tension_vector_labels PARAMS ((rtx, int));
116 static void delete_computation PARAMS ((rtx));
117 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
118 static int redirect_exp PARAMS ((rtx, rtx, rtx));
119 static void invert_exp_1 PARAMS ((rtx));
120 static int invert_exp PARAMS ((rtx));
121 static void delete_from_jump_chain PARAMS ((rtx));
122 static int delete_labelref_insn PARAMS ((rtx, rtx, int));
123 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
124 static void redirect_tablejump PARAMS ((rtx, rtx));
125 static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
126 static int returnjump_p_1 PARAMS ((rtx *, void *));
127 static void delete_prior_computation PARAMS ((rtx, rtx));
129 /* Main external entry point into the jump optimizer. See comments before
130 jump_optimize_1 for descriptions of the arguments. */
131 void
132 jump_optimize (f, cross_jump, noop_moves, after_regscan)
133 rtx f;
134 int cross_jump;
135 int noop_moves;
136 int after_regscan;
138 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
141 /* Alternate entry into the jump optimizer. This entry point only rebuilds
142 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
143 instructions. */
144 void
145 rebuild_jump_labels (f)
146 rtx f;
148 jump_optimize_1 (f, 0, 0, 0, 1, 0);
151 /* Alternate entry into the jump optimizer. Do only trivial optimizations. */
153 void
154 jump_optimize_minimal (f)
155 rtx f;
157 jump_optimize_1 (f, 0, 0, 0, 0, 1);
160 /* Delete no-op jumps and optimize jumps to jumps
161 and jumps around jumps.
162 Delete unused labels and unreachable code.
164 If CROSS_JUMP is 1, detect matching code
165 before a jump and its destination and unify them.
166 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
168 If NOOP_MOVES is nonzero, delete no-op move insns.
170 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
171 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
173 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
174 and JUMP_LABEL field for jumping insns.
176 If `optimize' is zero, don't change any code,
177 just determine whether control drops off the end of the function.
178 This case occurs when we have -W and not -O.
179 It works because `delete_insn' checks the value of `optimize'
180 and refrains from actually deleting when that is 0.
182 If MINIMAL is nonzero, then we only perform trivial optimizations:
184 * Removal of unreachable code after BARRIERs.
185 * Removal of unreferenced CODE_LABELs.
186 * Removal of a jump to the next instruction.
187 * Removal of a conditional jump followed by an unconditional jump
188 to the same target as the conditional jump.
189 * Simplify a conditional jump around an unconditional jump.
190 * Simplify a jump to a jump.
191 * Delete extraneous line number notes.
/* Worker for all the entry points above: one complete jump-optimization
   run over the insn chain F.  NOTE (review): the brace-only lines of this
   function were lost in extraction, so the nesting of the statements below
   must be confirmed against the original file before editing.  */
194 static void
195 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
196 mark_labels_only, minimal)
197 rtx f;
198 int cross_jump;
199 int noop_moves;
200 int after_regscan;
201 int mark_labels_only;
202 int minimal;
204 register rtx insn, next;
205 int changed;
206 int old_max_reg;
207 int first = 1;
208 int max_uid = 0;
209 rtx last_insn;
210 #ifdef HAVE_trap
211 enum rtx_code reversed_code;
212 #endif
/* CROSS_JUMP == 2 means cross jumping must respect death notes.  */
214 cross_jump_death_matters = (cross_jump == 2);
215 max_uid = init_label_info (f) + 1;
217 /* If we are performing cross jump optimizations, then initialize
218 tables mapping UIDs to EH regions to avoid incorrect movement
219 of insns from one EH region to another. */
220 if (flag_exceptions && cross_jump)
221 init_insn_eh_region (f, max_uid);
223 if (! mark_labels_only)
224 delete_barrier_successors (f);
226 /* Leave some extra room for labels and duplicate exit test insns
227 we make. */
228 max_jump_chain = max_uid * 14 / 10;
229 jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
231 mark_all_labels (f, cross_jump);
233 /* Keep track of labels used from static data; we don't track them
234 closely enough to delete them here, so make sure their reference
235 count doesn't drop to zero. */
237 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
238 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
239 LABEL_NUSES (XEXP (insn, 0))++;
241 check_exception_handler_labels ();
243 /* Keep track of labels used for marking handlers for exception
244 regions; they cannot usually be deleted. */
246 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
247 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
248 LABEL_NUSES (XEXP (insn, 0))++;
250 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
251 notes and recompute LABEL_NUSES. */
252 if (mark_labels_only)
253 goto end;
255 if (! minimal)
256 exception_optimize ();
258 last_insn = delete_unreferenced_labels (f);
260 if (noop_moves)
261 delete_noop_moves (f);
263 /* If we haven't yet gotten to reload and we have just run regscan,
264 delete any insn that sets a register that isn't used elsewhere.
265 This helps some of the optimizations below by having fewer insns
266 being jumped around. */
268 if (optimize && ! reload_completed && after_regscan)
269 for (insn = f; insn; insn = next)
271 rtx set = single_set (insn);
273 next = NEXT_INSN (insn);
275 if (set && GET_CODE (SET_DEST (set)) == REG
276 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
277 && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
278 /* We use regno_last_note_uid so as not to delete the setting
279 of a reg that's used in notes. A subsequent optimization
280 might arrange to use that reg for real. */
281 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
282 && ! side_effects_p (SET_SRC (set))
283 && ! find_reg_note (insn, REG_RETVAL, 0)
284 /* An ADDRESSOF expression can turn into a use of the internal arg
285 pointer, so do not delete the initialization of the internal
286 arg pointer yet. If it is truly dead, flow will delete the
287 initializing insn. */
288 && SET_DEST (set) != current_function_internal_arg_pointer)
289 delete_insn (insn);
292 /* Now iterate optimizing jumps until nothing changes over one pass. */
293 changed = 1;
294 old_max_reg = max_reg_num ();
295 while (changed)
297 changed = 0;
299 for (insn = f; insn; insn = next)
301 rtx reallabelprev;
302 rtx temp, temp1, temp2 = NULL_RTX;
303 rtx temp4 ATTRIBUTE_UNUSED;
304 rtx nlabel;
305 int this_is_any_uncondjump;
306 int this_is_any_condjump;
307 int this_is_onlyjump;
309 next = NEXT_INSN (insn);
311 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
312 jump. Try to optimize by duplicating the loop exit test if so.
313 This is only safe immediately after regscan, because it uses
314 the values of regno_first_uid and regno_last_uid. */
315 if (after_regscan && GET_CODE (insn) == NOTE
316 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
317 && (temp1 = next_nonnote_insn (insn)) != 0
318 && any_uncondjump_p (temp1)
319 && onlyjump_p (temp1))
321 temp = PREV_INSN (insn);
322 if (duplicate_loop_exit_test (insn))
324 changed = 1;
325 next = NEXT_INSN (temp);
326 continue;
330 if (GET_CODE (insn) != JUMP_INSN)
331 continue;
333 this_is_any_condjump = any_condjump_p (insn);
334 this_is_any_uncondjump = any_uncondjump_p (insn);
335 this_is_onlyjump = onlyjump_p (insn);
337 /* Tension the labels in dispatch tables. */
339 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
340 changed |= tension_vector_labels (PATTERN (insn), 0);
341 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
342 changed |= tension_vector_labels (PATTERN (insn), 1);
344 /* See if this jump goes to another jump and redirect if so. */
345 nlabel = follow_jumps (JUMP_LABEL (insn));
346 if (nlabel != JUMP_LABEL (insn))
347 changed |= redirect_jump (insn, nlabel, 1);
/* In the MINIMAL case only the transformations above are performed.  */
349 if (! optimize || minimal)
350 continue;
352 /* If a dispatch table always goes to the same place,
353 get rid of it and replace the insn that uses it. */
355 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
356 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
358 int i;
359 rtx pat = PATTERN (insn);
360 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
361 int len = XVECLEN (pat, diff_vec_p);
362 rtx dispatch = prev_real_insn (insn);
363 rtx set;
365 for (i = 0; i < len; i++)
366 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
367 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
368 break;
370 if (i == len
371 && dispatch != 0
372 && GET_CODE (dispatch) == JUMP_INSN
373 && JUMP_LABEL (dispatch) != 0
374 /* Don't mess with a casesi insn.
375 XXX according to the comment before computed_jump_p(),
376 all casesi insns should be a parallel of the jump
377 and a USE of a LABEL_REF. */
378 && ! ((set = single_set (dispatch)) != NULL
379 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
380 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
382 redirect_tablejump (dispatch,
383 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
384 changed = 1;
388 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
390 /* Detect jump to following insn. */
391 if (reallabelprev == insn
392 && (this_is_any_condjump || this_is_any_uncondjump)
393 && this_is_onlyjump)
395 next = next_real_insn (JUMP_LABEL (insn));
396 delete_jump (insn);
398 /* Remove the "inactive" but "real" insns (i.e. uses and
399 clobbers) in between here and there. */
400 temp = insn;
401 while ((temp = next_real_insn (temp)) != next)
402 delete_insn (temp);
404 changed = 1;
405 continue;
408 /* Detect a conditional jump going to the same place
409 as an immediately following unconditional jump. */
410 else if (this_is_any_condjump && this_is_onlyjump
411 && (temp = next_active_insn (insn)) != 0
412 && simplejump_p (temp)
413 && (next_active_insn (JUMP_LABEL (insn))
414 == next_active_insn (JUMP_LABEL (temp))))
416 /* Don't mess up test coverage analysis. */
417 temp2 = temp;
418 if (flag_test_coverage && !reload_completed)
419 for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
420 if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
421 break;
423 if (temp2 == temp)
425 /* Ensure that we jump to the later of the two labels.
426 Consider:
428 if (test) goto L2;
429 goto L1;
432 (clobber return-reg)
434 (use return-reg)
436 If we leave the goto L1, we'll incorrectly leave
437 return-reg dead for TEST true. */
439 temp2 = next_active_insn (JUMP_LABEL (insn));
440 if (!temp2)
441 temp2 = get_last_insn ();
442 if (GET_CODE (temp2) != CODE_LABEL)
443 temp2 = prev_label (temp2);
444 if (temp2 != JUMP_LABEL (temp))
445 redirect_jump (temp, temp2, 1);
447 delete_jump (insn);
448 changed = 1;
449 continue;
453 /* Detect a conditional jump jumping over an unconditional jump. */
455 else if (this_is_any_condjump
456 && reallabelprev != 0
457 && GET_CODE (reallabelprev) == JUMP_INSN
458 && prev_active_insn (reallabelprev) == insn
459 && no_labels_between_p (insn, reallabelprev)
460 && any_uncondjump_p (reallabelprev)
461 && onlyjump_p (reallabelprev))
463 /* When we invert the unconditional jump, we will be
464 decrementing the usage count of its old label.
465 Make sure that we don't delete it now because that
466 might cause the following code to be deleted. */
467 rtx prev_uses = prev_nonnote_insn (reallabelprev);
468 rtx prev_label = JUMP_LABEL (insn);
470 if (prev_label)
471 ++LABEL_NUSES (prev_label);
473 if (invert_jump (insn, JUMP_LABEL (reallabelprev), 1))
475 /* It is very likely that if there are USE insns before
476 this jump, they hold REG_DEAD notes. These REG_DEAD
477 notes are no longer valid due to this optimization,
478 and will cause the life-analysis that following passes
479 (notably delayed-branch scheduling) to think that
480 these registers are dead when they are not.
482 To prevent this trouble, we just remove the USE insns
483 from the insn chain. */
485 while (prev_uses && GET_CODE (prev_uses) == INSN
486 && GET_CODE (PATTERN (prev_uses)) == USE)
488 rtx useless = prev_uses;
489 prev_uses = prev_nonnote_insn (prev_uses);
490 delete_insn (useless);
493 delete_insn (reallabelprev);
494 changed = 1;
497 /* We can now safely delete the label if it is unreferenced
498 since the delete_insn above has deleted the BARRIER. */
499 if (prev_label && --LABEL_NUSES (prev_label) == 0)
500 delete_insn (prev_label);
502 next = NEXT_INSN (insn);
505 /* If we have an unconditional jump preceded by a USE, try to put
506 the USE before the target and jump there. This simplifies many
507 of the optimizations below since we don't have to worry about
508 dealing with these USE insns. We only do this if the label
509 being branched to already has the identical USE or if code
510 never falls through to that label. */
512 else if (this_is_any_uncondjump
513 && (temp = prev_nonnote_insn (insn)) != 0
514 && GET_CODE (temp) == INSN
515 && GET_CODE (PATTERN (temp)) == USE
516 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
517 && (GET_CODE (temp1) == BARRIER
518 || (GET_CODE (temp1) == INSN
519 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
520 /* Don't do this optimization if we have a loop containing
521 only the USE instruction, and the loop start label has
522 a usage count of 1. This is because we will redo this
523 optimization every time through the outer loop, and jump
524 opt will never exit. */
525 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
526 && temp2 == JUMP_LABEL (insn)
527 && LABEL_NUSES (temp2) == 1))
529 if (GET_CODE (temp1) == BARRIER)
531 emit_insn_after (PATTERN (temp), temp1);
532 temp1 = NEXT_INSN (temp1);
535 delete_insn (temp);
536 redirect_jump (insn, get_label_before (temp1), 1);
537 reallabelprev = prev_real_insn (temp1);
538 changed = 1;
539 next = NEXT_INSN (insn);
542 #ifdef HAVE_trap
543 /* Detect a conditional jump jumping over an unconditional trap. */
544 if (HAVE_trap
545 && this_is_any_condjump && this_is_onlyjump
546 && reallabelprev != 0
547 && GET_CODE (reallabelprev) == INSN
548 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
549 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
550 && prev_active_insn (reallabelprev) == insn
551 && no_labels_between_p (insn, reallabelprev)
552 && (temp2 = get_condition (insn, &temp4))
553 && ((reversed_code = reversed_comparison_code (temp2, insn))
554 != UNKNOWN))
556 rtx new = gen_cond_trap (reversed_code,
557 XEXP (temp2, 0), XEXP (temp2, 1),
558 TRAP_CODE (PATTERN (reallabelprev)));
560 if (new)
562 emit_insn_before (new, temp4);
563 delete_insn (reallabelprev);
564 delete_jump (insn);
565 changed = 1;
566 continue;
569 /* Detect a jump jumping to an unconditional trap. */
570 else if (HAVE_trap && this_is_onlyjump
571 && (temp = next_active_insn (JUMP_LABEL (insn)))
572 && GET_CODE (temp) == INSN
573 && GET_CODE (PATTERN (temp)) == TRAP_IF
574 && (this_is_any_uncondjump
575 || (this_is_any_condjump
576 && (temp2 = get_condition (insn, &temp4)))))
578 rtx tc = TRAP_CONDITION (PATTERN (temp));
580 if (tc == const_true_rtx
581 || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
583 rtx new;
584 /* Replace an unconditional jump to a trap with a trap. */
585 if (this_is_any_uncondjump)
587 emit_barrier_after (emit_insn_before (gen_trap (), insn));
588 delete_jump (insn);
589 changed = 1;
590 continue;
592 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
593 XEXP (temp2, 1),
594 TRAP_CODE (PATTERN (temp)));
595 if (new)
597 emit_insn_before (new, temp4);
598 delete_jump (insn);
599 changed = 1;
600 continue;
603 /* If the trap condition and jump condition are mutually
604 exclusive, redirect the jump to the following insn. */
605 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
606 && this_is_any_condjump
607 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
608 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
609 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
610 && redirect_jump (insn, get_label_after (temp), 1))
612 changed = 1;
613 continue;
616 #endif
617 else
619 /* Now that the jump has been tensioned,
620 try cross jumping: check for identical code
621 before the jump and before its target label. */
623 /* First, cross jumping of conditional jumps: */
625 if (cross_jump && condjump_p (insn))
627 rtx newjpos, newlpos;
628 rtx x = prev_real_insn (JUMP_LABEL (insn));
630 /* A conditional jump may be crossjumped
631 only if the place it jumps to follows
632 an opposing jump that comes back here. */
634 if (x != 0 && ! jump_back_p (x, insn))
635 /* We have no opposing jump;
636 cannot cross jump this insn. */
637 x = 0;
639 newjpos = 0;
640 /* TARGET is nonzero if it is ok to cross jump
641 to code before TARGET. If so, see if matches. */
642 if (x != 0)
643 find_cross_jump (insn, x, 2,
644 &newjpos, &newlpos);
646 if (newjpos != 0)
648 do_cross_jump (insn, newjpos, newlpos);
649 /* Make the old conditional jump
650 into an unconditional one. */
651 SET_SRC (PATTERN (insn))
652 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
653 INSN_CODE (insn) = -1;
654 emit_barrier_after (insn);
655 /* Add to jump_chain unless this is a new label
656 whose UID is too large. */
657 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
659 jump_chain[INSN_UID (insn)]
660 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
661 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
663 changed = 1;
664 next = insn;
668 /* Cross jumping of unconditional jumps:
669 a few differences. */
671 if (cross_jump && simplejump_p (insn))
673 rtx newjpos, newlpos;
674 rtx target;
676 newjpos = 0;
678 /* TARGET is nonzero if it is ok to cross jump
679 to code before TARGET. If so, see if matches. */
680 find_cross_jump (insn, JUMP_LABEL (insn), 1,
681 &newjpos, &newlpos);
683 /* If cannot cross jump to code before the label,
684 see if we can cross jump to another jump to
685 the same label. */
686 /* Try each other jump to this label. */
687 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
688 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
689 target != 0 && newjpos == 0;
690 target = jump_chain[INSN_UID (target)])
691 if (target != insn
692 && JUMP_LABEL (target) == JUMP_LABEL (insn)
693 /* Ignore TARGET if it's deleted. */
694 && ! INSN_DELETED_P (target))
695 find_cross_jump (insn, target, 2,
696 &newjpos, &newlpos);
698 if (newjpos != 0)
700 do_cross_jump (insn, newjpos, newlpos);
701 changed = 1;
702 next = insn;
706 /* This code was dead in the previous jump.c! */
707 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
709 /* Return insns all "jump to the same place"
710 so we can cross-jump between any two of them. */
712 rtx newjpos, newlpos, target;
714 newjpos = 0;
716 /* If cannot cross jump to code before the label,
717 see if we can cross jump to another jump to
718 the same label. */
719 /* Try each other jump to this label. */
720 for (target = jump_chain[0];
721 target != 0 && newjpos == 0;
722 target = jump_chain[INSN_UID (target)])
723 if (target != insn
724 && ! INSN_DELETED_P (target)
725 && GET_CODE (PATTERN (target)) == RETURN)
726 find_cross_jump (insn, target, 2,
727 &newjpos, &newlpos);
729 if (newjpos != 0)
731 do_cross_jump (insn, newjpos, newlpos);
732 changed = 1;
733 next = insn;
739 first = 0;
742 /* Delete extraneous line number notes.
743 Note that two consecutive notes for different lines are not really
744 extraneous. There should be some indication where that line belonged,
745 even if it became empty. */
748 rtx last_note = 0;
750 for (insn = f; insn; insn = NEXT_INSN (insn))
751 if (GET_CODE (insn) == NOTE)
753 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
754 /* Any previous line note was for the prologue; gdb wants a new
755 note after the prologue even if it is for the same line. */
756 last_note = NULL_RTX;
757 else if (NOTE_LINE_NUMBER (insn) >= 0)
759 /* Delete this note if it is identical to previous note. */
760 if (last_note
761 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
762 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
764 delete_insn (insn);
765 continue;
768 last_note = insn;
/* Common exit: release the jump_chain table allocated above.  */
773 end:
774 /* Clean up. */
775 free (jump_chain);
776 jump_chain = 0;
779 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
780 notes whose labels don't occur in the insn any more. Returns the
781 largest INSN_UID found. */
782 static int
783 init_label_info (f)
784 rtx f;
786 int largest_uid = 0;
787 rtx insn;
789 for (insn = f; insn; insn = NEXT_INSN (insn))
791 if (GET_CODE (insn) == CODE_LABEL)
792 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
793 else if (GET_CODE (insn) == JUMP_INSN)
794 JUMP_LABEL (insn) = 0;
795 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
797 rtx note, next;
799 for (note = REG_NOTES (insn); note; note = next)
801 next = XEXP (note, 1);
802 if (REG_NOTE_KIND (note) == REG_LABEL
803 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
804 remove_note (insn, note);
807 if (INSN_UID (insn) > largest_uid)
808 largest_uid = INSN_UID (insn);
811 return largest_uid;
814 /* Delete insns following barriers, up to next label.
816 Also delete no-op jumps created by gcse. */
818 static void
819 delete_barrier_successors (f)
820 rtx f;
822 rtx insn;
823 rtx set;
825 for (insn = f; insn;)
827 if (GET_CODE (insn) == BARRIER)
829 insn = NEXT_INSN (insn);
831 never_reached_warning (insn);
833 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
835 if (GET_CODE (insn) == NOTE
836 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
837 insn = NEXT_INSN (insn);
838 else
839 insn = delete_insn (insn);
841 /* INSN is now the code_label. */
844 /* Also remove (set (pc) (pc)) insns which can be created by
845 gcse. We eliminate such insns now to avoid having them
846 cause problems later. */
847 else if (GET_CODE (insn) == JUMP_INSN
848 && (set = pc_set (insn)) != NULL
849 && SET_SRC (set) == pc_rtx
850 && SET_DEST (set) == pc_rtx
851 && onlyjump_p (insn))
852 insn = delete_insn (insn);
854 else
855 insn = NEXT_INSN (insn);
859 /* Mark the label each jump jumps to.
860 Combine consecutive labels, and count uses of labels.
862 For each label, make a chain (using `jump_chain')
863 of all the *unconditional* jumps that jump to it;
864 also make a chain of all returns.
866 CROSS_JUMP indicates whether we are doing cross jumping
867 and if we are whether we will be paying attention to
868 death notes or not. */
870 static void
871 mark_all_labels (f, cross_jump)
872 rtx f;
873 int cross_jump;
875 rtx insn;
877 for (insn = f; insn; insn = NEXT_INSN (insn))
878 if (INSN_P (insn))
880 if (GET_CODE (insn) == CALL_INSN
881 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
883 mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
884 mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
885 mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
886 continue;
889 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
890 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
892 /* When we know the LABEL_REF contained in a REG used in
893 an indirect jump, we'll have a REG_LABEL note so that
894 flow can tell where it's going. */
895 if (JUMP_LABEL (insn) == 0)
897 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
898 if (label_note)
900 /* But a LABEL_REF around the REG_LABEL note, so
901 that we can canonicalize it. */
902 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
903 XEXP (label_note, 0));
905 mark_jump_label (label_ref, insn, cross_jump, 0);
906 XEXP (label_note, 0) = XEXP (label_ref, 0);
907 JUMP_LABEL (insn) = XEXP (label_note, 0);
910 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
912 jump_chain[INSN_UID (insn)]
913 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
914 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
916 if (GET_CODE (PATTERN (insn)) == RETURN)
918 jump_chain[INSN_UID (insn)] = jump_chain[0];
919 jump_chain[0] = insn;
925 /* Delete all labels already not referenced.
926 Also find and return the last insn. */
928 static rtx
929 delete_unreferenced_labels (f)
930 rtx f;
932 rtx final = NULL_RTX;
933 rtx insn;
935 for (insn = f; insn;)
937 if (GET_CODE (insn) == CODE_LABEL
938 && LABEL_NUSES (insn) == 0
939 && LABEL_ALTERNATE_NAME (insn) == NULL)
940 insn = delete_insn (insn);
941 else
943 final = insn;
944 insn = NEXT_INSN (insn);
948 return final;
951 /* Delete various simple forms of moves which have no necessary
952 side effect. */
954 static void
955 delete_noop_moves (f)
956 rtx f;
958 rtx insn, next;
960 for (insn = f; insn;)
962 next = NEXT_INSN (insn);
964 if (GET_CODE (insn) == INSN)
966 register rtx body = PATTERN (insn);
968 /* Detect and delete no-op move instructions
969 resulting from not allocating a parameter in a register. */
971 if (GET_CODE (body) == SET
972 && (SET_DEST (body) == SET_SRC (body)
973 || (GET_CODE (SET_DEST (body)) == MEM
974 && GET_CODE (SET_SRC (body)) == MEM
975 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
976 && ! (GET_CODE (SET_DEST (body)) == MEM
977 && MEM_VOLATILE_P (SET_DEST (body)))
978 && ! (GET_CODE (SET_SRC (body)) == MEM
979 && MEM_VOLATILE_P (SET_SRC (body))))
980 delete_computation (insn);
982 /* Detect and ignore no-op move instructions
983 resulting from smart or fortuitous register allocation. */
985 else if (GET_CODE (body) == SET)
987 int sreg = true_regnum (SET_SRC (body));
988 int dreg = true_regnum (SET_DEST (body));
990 if (sreg == dreg && sreg >= 0)
991 delete_insn (insn);
992 else if (sreg >= 0 && dreg >= 0)
994 rtx trial;
995 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
996 sreg, NULL_PTR, dreg,
997 GET_MODE (SET_SRC (body)));
999 if (tem != 0
1000 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
1002 /* DREG may have been the target of a REG_DEAD note in
1003 the insn which makes INSN redundant. If so, reorg
1004 would still think it is dead. So search for such a
1005 note and delete it if we find it. */
1006 if (! find_regno_note (insn, REG_UNUSED, dreg))
1007 for (trial = prev_nonnote_insn (insn);
1008 trial && GET_CODE (trial) != CODE_LABEL;
1009 trial = prev_nonnote_insn (trial))
1010 if (find_regno_note (trial, REG_DEAD, dreg))
1012 remove_death (dreg, trial);
1013 break;
1016 /* Deleting insn could lose a death-note for SREG. */
1017 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
1019 /* Change this into a USE so that we won't emit
1020 code for it, but still can keep the note. */
1021 PATTERN (insn)
1022 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
1023 INSN_CODE (insn) = -1;
1024 /* Remove all reg notes but the REG_DEAD one. */
1025 REG_NOTES (insn) = trial;
1026 XEXP (trial, 1) = NULL_RTX;
1028 else
1029 delete_insn (insn);
1032 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
1033 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
1034 NULL_PTR, 0,
1035 GET_MODE (SET_DEST (body))))
1037 /* This handles the case where we have two consecutive
1038 assignments of the same constant to pseudos that didn't
1039 get a hard reg. Each SET from the constant will be
1040 converted into a SET of the spill register and an
1041 output reload will be made following it. This produces
1042 two loads of the same constant into the same spill
1043 register. */
1045 rtx in_insn = insn;
1047 /* Look back for a death note for the first reg.
1048 If there is one, it is no longer accurate. */
1049 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
1051 if ((GET_CODE (in_insn) == INSN
1052 || GET_CODE (in_insn) == JUMP_INSN)
1053 && find_regno_note (in_insn, REG_DEAD, dreg))
1055 remove_death (dreg, in_insn);
1056 break;
1058 in_insn = PREV_INSN (in_insn);
1061 /* Delete the second load of the value. */
1062 delete_insn (insn);
1065 else if (GET_CODE (body) == PARALLEL)
1067 /* If each part is a set between two identical registers or
1068 a USE or CLOBBER, delete the insn. */
1069 int i, sreg, dreg;
1070 rtx tem;
1072 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1074 tem = XVECEXP (body, 0, i);
1075 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
1076 continue;
1078 if (GET_CODE (tem) != SET
1079 || (sreg = true_regnum (SET_SRC (tem))) < 0
1080 || (dreg = true_regnum (SET_DEST (tem))) < 0
1081 || dreg != sreg)
1082 break;
1085 if (i < 0)
1086 delete_insn (insn);
1088 /* Also delete insns to store bit fields if they are no-ops. */
1089 /* Not worth the hair to detect this in the big-endian case. */
1090 else if (! BYTES_BIG_ENDIAN
1091 && GET_CODE (body) == SET
1092 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
1093 && XEXP (SET_DEST (body), 2) == const0_rtx
1094 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
1095 && ! (GET_CODE (SET_SRC (body)) == MEM
1096 && MEM_VOLATILE_P (SET_SRC (body))))
1097 delete_insn (insn);
1099 insn = next;
1103 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1104 jump. Assume that this unconditional jump is to the exit test code. If
1105 the code is sufficiently simple, make a copy of it before INSN,
1106 followed by a jump to the exit of the loop. Then delete the unconditional
1107 jump after INSN.
1109 Return 1 if we made the change, else 0.
1111 This is only safe immediately after a regscan pass because it uses the
1112 values of regno_first_uid and regno_last_uid. */
1114 static int
1115 duplicate_loop_exit_test (loop_start)
1116 rtx loop_start;
1118 rtx insn, set, reg, p, link;
1119 rtx copy = 0, first_copy = 0;
1120 int num_insns = 0;
/* EXITCODE is the first insn of the exit-test sequence: the insn after
   the label targeted by the unconditional jump that follows LOOP_START.  */
1121 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
1122 rtx lastexit;
1123 int max_reg = max_reg_num ();
/* REG_MAP is lazily allocated; maps old pseudo regno -> replacement reg.  */
1124 rtx *reg_map = 0;
1126 /* Scan the exit code. We do not perform this optimization if any insn:
1128 is a CALL_INSN
1129 is a CODE_LABEL
1130 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1131 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1132 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1133 is not valid.
1135 We also do not do this if we find an insn with ASM_OPERANDS. While
1136 this restriction should not be necessary, copying an insn with
1137 ASM_OPERANDS can confuse asm_noperands in some cases.
1139 Also, don't do this if the exit code is more than 20 insns. */
/* Phase 1: walk from EXITCODE to the loop-end note, vetting each insn.  */
1141 for (insn = exitcode;
1142 insn
1143 && ! (GET_CODE (insn) == NOTE
1144 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
1145 insn = NEXT_INSN (insn))
1147 switch (GET_CODE (insn))
1149 case CODE_LABEL:
1150 case CALL_INSN:
1151 return 0;
1152 case NOTE:
1153 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
1154 a jump immediately after the loop start that branches outside
1155 the loop but within an outer loop, near the exit test.
1156 If we copied this exit test and created a phony
1157 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
1158 before the exit test look like these could be safely moved
1159 out of the loop even if they actually may be never executed.
1160 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
1162 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1163 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
1164 return 0;
1166 if (optimize < 2
1167 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1168 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1169 /* If we were to duplicate this code, we would not move
1170 the BLOCK notes, and so debugging the moved code would
1171 be difficult. Thus, we only move the code with -O2 or
1172 higher. */
1173 return 0;
1175 break;
1176 case JUMP_INSN:
1177 case INSN:
1178 /* The code below would grossly mishandle REG_WAS_0 notes,
1179 so get rid of them here. */
1180 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
1181 remove_note (insn, p);
1182 if (++num_insns > 20
1183 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1184 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
1185 return 0;
1186 break;
1187 default:
1188 break;
1192 /* Unless INSN is zero, we can do the optimization. */
1193 if (insn == 0)
1194 return 0;
1196 lastexit = insn;
1198 /* See if any insn sets a register only used in the loop exit code and
1199 not a user variable. If so, replace it with a new register. */
/* Phase 2: pseudos born and dying entirely inside the exit code get
   fresh registers in the copy, recorded in REG_MAP.  */
1200 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1201 if (GET_CODE (insn) == INSN
1202 && (set = single_set (insn)) != 0
1203 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
1204 || (GET_CODE (reg) == SUBREG
1205 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
1206 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1207 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
1209 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
1210 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
1211 break;
1213 if (p != lastexit)
1215 /* We can do the replacement. Allocate reg_map if this is the
1216 first replacement we found. */
1217 if (reg_map == 0)
1218 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx))
1220 REG_LOOP_TEST_P (reg) = 1;
1222 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
1226 /* Now copy each insn. */
/* Phase 3: emit a copy of every exit-code insn just before LOOP_START,
   substituting registers per REG_MAP.  */
1227 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1229 switch (GET_CODE (insn))
1231 case BARRIER:
1232 copy = emit_barrier_before (loop_start);
1233 break;
1234 case NOTE:
1235 /* Only copy line-number notes. */
1236 if (NOTE_LINE_NUMBER (insn) >= 0)
1238 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
1239 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
1241 break;
1243 case INSN:
1244 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
1245 if (reg_map)
1246 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1248 mark_jump_label (PATTERN (copy), copy, 0, 0);
1250 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
1251 make them. */
1252 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1253 if (REG_NOTE_KIND (link) != REG_LABEL)
1255 if (GET_CODE (link) == EXPR_LIST)
1256 REG_NOTES (copy)
1257 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
1258 XEXP (link, 0),
1259 REG_NOTES (copy)));
1260 else
1261 REG_NOTES (copy)
1262 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
1263 XEXP (link, 0),
1264 REG_NOTES (copy)));
1267 if (reg_map && REG_NOTES (copy))
1268 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1269 break;
1271 case JUMP_INSN:
1272 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
1273 loop_start);
1274 if (reg_map)
1275 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1276 mark_jump_label (PATTERN (copy), copy, 0, 0);
1277 if (REG_NOTES (insn))
1279 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
1280 if (reg_map)
1281 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1284 /* If this is a simple jump, add it to the jump chain. */
1286 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
1287 && simplejump_p (copy))
1289 jump_chain[INSN_UID (copy)]
1290 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1291 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1293 break;
1295 default:
1296 abort ();
1299 /* Record the first insn we copied. We need it so that we can
1300 scan the copied insns for new pseudo registers. */
1301 if (! first_copy)
1302 first_copy = copy;
1305 /* Now clean up by emitting a jump to the end label and deleting the jump
1306 at the start of the loop. */
1307 if (! copy || GET_CODE (copy) != BARRIER)
1309 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
1310 loop_start);
1312 /* Record the first insn we copied. We need it so that we can
1313 scan the copied insns for new pseudo registers. This may not
1314 be strictly necessary since we should have copied at least one
1315 insn above. But I am going to be safe. */
1316 if (! first_copy)
1317 first_copy = copy;
1319 mark_jump_label (PATTERN (copy), copy, 0, 0);
1320 if (INSN_UID (copy) < max_jump_chain
1321 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
1323 jump_chain[INSN_UID (copy)]
1324 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1325 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1327 emit_barrier_before (loop_start);
1330 /* Now scan from the first insn we copied to the last insn we copied
1331 (copy) for new pseudo registers. Do this after the code to jump to
1332 the end label since that might create a new pseudo too. */
1333 reg_scan_update (first_copy, copy, max_reg);
1335 /* Mark the exit code as the virtual top of the converted loop. */
1336 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
1338 delete_insn (next_nonnote_insn (loop_start));
1340 /* Clean up. */
1341 if (reg_map)
1342 free (reg_map);
1344 return 1;
1347 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
1348 notes between START and END out before START. Assume that END is not
1349 such a note. START may be such a note. Returns the value of the new
1350 starting insn, which may be different if the original start was such a
1351 note. */
1354 squeeze_notes (start, end)
1355 rtx start, end;
1357 rtx insn;
1358 rtx next;
/* NEXT is cached before any list surgery so the walk survives unlinking
   INSN from the chain.  */
1360 for (insn = start; insn != end; insn = next)
1362 next = NEXT_INSN (insn);
1363 if (GET_CODE (insn) == NOTE
1364 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
1365 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1366 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1367 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
1368 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
1369 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* If the very first insn is such a note, just advance START past it;
   the note is then already "before" the new START.  */
1371 if (insn == start)
1372 start = next;
1373 else
/* Otherwise unlink INSN from its current position and splice it back
   in immediately before START.  Both the forward and backward links of
   all four affected neighbors are patched.  */
1375 rtx prev = PREV_INSN (insn);
1376 PREV_INSN (insn) = PREV_INSN (start);
1377 NEXT_INSN (insn) = start;
1378 NEXT_INSN (PREV_INSN (insn)) = insn;
1379 PREV_INSN (NEXT_INSN (insn)) = insn;
1380 NEXT_INSN (prev) = next;
1381 PREV_INSN (next) = prev;
1386 return start;
1389 /* Compare the instructions before insn E1 with those before E2
1390 to find an opportunity for cross jumping.
1391 (This means detecting identical sequences of insns followed by
1392 jumps to the same place, or followed by a label and a jump
1393 to that label, and replacing one with a jump to the other.)
1395 Assume E1 is a jump that jumps to label E2
1396 (that is not always true but it might as well be).
1397 Find the longest possible equivalent sequences
1398 and store the first insns of those sequences into *F1 and *F2.
1399 Store zero there if no equivalent preceding instructions are found.
1401 We give up if we find a label in stream 1.
1402 Actually we could transfer that label into stream 2. */
1404 static void
1405 find_cross_jump (e1, e2, minimum, f1, f2)
1406 rtx e1, e2;
1407 int minimum;
1408 rtx *f1, *f2;
1410 register rtx i1 = e1, i2 = e2;
1411 register rtx p1, p2;
1412 int lose = 0;
/* LAST1/LAST2 mark the start of the longest confirmed match so far;
   AFTERLAST1/AFTERLAST2 remember the previous candidates so a match can
   be rolled back by one insn (see the cc0 case below).  */
1414 rtx last1 = 0, last2 = 0;
1415 rtx afterlast1 = 0, afterlast2 = 0;
1417 *f1 = 0;
1418 *f2 = 0;
/* Walk the two streams backwards in lock step, comparing one insn from
   each per iteration, until a mismatch or a stop condition.  */
1420 while (1)
1422 i1 = prev_nonnote_insn (i1);
1424 i2 = PREV_INSN (i2);
1425 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
1426 i2 = PREV_INSN (i2);
1428 if (i1 == 0)
1429 break;
1431 /* Don't allow the range of insns preceding E1 or E2
1432 to include the other (E2 or E1). */
1433 if (i2 == e1 || i1 == e2)
1434 break;
1436 /* If we will get to this code by jumping, those jumps will be
1437 tensioned to go directly to the new label (before I2),
1438 so this cross-jumping won't cost extra. So reduce the minimum. */
1439 if (GET_CODE (i1) == CODE_LABEL)
1441 --minimum;
1442 break;
1445 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
1446 break;
1448 /* Avoid moving insns across EH regions if either of the insns
1449 can throw. */
1450 if (flag_exceptions
1451 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
1452 && !in_same_eh_region (i1, i2))
1453 break;
1455 p1 = PATTERN (i1);
1456 p2 = PATTERN (i2);
1458 /* If this is a CALL_INSN, compare register usage information.
1459 If we don't check this on stack register machines, the two
1460 CALL_INSNs might be merged leaving reg-stack.c with mismatching
1461 numbers of stack registers in the same basic block.
1462 If we don't check this on machines with delay slots, a delay slot may
1463 be filled that clobbers a parameter expected by the subroutine.
1465 ??? We take the simple route for now and assume that if they're
1466 equal, they were constructed identically. */
1468 if (GET_CODE (i1) == CALL_INSN
1469 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
1470 CALL_INSN_FUNCTION_USAGE (i2)))
1471 lose = 1;
1473 #ifdef STACK_REGS
1474 /* If cross_jump_death_matters is not 0, the insn's mode
1475 indicates whether or not the insn contains any stack-like
1476 regs. */
1478 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
1480 /* If register stack conversion has already been done, then
1481 death notes must also be compared before it is certain that
1482 the two instruction streams match. */
1484 rtx note;
1485 HARD_REG_SET i1_regset, i2_regset;
1487 CLEAR_HARD_REG_SET (i1_regset);
1488 CLEAR_HARD_REG_SET (i2_regset);
1490 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
1491 if (REG_NOTE_KIND (note) == REG_DEAD
1492 && STACK_REG_P (XEXP (note, 0)))
1493 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
1495 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
1496 if (REG_NOTE_KIND (note) == REG_DEAD
1497 && STACK_REG_P (XEXP (note, 0)))
1498 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
1500 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
1502 lose = 1;
1504 done:
1507 #endif
1509 /* Don't allow old-style asm or volatile extended asms to be accepted
1510 for cross jumping purposes. It is conceptually correct to allow
1511 them, since cross-jumping preserves the dynamic instruction order
1512 even though it is changing the static instruction order. However,
1513 if an asm is being used to emit an assembler pseudo-op, such as
1514 the MIPS `.set reorder' pseudo-op, then the static instruction order
1515 matters and it must be preserved. */
1516 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
1517 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
1518 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
1519 lose = 1;
1521 if (lose || GET_CODE (p1) != GET_CODE (p2)
1522 || ! rtx_renumbered_equal_p (p1, p2))
1524 /* The following code helps take care of G++ cleanups. */
1525 rtx equiv1;
1526 rtx equiv2;
1528 if (!lose && GET_CODE (p1) == GET_CODE (p2)
1529 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
1530 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
1531 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
1532 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
1533 /* If the equivalences are not to a constant, they may
1534 reference pseudos that no longer exist, so we can't
1535 use them. */
1536 && CONSTANT_P (XEXP (equiv1, 0))
1537 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
1539 rtx s1 = single_set (i1);
1540 rtx s2 = single_set (i2);
1541 if (s1 != 0 && s2 != 0
1542 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
1544 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
1545 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
1546 if (! rtx_renumbered_equal_p (p1, p2))
1547 cancel_changes (0);
1548 else if (apply_change_group ())
1549 goto win;
1553 /* Insns fail to match; cross jumping is limited to the following
1554 insns. */
1556 #ifdef HAVE_cc0
1557 /* Don't allow the insn after a compare to be shared by
1558 cross-jumping unless the compare is also shared.
1559 Here, if either of these non-matching insns is a compare,
1560 exclude the following insn from possible cross-jumping. */
1561 if (sets_cc0_p (p1) || sets_cc0_p (p2))
1562 last1 = afterlast1, last2 = afterlast2, ++minimum;
1563 #endif
1565 /* If cross-jumping here will feed a jump-around-jump
1566 optimization, this jump won't cost extra, so reduce
1567 the minimum. */
1568 if (GET_CODE (i1) == JUMP_INSN
1569 && JUMP_LABEL (i1)
1570 && prev_real_insn (JUMP_LABEL (i1)) == e1)
1571 --minimum;
1572 break;
1575 win:
1576 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
1578 /* Ok, this insn is potentially includable in a cross-jump here. */
1579 afterlast1 = last1, afterlast2 = last2;
1580 last1 = i1, last2 = i2, --minimum;
/* Report the match only if it is long enough (MINIMUM exhausted) and
   nonempty; otherwise *F1/*F2 stay zero as initialized above.  */
1584 if (minimum <= 0 && last1 != 0 && last1 != e1)
1585 *f1 = last1, *f2 = last2;
/* Perform a cross jump discovered by find_cross_jump: redirect the jump
   INSN to a label placed before NEWLPOS, then delete the now-redundant
   insns from NEWJPOS up to (but not including) INSN.  */
1588 static void
1589 do_cross_jump (insn, newjpos, newlpos)
1590 rtx insn, newjpos, newlpos;
1592 /* Find an existing label at this point
1593 or make a new one if there is none. */
1594 register rtx label = get_label_before (newlpos);
1596 /* Make the same jump insn jump to the new point. */
1597 if (GET_CODE (PATTERN (insn)) == RETURN)
/* A RETURN cannot be redirected with redirect_jump; rewrite its pattern
   by hand and maintain the label use count and jump chain ourselves.  */
1599 /* Remove from jump chain of returns. */
1600 delete_from_jump_chain (insn);
1601 /* Change the insn. */
1602 PATTERN (insn) = gen_jump (label);
1603 INSN_CODE (insn) = -1;
1604 JUMP_LABEL (insn) = label;
1605 LABEL_NUSES (label)++;
1606 /* Add to new the jump chain. */
1607 if (INSN_UID (label) < max_jump_chain
1608 && INSN_UID (insn) < max_jump_chain)
1610 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
1611 jump_chain[INSN_UID (label)] = insn;
1614 else
1615 redirect_jump (insn, label, 1);
1617 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
1618 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
1619 the NEWJPOS stream. */
/* Walk both streams in lock step: delete the NEWJPOS copy, and strip
   unmatched equivalence notes from the surviving NEWLPOS copy.  */
1621 while (newjpos != insn)
1623 rtx lnote;
1625 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
1626 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
1627 || REG_NOTE_KIND (lnote) == REG_EQUIV)
1628 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
1629 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
1630 remove_note (newlpos, lnote);
1632 delete_insn (newjpos);
1633 newjpos = next_real_insn (newjpos);
1634 newlpos = next_real_insn (newlpos);
1638 /* Return the label before INSN, or put a new label there. */
1641 get_label_before (insn)
1642 rtx insn;
1644 rtx label;
1646 /* Find an existing label at this point
1647 or make a new one if there is none. */
1648 label = prev_nonnote_insn (insn);
1650 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1652 rtx prev = PREV_INSN (insn);
1654 label = gen_label_rtx ();
1655 emit_label_after (label, prev);
1656 LABEL_NUSES (label) = 0;
1658 return label;
1661 /* Return the label after INSN, or put a new label there. */
1664 get_label_after (insn)
1665 rtx insn;
1667 rtx label;
1669 /* Find an existing label at this point
1670 or make a new one if there is none. */
1671 label = next_nonnote_insn (insn);
1673 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1675 label = gen_label_rtx ();
1676 emit_label_after (label, insn);
1677 LABEL_NUSES (label) = 0;
1679 return label;
1682 /* Return 1 if INSN is a jump that jumps to right after TARGET
1683 only on the condition that TARGET itself would drop through.
1684 Assumes that TARGET is a conditional jump. */
1686 static int
1687 jump_back_p (insn, target)
1688 rtx insn, target;
1690 rtx cinsn, ctarget;
1691 enum rtx_code codei, codet;
1692 rtx set, tset;
1694 if (! any_condjump_p (insn)
1695 || any_uncondjump_p (target)
1696 || target != prev_real_insn (JUMP_LABEL (insn)))
1697 return 0;
1698 set = pc_set (insn);
1699 tset = pc_set (target);
1701 cinsn = XEXP (SET_SRC (set), 0);
1702 ctarget = XEXP (SET_SRC (tset), 0);
1704 codei = GET_CODE (cinsn);
1705 codet = GET_CODE (ctarget);
1707 if (XEXP (SET_SRC (set), 1) == pc_rtx)
1709 codei = reversed_comparison_code (cinsn, insn);
1710 if (codei == UNKNOWN)
1711 return 0;
1714 if (XEXP (SET_SRC (tset), 2) == pc_rtx)
1716 codet = reversed_comparison_code (ctarget, target);
1717 if (codei == UNKNOWN)
1718 return 0;
1721 return (codei == codet
1722 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
1723 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
1726 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return the code
1727 of the reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
1728 UNKNOWN may be returned in case we are having a CC_MODE compare and we don't
1729 know whether its source is a floating point or integer comparison. The machine
1730 description should define the REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
1731 to help this function avoid overhead in these cases. */
1732 enum rtx_code
1733 reversed_comparison_code_parts (code, arg0, arg1, insn)
1734 rtx insn, arg0, arg1;
1735 enum rtx_code code;
1737 enum machine_mode mode;
1739 /* If this is not actually a comparison, we can't reverse it. */
1740 if (GET_RTX_CLASS (code) != '<')
1741 return UNKNOWN;
1743 mode = GET_MODE (arg0);
1744 if (mode == VOIDmode)
1745 mode = GET_MODE (arg1);
1747 /* First see if the machine description supplies us a way to reverse the
1748 comparison. Give it priority over everything else to allow the machine
1749 description to do tricks. */
1750 #ifdef REVERSIBLE_CC_MODE
1751 if (GET_MODE_CLASS (mode) == MODE_CC
1752 && REVERSIBLE_CC_MODE (mode))
1754 #ifdef REVERSE_CONDITION
1755 return REVERSE_CONDITION (code, mode);
1756 #endif
1757 return reverse_condition (code);
1759 #endif
1761 /* Try a few special cases based on the comparison code. */
1762 switch (code)
1764 case GEU:
1765 case GTU:
1766 case LEU:
1767 case LTU:
1768 case NE:
1769 case EQ:
1770 /* It is always safe to reverse EQ and NE, even for the floating
1771 point. Similarly the unsigned comparisons are never used for
1772 floating point so we can reverse them in the default way. */
1773 return reverse_condition (code);
1774 case ORDERED:
1775 case UNORDERED:
1776 case LTGT:
1777 case UNEQ:
1778 /* In case we already see unordered comparison, we can be sure to
1779 be dealing with floating point so we don't need any more tests. */
1780 return reverse_condition_maybe_unordered (code);
1781 case UNLT:
1782 case UNLE:
1783 case UNGT:
1784 case UNGE:
1785 /* We don't have a safe way to reverse these yet - we would need
1786 ordered compares that may not trap. */
1787 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1788 || flag_unsafe_math_optimizations)
1789 return reverse_condition_maybe_unordered (code);
1790 return UNKNOWN;
1791 default:
1792 break;
1795 /* In case we give up IEEE compatibility, all comparisons are reversible. */
1796 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1797 || flag_unsafe_math_optimizations)
1798 return reverse_condition (code);
1800 if (GET_MODE_CLASS (mode) == MODE_CC
1801 #ifdef HAVE_cc0
1802 || arg0 == cc0_rtx
1803 #endif
1806 rtx prev;
1807 /* Try to search for the comparison to determine the real mode.
1808 This code is expensive, but with sane machine description it
1809 will be never used, since REVERSIBLE_CC_MODE will return true
1810 in all cases. */
1811 if (! insn)
1812 return UNKNOWN;
/* Walk backwards within the basic block looking for the insn that
   last set ARG0, to discover the mode of the actual comparison. */
1814 for (prev = prev_nonnote_insn (insn);
1815 prev != 0 && GET_CODE (prev) != CODE_LABEL;
1816 prev = prev_nonnote_insn (prev))
1818 rtx set = set_of (arg0, prev);
1819 if (set && GET_CODE (set) == SET
1820 && rtx_equal_p (SET_DEST (set), arg0))
1822 rtx src = SET_SRC (set);
1824 if (GET_CODE (src) == COMPARE)
1826 rtx comparison = src;
1827 arg0 = XEXP (src, 0);
1828 mode = GET_MODE (arg0);
1829 if (mode == VOIDmode)
1830 mode = GET_MODE (XEXP (comparison, 1));
1831 break;
1833 /* We can get past reg-reg moves. This may be useful for models
1834 of i387 comparisons that first move flag registers around. */
1835 if (REG_P (src))
1837 arg0 = src;
1838 continue;
1841 /* If the register is clobbered in some way we cannot analyze,
1842 give up. */
1843 if (set)
1844 return UNKNOWN;
1848 /* In case of floating point modes, we may reverse here, since
1849 we will be always converting an ordered compare to unordered.
1850 The unsafe code has been caught earlier. */
1851 if (FLOAT_MODE_P (mode))
1852 return reverse_condition_maybe_unordered (code);
1854 /* An integer condition. */
1855 if (GET_CODE (arg0) == CONST_INT
1856 || (GET_MODE (arg0) != VOIDmode
1857 && GET_MODE_CLASS (mode) != MODE_CC
1858 && ! FLOAT_MODE_P (mode)))
1859 return reverse_condition (code);
1861 return UNKNOWN;
1864 /* An wrapper around the previous function to take COMPARISON as rtx
1865 expression. This simplifies many callers. */
1866 enum rtx_code
1867 reversed_comparison_code (comparison, insn)
1868 rtx comparison, insn;
1870 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1871 return UNKNOWN;
1872 return reversed_comparison_code_parts (GET_CODE (comparison),
1873 XEXP (comparison, 0),
1874 XEXP (comparison, 1), insn);
1877 /* Given an rtx-code for a comparison, return the code for the negated
1878 comparison. If no such code exists, return UNKNOWN.
1880 WATCH OUT! reverse_condition is not safe to use on a jump that might
1881 be acting on the results of an IEEE floating point comparison, because
1882 of the special treatment of non-signaling nans in comparisons.
1883 Use reversed_comparison_code instead. */
1885 enum rtx_code
1886 reverse_condition (code)
1887 enum rtx_code code;
1889 switch (code)
1891 case EQ:
1892 return NE;
1893 case NE:
1894 return EQ;
1895 case GT:
1896 return LE;
1897 case GE:
1898 return LT;
1899 case LT:
1900 return GE;
1901 case LE:
1902 return GT;
1903 case GTU:
1904 return LEU;
1905 case GEU:
1906 return LTU;
1907 case LTU:
1908 return GEU;
1909 case LEU:
1910 return GTU;
1911 case UNORDERED:
1912 return ORDERED;
1913 case ORDERED:
1914 return UNORDERED;
1916 case UNLT:
1917 case UNLE:
1918 case UNGT:
1919 case UNGE:
1920 case UNEQ:
1921 case LTGT:
1922 return UNKNOWN;
1924 default:
1925 abort ();
1929 /* Similar, but we're allowed to generate unordered comparisons, which
1930 makes it safe for IEEE floating-point. Of course, we have to recognize
1931 that the target will support them too... */
1933 enum rtx_code
1934 reverse_condition_maybe_unordered (code)
1935 enum rtx_code code;
1937 /* Non-IEEE formats don't have unordered conditions. */
1938 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
1939 return reverse_condition (code);
1941 switch (code)
1943 case EQ:
1944 return NE;
1945 case NE:
1946 return EQ;
1947 case GT:
1948 return UNLE;
1949 case GE:
1950 return UNLT;
1951 case LT:
1952 return UNGE;
1953 case LE:
1954 return UNGT;
1955 case LTGT:
1956 return UNEQ;
1957 case UNORDERED:
1958 return ORDERED;
1959 case ORDERED:
1960 return UNORDERED;
1961 case UNLT:
1962 return GE;
1963 case UNLE:
1964 return GT;
1965 case UNGT:
1966 return LE;
1967 case UNGE:
1968 return LT;
1969 case UNEQ:
1970 return LTGT;
1972 default:
1973 abort ();
1977 /* Similar, but return the code when two operands of a comparison are swapped.
1978 This IS safe for IEEE floating-point. */
1980 enum rtx_code
1981 swap_condition (code)
1982 enum rtx_code code;
1984 switch (code)
1986 case EQ:
1987 case NE:
1988 case UNORDERED:
1989 case ORDERED:
1990 case UNEQ:
1991 case LTGT:
1992 return code;
1994 case GT:
1995 return LT;
1996 case GE:
1997 return LE;
1998 case LT:
1999 return GT;
2000 case LE:
2001 return GE;
2002 case GTU:
2003 return LTU;
2004 case GEU:
2005 return LEU;
2006 case LTU:
2007 return GTU;
2008 case LEU:
2009 return GEU;
2010 case UNLT:
2011 return UNGT;
2012 case UNLE:
2013 return UNGE;
2014 case UNGT:
2015 return UNLT;
2016 case UNGE:
2017 return UNLE;
2019 default:
2020 abort ();
2024 /* Given a comparison CODE, return the corresponding unsigned comparison.
2025 If CODE is an equality comparison or already an unsigned comparison,
2026 CODE is returned. */
2028 enum rtx_code
2029 unsigned_condition (code)
2030 enum rtx_code code;
2032 switch (code)
2034 case EQ:
2035 case NE:
2036 case GTU:
2037 case GEU:
2038 case LTU:
2039 case LEU:
2040 return code;
2042 case GT:
2043 return GTU;
2044 case GE:
2045 return GEU;
2046 case LT:
2047 return LTU;
2048 case LE:
2049 return LEU;
2051 default:
2052 abort ();
2056 /* Similarly, return the signed version of a comparison. */
2058 enum rtx_code
2059 signed_condition (code)
2060 enum rtx_code code;
2062 switch (code)
2064 case EQ:
2065 case NE:
2066 case GT:
2067 case GE:
2068 case LT:
2069 case LE:
2070 return code;
2072 case GTU:
2073 return GT;
2074 case GEU:
2075 return GE;
2076 case LTU:
2077 return LT;
2078 case LEU:
2079 return LE;
2081 default:
2082 abort ();
2086 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2087 truth of CODE1 implies the truth of CODE2. */
2090 comparison_dominates_p (code1, code2)
2091 enum rtx_code code1, code2;
2093 /* UNKNOWN comparison codes can happen as a result of trying to revert
2094 comparison codes.
2095 They can't match anything, so we have to reject them here. */
2096 if (code1 == UNKNOWN || code2 == UNKNOWN)
2097 return 0;
2099 if (code1 == code2)
2100 return 1;
2102 switch (code1)
2104 case UNEQ:
2105 if (code2 == UNLE || code2 == UNGE)
2106 return 1;
2107 break;
2109 case EQ:
2110 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
2111 || code2 == ORDERED)
2112 return 1;
2113 break;
2115 case UNLT:
2116 if (code2 == UNLE || code2 == NE)
2117 return 1;
2118 break;
2120 case LT:
2121 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2122 return 1;
2123 break;
2125 case UNGT:
2126 if (code2 == UNGE || code2 == NE)
2127 return 1;
2128 break;
2130 case GT:
2131 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2132 return 1;
2133 break;
2135 case GE:
2136 case LE:
2137 if (code2 == ORDERED)
2138 return 1;
2139 break;
2141 case LTGT:
2142 if (code2 == NE || code2 == ORDERED)
2143 return 1;
2144 break;
2146 case LTU:
2147 if (code2 == LEU || code2 == NE)
2148 return 1;
2149 break;
2151 case GTU:
2152 if (code2 == GEU || code2 == NE)
2153 return 1;
2154 break;
2156 case UNORDERED:
2157 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
2158 || code2 == UNGE || code2 == UNGT)
2159 return 1;
2160 break;
2162 default:
2163 break;
2166 return 0;
2169 /* Return 1 if INSN is an unconditional jump and nothing else. */
2172 simplejump_p (insn)
2173 rtx insn;
2175 return (GET_CODE (insn) == JUMP_INSN
2176 && GET_CODE (PATTERN (insn)) == SET
2177 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2178 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2181 /* Return nonzero if INSN is a (possibly) conditional jump
2182 and nothing more.
2184 Use this function is deprecated, since we need to support combined
2185 branch and compare insns. Use any_condjump_p instead whenever possible. */
2188 condjump_p (insn)
2189 rtx insn;
2191 register rtx x = PATTERN (insn);
2193 if (GET_CODE (x) != SET
2194 || GET_CODE (SET_DEST (x)) != PC)
2195 return 0;
2197 x = SET_SRC (x);
2198 if (GET_CODE (x) == LABEL_REF)
2199 return 1;
2200 else
2201 return (GET_CODE (x) == IF_THEN_ELSE
2202 && ((GET_CODE (XEXP (x, 2)) == PC
2203 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
2204 || GET_CODE (XEXP (x, 1)) == RETURN))
2205 || (GET_CODE (XEXP (x, 1)) == PC
2206 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
2207 || GET_CODE (XEXP (x, 2)) == RETURN))));
2209 return 0;
2212 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2213 PARALLEL.
2215 Use this function is deprecated, since we need to support combined
2216 branch and compare insns. Use any_condjump_p instead whenever possible. */
2219 condjump_in_parallel_p (insn)
2220 rtx insn;
2222 register rtx x = PATTERN (insn);
2224 if (GET_CODE (x) != PARALLEL)
2225 return 0;
2226 else
2227 x = XVECEXP (x, 0, 0);
2229 if (GET_CODE (x) != SET)
2230 return 0;
2231 if (GET_CODE (SET_DEST (x)) != PC)
2232 return 0;
2233 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2234 return 1;
2235 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2236 return 0;
2237 if (XEXP (SET_SRC (x), 2) == pc_rtx
2238 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2239 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2240 return 1;
2241 if (XEXP (SET_SRC (x), 1) == pc_rtx
2242 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2243 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2244 return 1;
2245 return 0;
2248 /* Return set of PC, otherwise NULL. */
2251 pc_set (insn)
2252 rtx insn;
2254 rtx pat;
2255 if (GET_CODE (insn) != JUMP_INSN)
2256 return NULL_RTX;
2257 pat = PATTERN (insn);
2259 /* The set is allowed to appear either as the insn pattern or
2260 the first set in a PARALLEL. */
2261 if (GET_CODE (pat) == PARALLEL)
2262 pat = XVECEXP (pat, 0, 0);
2263 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
2264 return pat;
2266 return NULL_RTX;
2269 /* Return true when insn is an unconditional direct jump,
2270 possibly bundled inside a PARALLEL. */
2273 any_uncondjump_p (insn)
2274 rtx insn;
2276 rtx x = pc_set (insn);
2277 if (!x)
2278 return 0;
2279 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
2280 return 0;
2281 return 1;
2284 /* Return true when insn is a conditional jump. This function works for
2285 instructions containing PC sets in PARALLELs. The instruction may have
2286 various other effects so before removing the jump you must verify
2287 onlyjump_p.
2289 Note that unlike condjump_p it returns false for unconditional jumps. */
2292 any_condjump_p (insn)
2293 rtx insn;
2295 rtx x = pc_set (insn);
2296 enum rtx_code a, b;
2298 if (!x)
2299 return 0;
2300 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2301 return 0;
2303 a = GET_CODE (XEXP (SET_SRC (x), 1));
2304 b = GET_CODE (XEXP (SET_SRC (x), 2));
2306 return ((b == PC && (a == LABEL_REF || a == RETURN))
2307 || (a == PC && (b == LABEL_REF || b == RETURN)));
2310 /* Return the label of a conditional jump. */
2313 condjump_label (insn)
2314 rtx insn;
2316 rtx x = pc_set (insn);
2318 if (!x)
2319 return NULL_RTX;
2320 x = SET_SRC (x);
2321 if (GET_CODE (x) == LABEL_REF)
2322 return x;
2323 if (GET_CODE (x) != IF_THEN_ELSE)
2324 return NULL_RTX;
2325 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
2326 return XEXP (x, 1);
2327 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
2328 return XEXP (x, 2);
2329 return NULL_RTX;
2332 /* Return true if INSN is a (possibly conditional) return insn. */
2334 static int
2335 returnjump_p_1 (loc, data)
2336 rtx *loc;
2337 void *data ATTRIBUTE_UNUSED;
2339 rtx x = *loc;
2340 return x && GET_CODE (x) == RETURN;
2344 returnjump_p (insn)
2345 rtx insn;
2347 if (GET_CODE (insn) != JUMP_INSN)
2348 return 0;
2349 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
2352 /* Return true if INSN is a jump that only transfers control and
2353 nothing more. */
2356 onlyjump_p (insn)
2357 rtx insn;
2359 rtx set;
2361 if (GET_CODE (insn) != JUMP_INSN)
2362 return 0;
2364 set = single_set (insn);
2365 if (set == NULL)
2366 return 0;
2367 if (GET_CODE (SET_DEST (set)) != PC)
2368 return 0;
2369 if (side_effects_p (SET_SRC (set)))
2370 return 0;
2372 return 1;
#ifdef HAVE_cc0

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (x)
     rtx x ATTRIBUTE_UNUSED;
{
  /* A lone SET of cc0 is the simple case.  */
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;

  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;

      /* Scan every element; SETs of anything other than cc0 count as
         "other things", while USEs and CLOBBERs are ignored.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          rtx elt = XVECEXP (x, 0, i);

          if (GET_CODE (elt) == SET)
            {
              if (SET_DEST (elt) == cc0_rtx)
                sets_cc0 = 1;
              else
                other_things = 1;
            }
        }

      if (! sets_cc0)
        return 0;
      return other_things ? -1 : 1;
    }

  return 0;
}
#endif
2407 /* Follow any unconditional jump at LABEL;
2408 return the ultimate label reached by any such chain of jumps.
2409 If LABEL is not followed by a jump, return LABEL.
2410 If the chain loops or we can't find end, return LABEL,
2411 since that tells caller to avoid changing the insn.
2413 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
2414 a USE or CLOBBER. */
2417 follow_jumps (label)
2418 rtx label;
2420 register rtx insn;
2421 register rtx next;
2422 register rtx value = label;
2423 register int depth;
/* Each iteration follows one hop: the first active insn after VALUE must
   be a simple unconditional jump (with a known JUMP_LABEL) or a RETURN
   pattern, immediately followed by a BARRIER.  The chase is capped at 10
   hops so cycles and pathological chains terminate.  */
2425 for (depth = 0;
2426 (depth < 10
2427 && (insn = next_active_insn (value)) != 0
2428 && GET_CODE (insn) == JUMP_INSN
2429 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
2430 && onlyjump_p (insn))
2431 || GET_CODE (PATTERN (insn)) == RETURN)
2432 && (next = NEXT_INSN (insn))
2433 && GET_CODE (next) == BARRIER);
2434 depth++)
2436 /* Don't chain through the insn that jumps into a loop
2437 from outside the loop,
2438 since that would create multiple loop entry jumps
2439 and prevent loop optimization. */
2440 rtx tem;
2441 if (!reload_completed)
2442 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
2443 if (GET_CODE (tem) == NOTE
2444 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
2445 /* ??? Optional. Disables some optimizations, but makes
2446 gcov output more accurate with -O. */
2447 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
2448 return value;
2450 /* If we have found a cycle, make the insn jump to itself. */
2451 if (JUMP_LABEL (insn) == label)
2452 return label;
/* Don't chain into a dispatch table; NOTE(review): presumably the table
   must remain reachable through its own preceding label -- confirm.  */
2454 tem = next_active_insn (JUMP_LABEL (insn));
2455 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2456 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2457 break;
2459 value = JUMP_LABEL (insn);
/* Hitting the depth cap may mean an undetected loop; report no change.  */
2461 if (depth == 10)
2462 return label;
2463 return value;
2466 /* Assuming that field IDX of X is a vector of label_refs,
2467 replace each of them by the ultimate label reached by it.
2468 Return nonzero if a change is made.
2469 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
2471 static int
2472 tension_vector_labels (x, idx)
2473 register rtx x;
2474 register int idx;
2476 int changed = 0;
2477 register int i;
2478 for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
2480 register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
2481 register rtx nlabel = follow_jumps (olabel);
2482 if (nlabel && nlabel != olabel)
2484 XEXP (XVECEXP (x, idx, i), 0) = nlabel;
2485 ++LABEL_NUSES (nlabel);
2486 if (--LABEL_NUSES (olabel) == 0)
2487 delete_insn (olabel);
2488 changed = 1;
2491 return changed;
2494 /* Find all CODE_LABELs referred to in X, and increment their use counts.
2495 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
2496 in INSN, then store one of them in JUMP_LABEL (INSN).
2497 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
2498 referenced in INSN, add a REG_LABEL note containing that label to INSN.
2499 Also, when there are consecutive labels, canonicalize on the last of them.
2501 Note that two labels separated by a loop-beginning note
2502 must be kept distinct if we have not yet done loop-optimization,
2503 because the gap between them is where loop-optimize
2504 will want to move invariant code to. CROSS_JUMP tells us
2505 that loop-optimization is done with.
2507 Once reload has completed (CROSS_JUMP non-zero), we need not consider
2508 two labels distinct if they are separated by only USE or CLOBBER insns. */
2510 void
2511 mark_jump_label (x, insn, cross_jump, in_mem)
2512 register rtx x;
2513 rtx insn;
2514 int cross_jump;
2515 int in_mem;
2517 register RTX_CODE code = GET_CODE (x);
2518 register int i;
2519 register const char *fmt;
/* Dispatch on the rtx code; codes that can contain no label reference
   return immediately without recursing.  */
2521 switch (code)
2523 case PC:
2524 case CC0:
2525 case REG:
2526 case SUBREG:
2527 case CONST_INT:
2528 case CONST_DOUBLE:
2529 case CLOBBER:
2530 case CALL:
2531 return;
/* Inside a MEM, a label reference is a data reference, not a jump
   target; IN_MEM records that for the SYMBOL_REF case below.  */
2533 case MEM:
2534 in_mem = 1;
2535 break;
2537 case SYMBOL_REF:
2538 if (!in_mem)
2539 return;
2541 /* If this is a constant-pool reference, see if it is a label. */
2542 if (CONSTANT_POOL_ADDRESS_P (x))
2543 mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
2544 break;
2546 case LABEL_REF:
2548 rtx label = XEXP (x, 0);
2549 rtx olabel = label;
2550 rtx note;
2551 rtx next;
2553 /* Ignore remaining references to unreachable labels that
2554 have been deleted. */
2555 if (GET_CODE (label) == NOTE
2556 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
2557 break;
2559 if (GET_CODE (label) != CODE_LABEL)
2560 abort ();
2562 /* Ignore references to labels of containing functions. */
2563 if (LABEL_REF_NONLOCAL_P (x))
2564 break;
2566 /* If there are other labels following this one,
2567 replace it with the last of the consecutive labels. */
2568 for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
2570 if (GET_CODE (next) == CODE_LABEL)
2571 label = next;
2572 else if (cross_jump && GET_CODE (next) == INSN
2573 && (GET_CODE (PATTERN (next)) == USE
2574 || GET_CODE (PATTERN (next)) == CLOBBER))
2575 continue;
2576 else if (GET_CODE (next) != NOTE)
2577 break;
2578 else if (! cross_jump
2579 && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
2580 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
2581 /* ??? Optional. Disables some optimizations, but
2582 makes gcov output more accurate with -O. */
2583 || (flag_test_coverage
2584 && NOTE_LINE_NUMBER (next) > 0)))
2585 break;
/* Point X at the canonical label and bump its use count (unless INSN
   has already been deleted).  */
2588 XEXP (x, 0) = label;
2589 if (! insn || ! INSN_DELETED_P (insn))
2590 ++LABEL_NUSES (label);
/* Record the label on INSN: JUMP_LABEL for a jump, otherwise a
   REG_LABEL note.  */
2592 if (insn)
2594 if (GET_CODE (insn) == JUMP_INSN)
2595 JUMP_LABEL (insn) = label;
2597 /* If we've changed OLABEL and we had a REG_LABEL note
2598 for it, update it as well. */
2599 else if (label != olabel
2600 && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
2601 XEXP (note, 0) = label;
2603 /* Otherwise, add a REG_LABEL note for LABEL unless there already
2604 is one. */
2605 else if (! find_reg_note (insn, REG_LABEL, label))
2607 /* This code used to ignore labels which refered to dispatch
2608 tables to avoid flow.c generating worse code.
2610 However, in the presense of global optimizations like
2611 gcse which call find_basic_blocks without calling
2612 life_analysis, not recording such labels will lead
2613 to compiler aborts because of inconsistencies in the
2614 flow graph. So we go ahead and record the label.
2616 It may also be the case that the optimization argument
2617 is no longer valid because of the more accurate cfg
2618 we build in find_basic_blocks -- it no longer pessimizes
2619 code when it finds a REG_LABEL note. */
2620 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
2621 REG_NOTES (insn));
2624 return;
2627 /* Do walk the labels in a vector, but not the first operand of an
2628 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
2629 case ADDR_VEC:
2630 case ADDR_DIFF_VEC:
2631 if (! INSN_DELETED_P (insn))
2633 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
2635 for (i = 0; i < XVECLEN (x, eltnum); i++)
2636 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
2637 cross_jump, in_mem);
2639 return;
2641 default:
2642 break;
/* Generic recursion into every sub-rtx of X.  */
2645 fmt = GET_RTX_FORMAT (code);
2646 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2648 if (fmt[i] == 'e')
2649 mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
2650 else if (fmt[i] == 'E')
2652 register int j;
2653 for (j = 0; j < XVECLEN (x, i); j++)
2654 mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
2659 /* If all INSN does is set the pc, delete it,
2660 and delete the insn that set the condition codes for it
2661 if that's what the previous thing was. */
2663 void
2664 delete_jump (insn)
2665 rtx insn;
2667 register rtx set = single_set (insn);
2669 if (set && GET_CODE (SET_DEST (set)) == PC)
2670 delete_computation (insn);
2673 /* Verify INSN is a BARRIER and delete it. */
2675 void
2676 delete_barrier (insn)
2677 rtx insn;
2679 if (GET_CODE (insn) != BARRIER)
2680 abort ();
2682 delete_insn (insn);
2685 /* Recursively delete prior insns that compute the value (used only by INSN
2686 which the caller is deleting) stored in the register mentioned by NOTE
2687 which is a REG_DEAD note associated with INSN. */
2689 static void
2690 delete_prior_computation (note, insn)
2691 rtx note;
2692 rtx insn;
2694 rtx our_prev;
/* REG is the (possibly multi-word hard) register that dies in INSN.  */
2695 rtx reg = XEXP (note, 0);
/* Walk backwards over ordinary insns and calls only; any other code
   (jumps, labels, barriers) terminates the scan.  */
2697 for (our_prev = prev_nonnote_insn (insn);
2698 our_prev && (GET_CODE (our_prev) == INSN
2699 || GET_CODE (our_prev) == CALL_INSN);
2700 our_prev = prev_nonnote_insn (our_prev))
2702 rtx pat = PATTERN (our_prev);
2704 /* If we reach a CALL which is not calling a const function
2705 or the callee pops the arguments, then give up. */
2706 if (GET_CODE (our_prev) == CALL_INSN
2707 && (! CONST_CALL_P (our_prev)
2708 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
2709 break;
2711 /* If we reach a SEQUENCE, it is too complex to try to
2712 do anything with it, so give up. */
2713 if (GET_CODE (pat) == SEQUENCE)
2714 break;
2716 if (GET_CODE (pat) == USE
2717 && GET_CODE (XEXP (pat, 0)) == INSN)
2718 /* reorg creates USEs that look like this. We leave them
2719 alone because reorg needs them for its own purposes. */
2720 break;
/* Found an insn that writes REG; decide whether the whole insn can go.  */
2722 if (reg_set_p (reg, pat))
2724 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
2725 break;
2727 if (GET_CODE (pat) == PARALLEL)
2729 /* If we find a SET of something else, we can't
2730 delete the insn. */
2732 int i;
2734 for (i = 0; i < XVECLEN (pat, 0); i++)
2736 rtx part = XVECEXP (pat, 0, i);
2738 if (GET_CODE (part) == SET
2739 && SET_DEST (part) != reg)
2740 break;
2743 if (i == XVECLEN (pat, 0))
2744 delete_computation (our_prev);
2746 else if (GET_CODE (pat) == SET
2747 && GET_CODE (SET_DEST (pat)) == REG)
/* Compare the hard-register ranges covered by the SET destination and
   by REG; pseudos always span exactly one "register".  */
2749 int dest_regno = REGNO (SET_DEST (pat));
2750 int dest_endregno
2751 = (dest_regno
2752 + (dest_regno < FIRST_PSEUDO_REGISTER
2753 ? HARD_REGNO_NREGS (dest_regno,
2754 GET_MODE (SET_DEST (pat))) : 1));
2755 int regno = REGNO (reg);
2756 int endregno
2757 = (regno
2758 + (regno < FIRST_PSEUDO_REGISTER
2759 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
/* The dying register fully covers the destination: the whole
   computation is dead.  */
2761 if (dest_regno >= regno
2762 && dest_endregno <= endregno)
2763 delete_computation (our_prev);
2765 /* We may have a multi-word hard register and some, but not
2766 all, of the words of the register are needed in subsequent
2767 insns. Write REG_UNUSED notes for those parts that were not
2768 needed. */
2769 else if (dest_regno <= regno
2770 && dest_endregno >= endregno)
2772 int i;
2774 REG_NOTES (our_prev)
2775 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
2776 REG_NOTES (our_prev));
2778 for (i = dest_regno; i < dest_endregno; i++)
2779 if (! find_regno_note (our_prev, REG_UNUSED, i))
2780 break;
2782 if (i == dest_endregno)
2783 delete_computation (our_prev);
2787 break;
2790 /* If PAT references the register that dies here, it is an
2791 additional use. Hence any prior SET isn't dead. However, this
2792 insn becomes the new place for the REG_DEAD note. */
2793 if (reg_overlap_mentioned_p (reg, pat))
2795 XEXP (note, 1) = REG_NOTES (our_prev);
2796 REG_NOTES (our_prev) = note;
2797 break;
2802 /* Delete INSN and recursively delete insns that compute values used only
2803 by INSN. This uses the REG_DEAD notes computed during flow analysis.
2804 If we are running before flow.c, we need do nothing since flow.c will
2805 delete dead code. We also can't know if the registers being used are
2806 dead or not at this point.
2808 Otherwise, look at all our REG_DEAD notes. If a previous insn does
2809 nothing other than set a register that dies in this insn, we can delete
2810 that insn as well.
2812 On machines with CC0, if CC0 is used in this insn, we may be able to
2813 delete the insn that set it. */
2815 static void
2816 delete_computation (insn)
2817 rtx insn;
2819 rtx note, next;
2821 #ifdef HAVE_cc0
2822 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
2824 rtx prev = prev_nonnote_insn (insn);
2825 /* We assume that at this stage
2826 CC's are always set explicitly
2827 and always immediately before the jump that
2828 will use them. So if the previous insn
2829 exists to set the CC's, delete it
2830 (unless it performs auto-increments, etc.). */
2831 if (prev && GET_CODE (prev) == INSN
2832 && sets_cc0_p (PATTERN (prev)))
/* sets_cc0_p > 0 means PREV does nothing but set cc0; it can be
   deleted outright when it is also free of side effects.  */
2834 if (sets_cc0_p (PATTERN (prev)) > 0
2835 && ! side_effects_p (PATTERN (prev)))
2836 delete_computation (prev);
2837 else
2838 /* Otherwise, show that cc0 won't be used. */
2839 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
2840 cc0_rtx, REG_NOTES (prev));
2843 #endif
/* Each register that dies in INSN may let us delete the earlier insn
   that computed it.  */
2845 for (note = REG_NOTES (insn); note; note = next)
2847 next = XEXP (note, 1);
2849 if (REG_NOTE_KIND (note) != REG_DEAD
2850 /* Verify that the REG_NOTE is legitimate. */
2851 || GET_CODE (XEXP (note, 0)) != REG)
2852 continue;
2854 delete_prior_computation (note, insn);
2857 delete_insn (insn);
2860 /* Delete insn INSN from the chain of insns and update label ref counts.
2861 May delete some following insns as a consequence; may even delete
2862 a label elsewhere and insns that follow it.
2864 Returns the first insn after INSN that was not deleted. */
2867 delete_insn (insn)
2868 register rtx insn;
2870 register rtx next = NEXT_INSN (insn);
2871 register rtx prev = PREV_INSN (insn);
2872 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
2873 register int dont_really_delete = 0;
2874 rtx note;
2876 while (next && INSN_DELETED_P (next))
2877 next = NEXT_INSN (next);
2879 /* This insn is already deleted => return first following nondeleted. */
2880 if (INSN_DELETED_P (insn))
2881 return next;
/* A deleted label can no longer serve as a nonlocal goto handler.  */
2883 if (was_code_label)
2884 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
2886 /* Don't delete user-declared labels. When optimizing, convert them
2887 to special NOTEs instead. When not optimizing, leave them alone. */
2888 if (was_code_label && LABEL_NAME (insn) != 0)
2890 if (! optimize)
2891 dont_really_delete = 1;
2892 else if (! dont_really_delete)
2894 const char *name = LABEL_NAME (insn);
2895 PUT_CODE (insn, NOTE);
2896 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
2897 NOTE_SOURCE_FILE (insn) = name;
2898 dont_really_delete = 1;
2901 else
2902 /* Mark this insn as deleted. */
2903 INSN_DELETED_P (insn) = 1;
2905 /* If this is an unconditional jump, delete it from the jump chain. */
2906 if (simplejump_p (insn))
2907 delete_from_jump_chain (insn);
2909 /* If instruction is followed by a barrier,
2910 delete the barrier too. */
2912 if (next != 0 && GET_CODE (next) == BARRIER)
2914 INSN_DELETED_P (next) = 1;
2915 next = NEXT_INSN (next);
2918 /* Patch out INSN (and the barrier if any) */
2920 if (! dont_really_delete)
2922 if (prev)
2924 NEXT_INSN (prev) = next;
/* If PREV ends a delay-slot SEQUENCE, the last insn inside it must
   also point past the deleted region.  */
2925 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
2926 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
2927 XVECLEN (PATTERN (prev), 0) - 1)) = next;
2930 if (next)
2932 PREV_INSN (next) = prev;
2933 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
2934 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
2937 if (prev && NEXT_INSN (prev) == 0)
2938 set_last_insn (prev);
2941 /* If deleting a jump, decrement the count of the label,
2942 and delete the label if it is now unused. */
2944 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
2946 rtx lab = JUMP_LABEL (insn), lab_next;
2948 if (--LABEL_NUSES (lab) == 0)
2950 /* This can delete NEXT or PREV,
2951 either directly if NEXT is JUMP_LABEL (INSN),
2952 or indirectly through more levels of jumps. */
2953 delete_insn (lab);
2955 /* I feel a little doubtful about this loop,
2956 but I see no clean and sure alternative way
2957 to find the first insn after INSN that is not now deleted.
2958 I hope this works. */
2959 while (next && INSN_DELETED_P (next))
2960 next = NEXT_INSN (next);
2961 return next;
2963 else if ((lab_next = next_nonnote_insn (lab)) != NULL
2964 && GET_CODE (lab_next) == JUMP_INSN
2965 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
2966 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
2968 /* If we're deleting the tablejump, delete the dispatch table.
2969 We may not be able to kill the label immediately preceeding
2970 just yet, as it might be referenced in code leading up to
2971 the tablejump. */
2972 delete_insn (lab_next);
2976 /* Likewise if we're deleting a dispatch table. */
2978 if (GET_CODE (insn) == JUMP_INSN
2979 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
2980 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
2982 rtx pat = PATTERN (insn);
2983 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
2984 int len = XVECLEN (pat, diff_vec_p);
2986 for (i = 0; i < len; i++)
2987 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
2988 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
2989 while (next && INSN_DELETED_P (next))
2990 next = NEXT_INSN (next);
2991 return next;
2994 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
2995 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2996 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
2997 if (REG_NOTE_KIND (note) == REG_LABEL
2998 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
2999 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
3000 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
3001 delete_insn (XEXP (note, 0));
/* Back up over deleted insns and notes to find the real predecessor.  */
3003 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
3004 prev = PREV_INSN (prev);
3006 /* If INSN was a label and a dispatch table follows it,
3007 delete the dispatch table. The tablejump must have gone already.
3008 It isn't useful to fall through into a table. */
3010 if (was_code_label
3011 && NEXT_INSN (insn) != 0
3012 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
3013 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
3014 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
3015 next = delete_insn (NEXT_INSN (insn));
3017 /* If INSN was a label, delete insns following it if now unreachable. */
3019 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
3021 register RTX_CODE code;
3022 while (next != 0
3023 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
3024 || code == NOTE || code == BARRIER
3025 || (code == CODE_LABEL && INSN_DELETED_P (next))))
3027 if (code == NOTE
3028 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
3029 next = NEXT_INSN (next);
3030 /* Keep going past other deleted labels to delete what follows. */
3031 else if (code == CODE_LABEL && INSN_DELETED_P (next))
3032 next = NEXT_INSN (next);
3033 else
3034 /* Note: if this deletes a jump, it can cause more
3035 deletion of unreachable code, after a different label.
3036 As long as the value from this recursive call is correct,
3037 this invocation functions correctly. */
3038 next = delete_insn (next);
3042 return next;
3045 /* Advance from INSN till reaching something not deleted
3046 then return that. May return INSN itself. */
3049 next_nondeleted_insn (insn)
3050 rtx insn;
3052 while (INSN_DELETED_P (insn))
3053 insn = NEXT_INSN (insn);
3054 return insn;
3057 /* Delete a range of insns from FROM to TO, inclusive.
3058 This is for the sake of peephole optimization, so assume
3059 that whatever these insns do will still be done by a new
3060 peephole insn that will replace them. */
3062 void
3063 delete_for_peephole (from, to)
3064 register rtx from, to;
3066 register rtx insn = from;
3068 while (1)
3070 register rtx next = NEXT_INSN (insn);
3071 register rtx prev = PREV_INSN (insn);
3073 if (GET_CODE (insn) != NOTE)
3075 INSN_DELETED_P (insn) = 1;
3077 /* Patch this insn out of the chain. */
3078 /* We don't do this all at once, because we
3079 must preserve all NOTEs. */
3080 if (prev)
3081 NEXT_INSN (prev) = next;
3083 if (next)
3084 PREV_INSN (next) = prev;
3087 if (insn == to)
3088 break;
3089 insn = next;
3092 /* Note that if TO is an unconditional jump
3093 we *do not* delete the BARRIER that follows,
3094 since the peephole that replaces this sequence
3095 is also an unconditional jump in that case. */
3098 /* We have determined that INSN is never reached, and are about to
3099 delete it. Print a warning if the user asked for one.
3101 To try to make this warning more useful, this should only be called
3102 once per basic block not reached, and it only warns when the basic
3103 block contains more than one line from the current function, and
3104 contains at least one operation. CSE and inlining can duplicate insns,
3105 so it's possible to get spurious warnings from this. */
3107 void
3108 never_reached_warning (avoided_insn)
3109 rtx avoided_insn;
3111 rtx insn;
3112 rtx a_line_note = NULL;
3113 int two_avoided_lines = 0;
3114 int contains_insn = 0;
3116 if (! warn_notreached)
3117 return;
3119 /* Scan forwards, looking at LINE_NUMBER notes, until
3120 we hit a LABEL or we run out of insns. */
3122 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
3124 if (GET_CODE (insn) == CODE_LABEL)
3125 break;
3126 else if (GET_CODE (insn) == NOTE /* A line number note? */
3127 && NOTE_LINE_NUMBER (insn) >= 0)
3129 if (a_line_note == NULL)
3130 a_line_note = insn;
3131 else
3132 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
3133 != NOTE_LINE_NUMBER (insn));
3135 else if (INSN_P (insn))
3136 contains_insn = 1;
3138 if (two_avoided_lines && contains_insn)
3139 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
3140 NOTE_LINE_NUMBER (a_line_note),
3141 "will never be executed");
3144 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
3145 NLABEL as a return. Accrue modifications into the change group. */
3147 static void
3148 redirect_exp_1 (loc, olabel, nlabel, insn)
3149 rtx *loc;
3150 rtx olabel, nlabel;
3151 rtx insn;
3153 register rtx x = *loc;
3154 register RTX_CODE code = GET_CODE (x);
3155 register int i;
3156 register const char *fmt;
/* A reference to OLABEL becomes a reference to NLABEL, or a RETURN
   when NLABEL is null.  The change is queued, not applied.  */
3158 if (code == LABEL_REF)
3160 if (XEXP (x, 0) == olabel)
3162 rtx n;
3163 if (nlabel)
3164 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3165 else
3166 n = gen_rtx_RETURN (VOIDmode);
3168 validate_change (insn, loc, n, 1);
3169 return;
/* Null OLABEL: a RETURN is rewritten into a jump to NLABEL, wrapped
   in a SET of the pc when it stands as the whole pattern.  */
3172 else if (code == RETURN && olabel == 0)
3174 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
3175 if (loc == &PATTERN (insn))
3176 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
3177 validate_change (insn, loc, x, 1);
3178 return;
/* Null NLABEL: a direct (set (pc) (label_ref OLABEL)) collapses to a
   bare RETURN.  */
3181 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
3182 && GET_CODE (SET_SRC (x)) == LABEL_REF
3183 && XEXP (SET_SRC (x), 0) == olabel)
3185 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
3186 return;
/* Recurse into every sub-rtx of X.  */
3189 fmt = GET_RTX_FORMAT (code);
3190 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3192 if (fmt[i] == 'e')
3193 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
3194 else if (fmt[i] == 'E')
3196 register int j;
3197 for (j = 0; j < XVECLEN (x, i); j++)
3198 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
3203 /* Similar, but apply the change group and report success or failure. */
3205 static int
3206 redirect_exp (olabel, nlabel, insn)
3207 rtx olabel, nlabel;
3208 rtx insn;
3210 rtx *loc;
3212 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3213 loc = &XVECEXP (PATTERN (insn), 0, 0);
3214 else
3215 loc = &PATTERN (insn);
3217 redirect_exp_1 (loc, olabel, nlabel, insn);
3218 if (num_validated_changes () == 0)
3219 return 0;
3221 return apply_change_group ();
3224 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3225 the modifications into the change group. Return false if we did
3226 not see how to do that. */
3229 redirect_jump_1 (jump, nlabel)
3230 rtx jump, nlabel;
3232 int ochanges = num_validated_changes ();
3233 rtx *loc;
3235 if (GET_CODE (PATTERN (jump)) == PARALLEL)
3236 loc = &XVECEXP (PATTERN (jump), 0, 0);
3237 else
3238 loc = &PATTERN (jump);
3240 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
3241 return num_validated_changes () > ochanges;
3244 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
3245 jump target label is unused as a result, it and the code following
3246 it may be deleted.
3248 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
3249 RETURN insn.
3251 The return value will be 1 if the change was made, 0 if it wasn't
3252 (this can only occur for NLABEL == 0). */
3255 redirect_jump (jump, nlabel, delete_unused)
3256 rtx jump, nlabel;
3257 int delete_unused;
3259 register rtx olabel = JUMP_LABEL (jump);
/* Redirecting to the current target is trivially successful.  */
3261 if (nlabel == olabel)
3262 return 1;
3264 if (! redirect_exp (olabel, nlabel, jump))
3265 return 0;
3267 /* If this is an unconditional branch, delete it from the jump_chain of
3268 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
3269 have UID's in range and JUMP_CHAIN is valid). */
3270 if (jump_chain && (simplejump_p (jump)
3271 || GET_CODE (PATTERN (jump)) == RETURN))
/* Chain index 0 is reserved for RETURN jumps (no target label).  */
3273 int label_index = nlabel ? INSN_UID (nlabel) : 0;
3275 delete_from_jump_chain (jump);
3276 if (label_index < max_jump_chain
3277 && INSN_UID (jump) < max_jump_chain)
3279 jump_chain[INSN_UID (jump)] = jump_chain[label_index];
3280 jump_chain[label_index] = jump;
/* Record the new target and keep the label use counts accurate.  */
3284 JUMP_LABEL (jump) = nlabel;
3285 if (nlabel)
3286 ++LABEL_NUSES (nlabel);
3288 /* If we're eliding the jump over exception cleanups at the end of a
3289 function, move the function end note so that -Wreturn-type works. */
3290 if (olabel && nlabel
3291 && NEXT_INSN (olabel)
3292 && GET_CODE (NEXT_INSN (olabel)) == NOTE
3293 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
3294 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
/* Drop the old label's use; delete it if now unused and permitted.  */
3296 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
3297 delete_insn (olabel);
3299 return 1;
3302 /* Invert the jump condition of rtx X contained in jump insn, INSN.
3303 Accrue the modifications into the change group. */
3305 static void
3306 invert_exp_1 (insn)
3307 rtx insn;
3309 register RTX_CODE code;
3310 rtx x = pc_set (insn);
/* The caller must pass a jump with a pc set; anything else is a bug.  */
3312 if (!x)
3313 abort ();
3314 x = SET_SRC (x);
3316 code = GET_CODE (x);
3318 if (code == IF_THEN_ELSE)
3320 register rtx comp = XEXP (x, 0);
3321 register rtx tem;
3322 enum rtx_code reversed_code;
3324 /* We can do this in two ways: The preferable way, which can only
3325 be done if this is not an integer comparison, is to reverse
3326 the comparison code. Otherwise, swap the THEN-part and ELSE-part
3327 of the IF_THEN_ELSE. If we can't do either, fail. */
3329 reversed_code = reversed_comparison_code (comp, insn);
3331 if (reversed_code != UNKNOWN)
3333 validate_change (insn, &XEXP (x, 0),
3334 gen_rtx_fmt_ee (reversed_code,
3335 GET_MODE (comp), XEXP (comp, 0),
3336 XEXP (comp, 1)),
3338 return;
/* Fall back to exchanging the two arms of the IF_THEN_ELSE.  */
3341 tem = XEXP (x, 1);
3342 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
3343 validate_change (insn, &XEXP (x, 2), tem, 1);
3345 else
3346 abort ();
3349 /* Invert the jump condition of conditional jump insn, INSN.
3351 Return 1 if we can do so, 0 if we cannot find a way to do so that
3352 matches a pattern. */
3354 static int
3355 invert_exp (insn)
3356 rtx insn;
3358 invert_exp_1 (insn);
3359 if (num_validated_changes () == 0)
3360 return 0;
3362 return apply_change_group ();
3365 /* Invert the condition of the jump JUMP, and make it jump to label
3366 NLABEL instead of where it jumps now. Accrue changes into the
3367 change group. Return false if we didn't see how to perform the
3368 inversion and redirection. */
3371 invert_jump_1 (jump, nlabel)
3372 rtx jump, nlabel;
3374 int ochanges;
3376 ochanges = num_validated_changes ();
3377 invert_exp_1 (jump);
3378 if (num_validated_changes () == ochanges)
3379 return 0;
3381 return redirect_jump_1 (jump, nlabel);
3384 /* Invert the condition of the jump JUMP, and make it jump to label
3385 NLABEL instead of where it jumps now. Return true if successful. */
3388 invert_jump (jump, nlabel, delete_unused)
3389 rtx jump, nlabel;
3390 int delete_unused;
3392 /* We have to either invert the condition and change the label or
3393 do neither. Either operation could fail. We first try to invert
3394 the jump. If that succeeds, we try changing the label. If that fails,
3395 we invert the jump back to what it was. */
3397 if (! invert_exp (jump))
3398 return 0;
3400 if (redirect_jump (jump, nlabel, delete_unused))
3402 /* An inverted jump means that a probability taken becomes a
3403 probability not taken. Subtract the branch probability from the
3404 probability base to convert it back to a taken probability. */
3406 rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3407 if (note)
3408 XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));
3410 return 1;
3413 if (! invert_exp (jump))
3414 /* This should just be putting it back the way it was. */
3415 abort ();
3417 return 0;
3420 /* Delete the instruction JUMP from any jump chain it might be on. */
3422 static void
3423 delete_from_jump_chain (jump)
3424 rtx jump;
3426 int index;
3427 rtx olabel = JUMP_LABEL (jump);
3429 /* Handle unconditional jumps. */
3430 if (jump_chain && olabel != 0
3431 && INSN_UID (olabel) < max_jump_chain
3432 && simplejump_p (jump))
3433 index = INSN_UID (olabel);
3434 /* Handle return insns. */
3435 else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
3436 index = 0;
3437 else
3438 return;
3440 if (jump_chain[index] == jump)
3441 jump_chain[index] = jump_chain[INSN_UID (jump)];
3442 else
3444 rtx insn;
3446 for (insn = jump_chain[index];
3447 insn != 0;
3448 insn = jump_chain[INSN_UID (insn)])
3449 if (jump_chain[INSN_UID (insn)] == jump)
3451 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
3452 break;
3457 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
3459 If the old jump target label (before the dispatch table) becomes unused,
3460 it and the dispatch table may be deleted. In that case, find the insn
3461 before the jump references that label and delete it and logical successors
3462 too. */
3464 static void
3465 redirect_tablejump (jump, nlabel)
3466 rtx jump, nlabel;
3468 register rtx olabel = JUMP_LABEL (jump);
3469 rtx *notep, note, next;
3471 /* Add this jump to the jump_chain of NLABEL. */
3472 if (jump_chain && INSN_UID (nlabel) < max_jump_chain
3473 && INSN_UID (jump) < max_jump_chain)
3475 jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
3476 jump_chain[INSN_UID (nlabel)] = jump;
3479 for (notep = &REG_NOTES (jump), note = *notep; note; note = next)
3481 next = XEXP (note, 1);
3483 if (REG_NOTE_KIND (note) != REG_DEAD
3484 /* Verify that the REG_NOTE is legitimate. */
3485 || GET_CODE (XEXP (note, 0)) != REG
3486 || ! reg_mentioned_p (XEXP (note, 0), PATTERN (jump)))
3487 notep = &XEXP (note, 1);
3488 else
3490 delete_prior_computation (note, jump);
3491 *notep = next;
3495 PATTERN (jump) = gen_jump (nlabel);
3496 JUMP_LABEL (jump) = nlabel;
3497 ++LABEL_NUSES (nlabel);
3498 INSN_CODE (jump) = -1;
3500 if (--LABEL_NUSES (olabel) == 0)
3502 delete_labelref_insn (jump, olabel, 0);
3503 delete_insn (olabel);
3507 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
3508 If we found one, delete it and then delete this insn if DELETE_THIS is
3509 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
3511 static int
3512 delete_labelref_insn (insn, label, delete_this)
3513 rtx insn, label;
3514 int delete_this;
3516 int deleted = 0;
3517 rtx link;
3519 if (GET_CODE (insn) != NOTE
3520 && reg_mentioned_p (label, PATTERN (insn)))
3522 if (delete_this)
3524 delete_insn (insn);
3525 deleted = 1;
3527 else
3528 return 1;
3531 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
3532 if (delete_labelref_insn (XEXP (link, 0), label, 1))
3534 if (delete_this)
3536 delete_insn (insn);
3537 deleted = 1;
3539 else
3540 return 1;
3543 return deleted;
3546 /* Like rtx_equal_p except that it considers two REGs as equal
3547 if they renumber to the same value and considers two commutative
3548 operations to be the same if the order of the operands has been
3549 reversed.
3551 ??? Addition is not commutative on the PA due to the weird implicit
3552 space register selection rules for memory addresses. Therefore, we
3553 don't consider a + b == b + a.
3555 We could/should make this test a little tighter. Possibly only
3556 disabling it on the PA via some backend macro or only disabling this
3557 case when the PLUS is inside a MEM. */
3560 rtx_renumbered_equal_p (x, y)
3561 rtx x, y;
3563 register int i;
3564 register RTX_CODE code = GET_CODE (x);
3565 register const char *fmt;
3567 if (x == y)
3568 return 1;
3570 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3571 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3572 && GET_CODE (SUBREG_REG (y)) == REG)))
3574 int reg_x = -1, reg_y = -1;
3575 int word_x = 0, word_y = 0;
3577 if (GET_MODE (x) != GET_MODE (y))
3578 return 0;
3580 /* If we haven't done any renumbering, don't
3581 make any assumptions. */
3582 if (reg_renumber == 0)
3583 return rtx_equal_p (x, y);
3585 if (code == SUBREG)
3587 reg_x = REGNO (SUBREG_REG (x));
3588 word_x = SUBREG_WORD (x);
3590 if (reg_renumber[reg_x] >= 0)
3592 reg_x = reg_renumber[reg_x] + word_x;
3593 word_x = 0;
3597 else
3599 reg_x = REGNO (x);
3600 if (reg_renumber[reg_x] >= 0)
3601 reg_x = reg_renumber[reg_x];
3604 if (GET_CODE (y) == SUBREG)
3606 reg_y = REGNO (SUBREG_REG (y));
3607 word_y = SUBREG_WORD (y);
3609 if (reg_renumber[reg_y] >= 0)
3611 reg_y = reg_renumber[reg_y];
3612 word_y = 0;
3616 else
3618 reg_y = REGNO (y);
3619 if (reg_renumber[reg_y] >= 0)
3620 reg_y = reg_renumber[reg_y];
3623 return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
3626 /* Now we have disposed of all the cases
3627 in which different rtx codes can match. */
3628 if (code != GET_CODE (y))
3629 return 0;
3631 switch (code)
3633 case PC:
3634 case CC0:
3635 case ADDR_VEC:
3636 case ADDR_DIFF_VEC:
3637 return 0;
3639 case CONST_INT:
3640 return INTVAL (x) == INTVAL (y);
3642 case LABEL_REF:
3643 /* We can't assume nonlocal labels have their following insns yet. */
3644 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3645 return XEXP (x, 0) == XEXP (y, 0);
3647 /* Two label-refs are equivalent if they point at labels
3648 in the same position in the instruction stream. */
3649 return (next_real_insn (XEXP (x, 0))
3650 == next_real_insn (XEXP (y, 0)));
3652 case SYMBOL_REF:
3653 return XSTR (x, 0) == XSTR (y, 0);
3655 case CODE_LABEL:
3656 /* If we didn't match EQ equality above, they aren't the same. */
3657 return 0;
3659 default:
3660 break;
3663 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3665 if (GET_MODE (x) != GET_MODE (y))
3666 return 0;
3668 /* For commutative operations, the RTX match if the operand match in any
3669 order. Also handle the simple binary and unary cases without a loop.
3671 ??? Don't consider PLUS a commutative operator; see comments above. */
3672 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3673 && code != PLUS)
3674 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3675 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3676 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3677 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3678 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3679 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3680 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3681 else if (GET_RTX_CLASS (code) == '1')
3682 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3684 /* Compare the elements. If any pair of corresponding elements
3685 fail to match, return 0 for the whole things. */
3687 fmt = GET_RTX_FORMAT (code);
3688 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3690 register int j;
3691 switch (fmt[i])
3693 case 'w':
3694 if (XWINT (x, i) != XWINT (y, i))
3695 return 0;
3696 break;
3698 case 'i':
3699 if (XINT (x, i) != XINT (y, i))
3700 return 0;
3701 break;
3703 case 's':
3704 if (strcmp (XSTR (x, i), XSTR (y, i)))
3705 return 0;
3706 break;
3708 case 'e':
3709 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3710 return 0;
3711 break;
3713 case 'u':
3714 if (XEXP (x, i) != XEXP (y, i))
3715 return 0;
3716 /* fall through. */
3717 case '0':
3718 break;
3720 case 'E':
3721 if (XVECLEN (x, i) != XVECLEN (y, i))
3722 return 0;
3723 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3724 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3725 return 0;
3726 break;
3728 default:
3729 abort ();
3732 return 1;
3735 /* If X is a hard register or equivalent to one or a subregister of one,
3736 return the hard register number. If X is a pseudo register that was not
3737 assigned a hard register, return the pseudo register number. Otherwise,
3738 return -1. Any rtx is valid for X. */
3741 true_regnum (x)
3742 rtx x;
3744 if (GET_CODE (x) == REG)
3746 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3747 return reg_renumber[REGNO (x)];
3748 return REGNO (x);
3750 if (GET_CODE (x) == SUBREG)
3752 int base = true_regnum (SUBREG_REG (x));
3753 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3754 return SUBREG_WORD (x) + base;
3756 return -1;
/* Optimize code of the form:
	for (x = a[i]; x; ...)
	  ...
	for (x = a[i]; x; ...)
	  ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

static int *same_regs;

static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
3802 /* Called via note_stores on each insn between the target of the first
3803 branch and the second branch. It marks any changed registers. */
3805 static void
3806 mark_modified_reg (dest, x, data)
3807 rtx dest;
3808 rtx x ATTRIBUTE_UNUSED;
3809 void *data ATTRIBUTE_UNUSED;
3811 int regno;
3812 unsigned int i;
3814 if (GET_CODE (dest) == SUBREG)
3815 dest = SUBREG_REG (dest);
3817 if (GET_CODE (dest) == MEM)
3818 modified_mem = 1;
3820 if (GET_CODE (dest) != REG)
3821 return;
3823 regno = REGNO (dest);
3824 if (regno >= FIRST_PSEUDO_REGISTER)
3825 modified_regs[regno] = 1;
3826 else
3827 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3828 modified_regs[regno + i] = 1;
3831 /* F is the first insn in the chain of insns. */
3833 void
3834 thread_jumps (f, max_reg, flag_before_loop)
3835 rtx f;
3836 int max_reg;
3837 int flag_before_loop;
3839 /* Basic algorithm is to find a conditional branch,
3840 the label it may branch to, and the branch after
3841 that label. If the two branches test the same condition,
3842 walk back from both branch paths until the insn patterns
3843 differ, or code labels are hit. If we make it back to
3844 the target of the first branch, then we know that the first branch
3845 will either always succeed or always fail depending on the relative
3846 senses of the two branches. So adjust the first branch accordingly
3847 in this case. */
3849 rtx label, b1, b2, t1, t2;
3850 enum rtx_code code1, code2;
3851 rtx b1op0, b1op1, b2op0, b2op1;
3852 int changed = 1;
3853 int i;
3854 int *all_reset;
3855 enum rtx_code reversed_code1, reversed_code2;
3857 /* Allocate register tables and quick-reset table. */
3858 modified_regs = (char *) xmalloc (max_reg * sizeof (char));
3859 same_regs = (int *) xmalloc (max_reg * sizeof (int));
3860 all_reset = (int *) xmalloc (max_reg * sizeof (int));
3861 for (i = 0; i < max_reg; i++)
3862 all_reset[i] = -1;
3864 while (changed)
3866 changed = 0;
3868 for (b1 = f; b1; b1 = NEXT_INSN (b1))
3870 rtx set;
3871 rtx set2;
3873 /* Get to a candidate branch insn. */
3874 if (GET_CODE (b1) != JUMP_INSN
3875 || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
3876 continue;
3878 memset (modified_regs, 0, max_reg * sizeof (char));
3879 modified_mem = 0;
3881 memcpy (same_regs, all_reset, max_reg * sizeof (int));
3882 num_same_regs = 0;
3884 label = JUMP_LABEL (b1);
3886 /* Look for a branch after the target. Record any registers and
3887 memory modified between the target and the branch. Stop when we
3888 get to a label since we can't know what was changed there. */
3889 for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
3891 if (GET_CODE (b2) == CODE_LABEL)
3892 break;
3894 else if (GET_CODE (b2) == JUMP_INSN)
3896 /* If this is an unconditional jump and is the only use of
3897 its target label, we can follow it. */
3898 if (any_uncondjump_p (b2)
3899 && onlyjump_p (b2)
3900 && JUMP_LABEL (b2) != 0
3901 && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
3903 b2 = JUMP_LABEL (b2);
3904 continue;
3906 else
3907 break;
3910 if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
3911 continue;
3913 if (GET_CODE (b2) == CALL_INSN)
3915 modified_mem = 1;
3916 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3917 if (call_used_regs[i] && ! fixed_regs[i]
3918 && i != STACK_POINTER_REGNUM
3919 && i != FRAME_POINTER_REGNUM
3920 && i != HARD_FRAME_POINTER_REGNUM
3921 && i != ARG_POINTER_REGNUM)
3922 modified_regs[i] = 1;
3925 note_stores (PATTERN (b2), mark_modified_reg, NULL);
3928 /* Check the next candidate branch insn from the label
3929 of the first. */
3930 if (b2 == 0
3931 || GET_CODE (b2) != JUMP_INSN
3932 || b2 == b1
3933 || !any_condjump_p (b2)
3934 || !onlyjump_p (b2))
3935 continue;
3936 set = pc_set (b1);
3937 set2 = pc_set (b2);
3939 /* Get the comparison codes and operands, reversing the
3940 codes if appropriate. If we don't have comparison codes,
3941 we can't do anything. */
3942 b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
3943 b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
3944 code1 = GET_CODE (XEXP (SET_SRC (set), 0));
3945 reversed_code1 = code1;
3946 if (XEXP (SET_SRC (set), 1) == pc_rtx)
3947 code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
3948 else
3949 reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
3951 b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
3952 b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
3953 code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
3954 reversed_code2 = code2;
3955 if (XEXP (SET_SRC (set2), 1) == pc_rtx)
3956 code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
3957 else
3958 reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
3960 /* If they test the same things and knowing that B1 branches
3961 tells us whether or not B2 branches, check if we
3962 can thread the branch. */
3963 if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
3964 && rtx_equal_for_thread_p (b1op1, b2op1, b2)
3965 && (comparison_dominates_p (code1, code2)
3966 || comparison_dominates_p (code1, reversed_code2)))
3969 t1 = prev_nonnote_insn (b1);
3970 t2 = prev_nonnote_insn (b2);
3972 while (t1 != 0 && t2 != 0)
3974 if (t2 == label)
3976 /* We have reached the target of the first branch.
3977 If there are no pending register equivalents,
3978 we know that this branch will either always
3979 succeed (if the senses of the two branches are
3980 the same) or always fail (if not). */
3981 rtx new_label;
3983 if (num_same_regs != 0)
3984 break;
3986 if (comparison_dominates_p (code1, code2))
3987 new_label = JUMP_LABEL (b2);
3988 else
3989 new_label = get_label_after (b2);
3991 if (JUMP_LABEL (b1) != new_label)
3993 rtx prev = PREV_INSN (new_label);
3995 if (flag_before_loop
3996 && GET_CODE (prev) == NOTE
3997 && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
3999 /* Don't thread to the loop label. If a loop
4000 label is reused, loop optimization will
4001 be disabled for that loop. */
4002 new_label = gen_label_rtx ();
4003 emit_label_after (new_label, PREV_INSN (prev));
4005 changed |= redirect_jump (b1, new_label, 1);
4007 break;
4010 /* If either of these is not a normal insn (it might be
4011 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
4012 have already been skipped above.) Similarly, fail
4013 if the insns are different. */
4014 if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
4015 || recog_memoized (t1) != recog_memoized (t2)
4016 || ! rtx_equal_for_thread_p (PATTERN (t1),
4017 PATTERN (t2), t2))
4018 break;
4020 t1 = prev_nonnote_insn (t1);
4021 t2 = prev_nonnote_insn (t2);
4027 /* Clean up. */
4028 free (modified_regs);
4029 free (same_regs);
4030 free (all_reset);
4033 /* This is like RTX_EQUAL_P except that it knows about our handling of
4034 possibly equivalent registers and knows to consider volatile and
4035 modified objects as not equal.
4037 YINSN is the insn containing Y. */
4040 rtx_equal_for_thread_p (x, y, yinsn)
4041 rtx x, y;
4042 rtx yinsn;
4044 register int i;
4045 register int j;
4046 register enum rtx_code code;
4047 register const char *fmt;
4049 code = GET_CODE (x);
4050 /* Rtx's of different codes cannot be equal. */
4051 if (code != GET_CODE (y))
4052 return 0;
4054 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
4055 (REG:SI x) and (REG:HI x) are NOT equivalent. */
4057 if (GET_MODE (x) != GET_MODE (y))
4058 return 0;
4060 /* For floating-point, consider everything unequal. This is a bit
4061 pessimistic, but this pass would only rarely do anything for FP
4062 anyway. */
4063 if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
4064 && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
4065 return 0;
4067 /* For commutative operations, the RTX match if the operand match in any
4068 order. Also handle the simple binary and unary cases without a loop. */
4069 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
4070 return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4071 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
4072 || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
4073 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
4074 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
4075 return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
4076 && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
4077 else if (GET_RTX_CLASS (code) == '1')
4078 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4080 /* Handle special-cases first. */
4081 switch (code)
4083 case REG:
4084 if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
4085 return 1;
4087 /* If neither is user variable or hard register, check for possible
4088 equivalence. */
4089 if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
4090 || REGNO (x) < FIRST_PSEUDO_REGISTER
4091 || REGNO (y) < FIRST_PSEUDO_REGISTER)
4092 return 0;
4094 if (same_regs[REGNO (x)] == -1)
4096 same_regs[REGNO (x)] = REGNO (y);
4097 num_same_regs++;
4099 /* If this is the first time we are seeing a register on the `Y'
4100 side, see if it is the last use. If not, we can't thread the
4101 jump, so mark it as not equivalent. */
4102 if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
4103 return 0;
4105 return 1;
4107 else
4108 return (same_regs[REGNO (x)] == (int) REGNO (y));
4110 break;
4112 case MEM:
4113 /* If memory modified or either volatile, not equivalent.
4114 Else, check address. */
4115 if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4116 return 0;
4118 return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);
4120 case ASM_INPUT:
4121 if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
4122 return 0;
4124 break;
4126 case SET:
4127 /* Cancel a pending `same_regs' if setting equivalenced registers.
4128 Then process source. */
4129 if (GET_CODE (SET_DEST (x)) == REG
4130 && GET_CODE (SET_DEST (y)) == REG)
4132 if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
4134 same_regs[REGNO (SET_DEST (x))] = -1;
4135 num_same_regs--;
4137 else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
4138 return 0;
4140 else
4142 if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
4143 return 0;
4146 return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);
4148 case LABEL_REF:
4149 return XEXP (x, 0) == XEXP (y, 0);
4151 case SYMBOL_REF:
4152 return XSTR (x, 0) == XSTR (y, 0);
4154 default:
4155 break;
4158 if (x == y)
4159 return 1;
4161 fmt = GET_RTX_FORMAT (code);
4162 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4164 switch (fmt[i])
4166 case 'w':
4167 if (XWINT (x, i) != XWINT (y, i))
4168 return 0;
4169 break;
4171 case 'n':
4172 case 'i':
4173 if (XINT (x, i) != XINT (y, i))
4174 return 0;
4175 break;
4177 case 'V':
4178 case 'E':
4179 /* Two vectors must have the same length. */
4180 if (XVECLEN (x, i) != XVECLEN (y, i))
4181 return 0;
4183 /* And the corresponding elements must match. */
4184 for (j = 0; j < XVECLEN (x, i); j++)
4185 if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
4186 XVECEXP (y, i, j), yinsn) == 0)
4187 return 0;
4188 break;
4190 case 'e':
4191 if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
4192 return 0;
4193 break;
4195 case 'S':
4196 case 's':
4197 if (strcmp (XSTR (x, i), XSTR (y, i)))
4198 return 0;
4199 break;
4201 case 'u':
4202 /* These are just backpointers, so they don't matter. */
4203 break;
4205 case '0':
4206 case 't':
4207 break;
4209 /* It is believed that rtx's at this level will never
4210 contain anything but integers and other rtx's,
4211 except for within LABEL_REFs and SYMBOL_REFs. */
4212 default:
4213 abort ();
4216 return 1;