* gcc.dg/cpp/sysmac1.c,sysmac2.c: Return to original file.
[official-gcc.git] / gcc / jump.c
blob314a20bb0290f9649d59c80d9c6c31aaa42189d1
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
37 at by later passes.
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
54 #include "config.h"
55 #include "system.h"
56 #include "rtl.h"
57 #include "tm_p.h"
58 #include "flags.h"
59 #include "hard-reg-set.h"
60 #include "regs.h"
61 #include "insn-config.h"
62 #include "insn-attr.h"
63 #include "recog.h"
64 #include "function.h"
65 #include "expr.h"
66 #include "real.h"
67 #include "except.h"
68 #include "toplev.h"
69 #include "reload.h"
71 /* ??? Eventually must record somehow the labels used by jumps
72 from nested functions. */
73 /* Pre-record the next or previous real insn for each label?
74 No, this pass is very fast anyway. */
75 /* Condense consecutive labels?
76 This would make life analysis faster, maybe. */
77 /* Optimize jump y; x: ... y: jumpif... x?
78 Don't know if it is worth bothering with. */
79 /* Optimize two cases of conditional jump to conditional jump?
80 This can never delete any instruction or make anything dead,
81 or even change what is live at any point.
82 So perhaps let combiner do it. */
84 /* Vector indexed by uid.
85 For each CODE_LABEL, index by its uid to get first unconditional jump
86 that jumps to the label.
87 For each JUMP_INSN, index by its uid to get the next unconditional jump
88 that jumps to the same label.
89 Element 0 is the start of a chain of all return insns.
90 (It is safe to use element 0 because insn uid 0 is not used.) */
92 static rtx *jump_chain;
94 /* Maximum index in jump_chain.  (Allocated in jump_optimize_1; freed at
its `end:' label, so it is only valid during a pass of this file.) */
96 static int max_jump_chain;
98 /* Indicates whether death notes are significant in cross jump analysis.
99 Normally they are not significant, because if A and B jump to C,
100 and R dies in A, it must die in B. But this might not be true after
101 stack register conversion, and we must compare death notes in that
102 case. */
104 static int cross_jump_death_matters = 0;
/* Forward declarations for the static helpers defined in this file.
   NOTE(review): PARAMS is presumably the usual GCC prototype-compat
   macro from system.h (expands to nothing for pre-ANSI compilers) --
   confirm against the system.h included above.  */
106 static int init_label_info PARAMS ((rtx));
107 static void delete_barrier_successors PARAMS ((rtx));
108 static void mark_all_labels PARAMS ((rtx, int));
109 static rtx delete_unreferenced_labels PARAMS ((rtx));
110 static void delete_noop_moves PARAMS ((rtx));
111 static int duplicate_loop_exit_test PARAMS ((rtx));
112 static void find_cross_jump PARAMS ((rtx, rtx, int, rtx *, rtx *));
113 static void do_cross_jump PARAMS ((rtx, rtx, rtx));
114 static int jump_back_p PARAMS ((rtx, rtx));
115 static int tension_vector_labels PARAMS ((rtx, int));
116 static void delete_computation PARAMS ((rtx));
117 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
118 static int redirect_exp PARAMS ((rtx, rtx, rtx));
119 static void invert_exp_1 PARAMS ((rtx));
120 static int invert_exp PARAMS ((rtx));
121 static void delete_from_jump_chain PARAMS ((rtx));
122 static int delete_labelref_insn PARAMS ((rtx, rtx, int));
123 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
124 static void redirect_tablejump PARAMS ((rtx, rtx));
125 static void jump_optimize_1 PARAMS ((rtx, int, int, int, int, int));
126 static int returnjump_p_1 PARAMS ((rtx *, void *));
127 static void delete_prior_computation PARAMS ((rtx, rtx));
129 /* Main external entry point into the jump optimizer. See comments before
130 jump_optimize_1 for descriptions of the arguments. */
131 void
132 jump_optimize (f, cross_jump, noop_moves, after_regscan)
133 rtx f;
134 int cross_jump;
135 int noop_moves;
136 int after_regscan;
/* Full optimization: MARK_LABELS_ONLY == 0 and MINIMAL == 0.  */
138 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0, 0);
141 /* Alternate entry into the jump optimizer. This entry point only rebuilds
142 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
143 instructions. */
144 void
145 rebuild_jump_labels (f)
146 rtx f;
/* MARK_LABELS_ONLY == 1: jump_optimize_1 quits right after
mark_all_labels, so no insns are changed.  */
148 jump_optimize_1 (f, 0, 0, 0, 1, 0);
151 /* Alternate entry into the jump optimizer. Do only trivial optimizations. */
153 void
154 jump_optimize_minimal (f)
155 rtx f;
/* MINIMAL == 1: see the list of trivial optimizations in the comment
before jump_optimize_1.  */
157 jump_optimize_1 (f, 0, 0, 0, 0, 1);
160 /* Delete no-op jumps and optimize jumps to jumps
161 and jumps around jumps.
162 Delete unused labels and unreachable code.
164 If CROSS_JUMP is 1, detect matching code
165 before a jump and its destination and unify them.
166 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
168 If NOOP_MOVES is nonzero, delete no-op move insns.
170 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
171 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
173 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
174 and JUMP_LABEL field for jumping insns.
176 If `optimize' is zero, don't change any code,
177 just determine whether control drops off the end of the function.
178 This case occurs when we have -W and not -O.
179 It works because `delete_insn' checks the value of `optimize'
180 and refrains from actually deleting when that is 0.
182 If MINIMAL is nonzero, then we only perform trivial optimizations:
184 * Removal of unreachable code after BARRIERs.
185 * Removal of unreferenced CODE_LABELs.
186 * Removal of a jump to the next instruction.
187 * Removal of a conditional jump followed by an unconditional jump
188 to the same target as the conditional jump.
189 * Simplify a conditional jump around an unconditional jump.
190 * Simplify a jump to a jump.
191 * Delete extraneous line number notes.
194 static void
195 jump_optimize_1 (f, cross_jump, noop_moves, after_regscan,
196 mark_labels_only, minimal)
197 rtx f;
198 int cross_jump;
199 int noop_moves;
200 int after_regscan;
201 int mark_labels_only;
202 int minimal;
204 register rtx insn, next;
205 int changed;
206 int old_max_reg;
207 int first = 1;
208 int max_uid = 0;
209 rtx last_insn;
210 #ifdef HAVE_trap
211 enum rtx_code reversed_code;
212 #endif
/* Tell find_cross_jump (via the file-scope flag) whether death notes
must match; they do only when CROSS_JUMP == 2 (after stack-reg
conversion -- see the comment on cross_jump_death_matters).  */
214 cross_jump_death_matters = (cross_jump == 2);
215 max_uid = init_label_info (f) + 1;
217 /* Leave some extra room for labels and duplicate exit test insns
218 we make. */
219 max_jump_chain = max_uid * 14 / 10;
220 jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
222 mark_all_labels (f, cross_jump);
224 /* Keep track of labels used from static data; we don't track them
225 closely enough to delete them here, so make sure their reference
226 count doesn't drop to zero. */
228 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
229 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
230 LABEL_NUSES (XEXP (insn, 0))++;
232 /* Keep track of labels used for marking handlers for exception
233 regions; they cannot usually be deleted. */
235 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
236 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
237 LABEL_NUSES (XEXP (insn, 0))++;
239 if (! mark_labels_only)
240 delete_barrier_successors (f);
242 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
243 notes and recompute LABEL_NUSES. */
244 if (mark_labels_only)
245 goto end;
247 last_insn = delete_unreferenced_labels (f);
249 if (noop_moves)
250 delete_noop_moves (f);
252 /* Now iterate optimizing jumps until nothing changes over one pass. */
253 changed = 1;
254 old_max_reg = max_reg_num ();
255 while (changed)
257 changed = 0;
259 for (insn = f; insn; insn = next)
261 rtx reallabelprev;
262 rtx temp, temp1, temp2 = NULL_RTX;
263 rtx temp4 ATTRIBUTE_UNUSED;
264 rtx nlabel;
265 int this_is_any_uncondjump;
266 int this_is_any_condjump;
267 int this_is_onlyjump;
/* NEXT is recomputed up front; transformations below that delete or
move insns reset it so the walk stays consistent.  */
269 next = NEXT_INSN (insn);
271 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
272 jump. Try to optimize by duplicating the loop exit test if so.
273 This is only safe immediately after regscan, because it uses
274 the values of regno_first_uid and regno_last_uid. */
275 if (after_regscan && GET_CODE (insn) == NOTE
276 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
277 && (temp1 = next_nonnote_insn (insn)) != 0
278 && any_uncondjump_p (temp1)
279 && onlyjump_p (temp1))
281 temp = PREV_INSN (insn);
282 if (duplicate_loop_exit_test (insn))
284 changed = 1;
285 next = NEXT_INSN (temp);
286 continue;
290 if (GET_CODE (insn) != JUMP_INSN)
291 continue;
/* Cache the classification predicates; the insn may be rewritten
below, after which they would no longer be valid to recompute.  */
293 this_is_any_condjump = any_condjump_p (insn);
294 this_is_any_uncondjump = any_uncondjump_p (insn);
295 this_is_onlyjump = onlyjump_p (insn);
297 /* Tension the labels in dispatch tables. */
299 if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
300 changed |= tension_vector_labels (PATTERN (insn), 0);
301 if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
302 changed |= tension_vector_labels (PATTERN (insn), 1);
304 /* See if this jump goes to another jump and redirect if so. */
305 nlabel = follow_jumps (JUMP_LABEL (insn));
306 if (nlabel != JUMP_LABEL (insn))
307 changed |= redirect_jump (insn, nlabel, 1);
/* Everything past this point is a real transformation; skip it in
minimal mode or at -O0 (delete_insn would refuse anyway).  */
309 if (! optimize || minimal)
310 continue;
312 /* If a dispatch table always goes to the same place,
313 get rid of it and replace the insn that uses it. */
315 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
316 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
318 int i;
319 rtx pat = PATTERN (insn);
320 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
321 int len = XVECLEN (pat, diff_vec_p);
322 rtx dispatch = prev_real_insn (insn);
323 rtx set;
325 for (i = 0; i < len; i++)
326 if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
327 != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
328 break;
330 if (i == len
331 && dispatch != 0
332 && GET_CODE (dispatch) == JUMP_INSN
333 && JUMP_LABEL (dispatch) != 0
334 /* Don't mess with a casesi insn.
335 XXX according to the comment before computed_jump_p(),
336 all casesi insns should be a parallel of the jump
337 and a USE of a LABEL_REF. */
338 && ! ((set = single_set (dispatch)) != NULL
339 && (GET_CODE (SET_SRC (set)) == IF_THEN_ELSE))
340 && next_real_insn (JUMP_LABEL (dispatch)) == insn)
342 redirect_tablejump (dispatch,
343 XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
344 changed = 1;
348 reallabelprev = prev_active_insn (JUMP_LABEL (insn));
350 /* Detect jump to following insn. */
351 if (reallabelprev == insn
352 && (this_is_any_condjump || this_is_any_uncondjump)
353 && this_is_onlyjump)
355 next = next_real_insn (JUMP_LABEL (insn));
356 delete_jump (insn);
358 /* Remove the "inactive" but "real" insns (i.e. uses and
359 clobbers) in between here and there. */
360 temp = insn;
361 while ((temp = next_real_insn (temp)) != next)
362 delete_insn (temp);
364 changed = 1;
365 continue;
368 /* Detect a conditional jump going to the same place
369 as an immediately following unconditional jump. */
370 else if (this_is_any_condjump && this_is_onlyjump
371 && (temp = next_active_insn (insn)) != 0
372 && simplejump_p (temp)
373 && (next_active_insn (JUMP_LABEL (insn))
374 == next_active_insn (JUMP_LABEL (temp))))
376 /* Don't mess up test coverage analysis. */
377 temp2 = temp;
378 if (flag_test_coverage && !reload_completed)
379 for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
380 if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
381 break;
383 if (temp2 == temp)
385 /* Ensure that we jump to the later of the two labels.
386 Consider:
388 if (test) goto L2;
389 goto L1;
392 (clobber return-reg)
394 (use return-reg)
396 If we leave the goto L1, we'll incorrectly leave
397 return-reg dead for TEST true. */
399 temp2 = next_active_insn (JUMP_LABEL (insn));
400 if (!temp2)
401 temp2 = get_last_insn ();
402 if (GET_CODE (temp2) != CODE_LABEL)
403 temp2 = prev_label (temp2);
404 if (temp2 != JUMP_LABEL (temp))
405 redirect_jump (temp, temp2, 1);
407 delete_jump (insn);
408 changed = 1;
409 continue;
413 /* Detect a conditional jump jumping over an unconditional jump. */
415 else if (this_is_any_condjump
416 && reallabelprev != 0
417 && GET_CODE (reallabelprev) == JUMP_INSN
418 && prev_active_insn (reallabelprev) == insn
419 && no_labels_between_p (insn, reallabelprev)
420 && any_uncondjump_p (reallabelprev)
421 && onlyjump_p (reallabelprev))
423 /* When we invert the unconditional jump, we will be
424 decrementing the usage count of its old label.
425 Make sure that we don't delete it now because that
426 might cause the following code to be deleted. */
427 rtx prev_uses = prev_nonnote_insn (reallabelprev);
428 rtx prev_label = JUMP_LABEL (insn);
430 if (prev_label)
431 ++LABEL_NUSES (prev_label);
433 if (invert_jump (insn, JUMP_LABEL (reallabelprev), 1))
435 /* It is very likely that if there are USE insns before
436 this jump, they hold REG_DEAD notes. These REG_DEAD
437 notes are no longer valid due to this optimization,
438 and will cause the life-analysis that following passes
439 (notably delayed-branch scheduling) to think that
440 these registers are dead when they are not.
442 To prevent this trouble, we just remove the USE insns
443 from the insn chain. */
445 while (prev_uses && GET_CODE (prev_uses) == INSN
446 && GET_CODE (PATTERN (prev_uses)) == USE)
448 rtx useless = prev_uses;
449 prev_uses = prev_nonnote_insn (prev_uses);
450 delete_insn (useless);
453 delete_insn (reallabelprev);
454 changed = 1;
457 /* We can now safely delete the label if it is unreferenced
458 since the delete_insn above has deleted the BARRIER. */
459 if (prev_label && --LABEL_NUSES (prev_label) == 0)
460 delete_insn (prev_label);
462 next = NEXT_INSN (insn);
465 /* If we have an unconditional jump preceded by a USE, try to put
466 the USE before the target and jump there. This simplifies many
467 of the optimizations below since we don't have to worry about
468 dealing with these USE insns. We only do this if the label
469 being branch to already has the identical USE or if code
470 never falls through to that label. */
472 else if (this_is_any_uncondjump
473 && (temp = prev_nonnote_insn (insn)) != 0
474 && GET_CODE (temp) == INSN
475 && GET_CODE (PATTERN (temp)) == USE
476 && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
477 && (GET_CODE (temp1) == BARRIER
478 || (GET_CODE (temp1) == INSN
479 && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
480 /* Don't do this optimization if we have a loop containing
481 only the USE instruction, and the loop start label has
482 a usage count of 1. This is because we will redo this
483 optimization every time through the outer loop, and jump
484 opt will never exit. */
485 && ! ((temp2 = prev_nonnote_insn (temp)) != 0
486 && temp2 == JUMP_LABEL (insn)
487 && LABEL_NUSES (temp2) == 1))
489 if (GET_CODE (temp1) == BARRIER)
491 emit_insn_after (PATTERN (temp), temp1);
492 temp1 = NEXT_INSN (temp1);
495 delete_insn (temp);
496 redirect_jump (insn, get_label_before (temp1), 1);
497 reallabelprev = prev_real_insn (temp1);
498 changed = 1;
499 next = NEXT_INSN (insn);
502 #ifdef HAVE_trap
503 /* Detect a conditional jump jumping over an unconditional trap. */
504 if (HAVE_trap
505 && this_is_any_condjump && this_is_onlyjump
506 && reallabelprev != 0
507 && GET_CODE (reallabelprev) == INSN
508 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
509 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
510 && prev_active_insn (reallabelprev) == insn
511 && no_labels_between_p (insn, reallabelprev)
512 && (temp2 = get_condition (insn, &temp4))
513 && ((reversed_code = reversed_comparison_code (temp2, insn))
514 != UNKNOWN))
516 rtx new = gen_cond_trap (reversed_code,
517 XEXP (temp2, 0), XEXP (temp2, 1),
518 TRAP_CODE (PATTERN (reallabelprev)));
520 if (new)
522 emit_insn_before (new, temp4);
523 delete_insn (reallabelprev);
524 delete_jump (insn);
525 changed = 1;
526 continue;
529 /* Detect a jump jumping to an unconditional trap. */
530 else if (HAVE_trap && this_is_onlyjump
531 && (temp = next_active_insn (JUMP_LABEL (insn)))
532 && GET_CODE (temp) == INSN
533 && GET_CODE (PATTERN (temp)) == TRAP_IF
534 && (this_is_any_uncondjump
535 || (this_is_any_condjump
536 && (temp2 = get_condition (insn, &temp4)))))
538 rtx tc = TRAP_CONDITION (PATTERN (temp));
540 if (tc == const_true_rtx
541 || (! this_is_any_uncondjump && rtx_equal_p (temp2, tc)))
543 rtx new;
544 /* Replace an unconditional jump to a trap with a trap. */
545 if (this_is_any_uncondjump)
547 emit_barrier_after (emit_insn_before (gen_trap (), insn));
548 delete_jump (insn);
549 changed = 1;
550 continue;
552 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
553 XEXP (temp2, 1),
554 TRAP_CODE (PATTERN (temp)));
555 if (new)
557 emit_insn_before (new, temp4);
558 delete_jump (insn);
559 changed = 1;
560 continue;
563 /* If the trap condition and jump condition are mutually
564 exclusive, redirect the jump to the following insn. */
565 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
566 && this_is_any_condjump
567 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
568 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
569 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
570 && redirect_jump (insn, get_label_after (temp), 1))
572 changed = 1;
573 continue;
576 #endif
577 else
579 /* Now that the jump has been tensioned,
580 try cross jumping: check for identical code
581 before the jump and before its target label. */
583 /* First, cross jumping of conditional jumps: */
585 if (cross_jump && condjump_p (insn))
587 rtx newjpos, newlpos;
588 rtx x = prev_real_insn (JUMP_LABEL (insn));
590 /* A conditional jump may be crossjumped
591 only if the place it jumps to follows
592 an opposing jump that comes back here. */
594 if (x != 0 && ! jump_back_p (x, insn))
595 /* We have no opposing jump;
596 cannot cross jump this insn. */
597 x = 0;
599 newjpos = 0;
600 /* TARGET is nonzero if it is ok to cross jump
601 to code before TARGET. If so, see if matches. */
602 if (x != 0)
603 find_cross_jump (insn, x, 2,
604 &newjpos, &newlpos);
606 if (newjpos != 0)
608 do_cross_jump (insn, newjpos, newlpos);
609 /* Make the old conditional jump
610 into an unconditional one. */
611 SET_SRC (PATTERN (insn))
612 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
613 INSN_CODE (insn) = -1;
614 emit_barrier_after (insn);
615 /* Add to jump_chain unless this is a new label
616 whose UID is too large. */
617 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
619 jump_chain[INSN_UID (insn)]
620 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
621 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
623 changed = 1;
624 next = insn;
628 /* Cross jumping of unconditional jumps:
629 a few differences. */
631 if (cross_jump && simplejump_p (insn))
633 rtx newjpos, newlpos;
634 rtx target;
636 newjpos = 0;
638 /* TARGET is nonzero if it is ok to cross jump
639 to code before TARGET. If so, see if matches. */
640 find_cross_jump (insn, JUMP_LABEL (insn), 1,
641 &newjpos, &newlpos);
643 /* If cannot cross jump to code before the label,
644 see if we can cross jump to another jump to
645 the same label. */
646 /* Try each other jump to this label. */
647 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
648 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
649 target != 0 && newjpos == 0;
650 target = jump_chain[INSN_UID (target)])
651 if (target != insn
652 && JUMP_LABEL (target) == JUMP_LABEL (insn)
653 /* Ignore TARGET if it's deleted. */
654 && ! INSN_DELETED_P (target))
655 find_cross_jump (insn, target, 2,
656 &newjpos, &newlpos);
658 if (newjpos != 0)
660 do_cross_jump (insn, newjpos, newlpos);
661 changed = 1;
662 next = insn;
666 /* This code was dead in the previous jump.c! */
667 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
669 /* Return insns all "jump to the same place"
670 so we can cross-jump between any two of them. */
672 rtx newjpos, newlpos, target;
674 newjpos = 0;
676 /* If cannot cross jump to code before the label,
677 see if we can cross jump to another jump to
678 the same label. */
679 /* Try each other jump to this label. */
/* Element 0 of jump_chain heads the chain of all RETURN insns
(see the comment at jump_chain's definition).  */
680 for (target = jump_chain[0];
681 target != 0 && newjpos == 0;
682 target = jump_chain[INSN_UID (target)])
683 if (target != insn
684 && ! INSN_DELETED_P (target)
685 && GET_CODE (PATTERN (target)) == RETURN)
686 find_cross_jump (insn, target, 2,
687 &newjpos, &newlpos);
689 if (newjpos != 0)
691 do_cross_jump (insn, newjpos, newlpos);
692 changed = 1;
693 next = insn;
699 first = 0;
702 /* Delete extraneous line number notes.
703 Note that two consecutive notes for different lines are not really
704 extraneous. There should be some indication where that line belonged,
705 even if it became empty. */
708 rtx last_note = 0;
710 for (insn = f; insn; insn = NEXT_INSN (insn))
711 if (GET_CODE (insn) == NOTE)
713 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
714 /* Any previous line note was for the prologue; gdb wants a new
715 note after the prologue even if it is for the same line. */
716 last_note = NULL_RTX;
717 else if (NOTE_LINE_NUMBER (insn) >= 0)
719 /* Delete this note if it is identical to previous note. */
720 if (last_note
721 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
722 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
724 delete_insn (insn);
725 continue;
728 last_note = insn;
/* Common exit: release the jump_chain table allocated above.  Reached
directly when MARK_LABELS_ONLY is set.  */
733 end:
734 /* Clean up. */
735 free (jump_chain);
736 jump_chain = 0;
739 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
740 notes whose labels don't occur in the insn any more. Returns the
741 largest INSN_UID found. */
742 static int
743 init_label_info (f)
744 rtx f;
746 int largest_uid = 0;
747 rtx insn;
749 for (insn = f; insn; insn = NEXT_INSN (insn))
/* A preserved label starts with one reference so later passes never
see a zero count and delete it; others start at zero and are
recounted by mark_all_labels.  */
751 if (GET_CODE (insn) == CODE_LABEL)
752 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
753 else if (GET_CODE (insn) == JUMP_INSN)
754 JUMP_LABEL (insn) = 0;
755 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
757 rtx note, next;
/* NEXT is fetched before remove_note so the walk survives note
removal.  */
759 for (note = REG_NOTES (insn); note; note = next)
761 next = XEXP (note, 1);
762 if (REG_NOTE_KIND (note) == REG_LABEL
763 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
764 remove_note (insn, note);
767 if (INSN_UID (insn) > largest_uid)
768 largest_uid = INSN_UID (insn);
771 return largest_uid;
774 /* Delete insns following barriers, up to next label.
776 Also delete no-op jumps created by gcse. */
778 static void
779 delete_barrier_successors (f)
780 rtx f;
782 rtx insn;
783 rtx set;
785 for (insn = f; insn;)
787 if (GET_CODE (insn) == BARRIER)
789 insn = NEXT_INSN (insn);
791 never_reached_warning (insn);
793 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
795 if (GET_CODE (insn) == JUMP_INSN)
797 /* Detect when we're deleting a tablejump; get rid of
798 the jump table as well. */
799 rtx next1 = next_nonnote_insn (insn);
800 rtx next2 = next1 ? next_nonnote_insn (next1) : 0;
801 if (next2 && GET_CODE (next1) == CODE_LABEL
802 && GET_CODE (next2) == JUMP_INSN
803 && (GET_CODE (PATTERN (next2)) == ADDR_VEC
804 || GET_CODE (PATTERN (next2)) == ADDR_DIFF_VEC)))
806 delete_insn (insn);
807 insn = next2;
809 else
810 insn = delete_insn (insn);
/* NOTE_INSN_FUNCTION_END notes are kept (not deleted) even though
unreachable; all other notes and insns here are dead code.  */
812 else if (GET_CODE (insn) == NOTE
813 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
814 insn = NEXT_INSN (insn);
815 else
816 insn = delete_insn (insn);
818 /* INSN is now the code_label. */
821 /* Also remove (set (pc) (pc)) insns which can be created by
822 gcse. We eliminate such insns now to avoid having them
823 cause problems later. */
824 else if (GET_CODE (insn) == JUMP_INSN
825 && (set = pc_set (insn)) != NULL
826 && SET_SRC (set) == pc_rtx
827 && SET_DEST (set) == pc_rtx
828 && onlyjump_p (insn))
829 insn = delete_insn (insn);
831 else
832 insn = NEXT_INSN (insn);
836 /* Mark the label each jump jumps to.
837 Combine consecutive labels, and count uses of labels.
839 For each label, make a chain (using `jump_chain')
840 of all the *unconditional* jumps that jump to it;
841 also make a chain of all returns.
843 CROSS_JUMP indicates whether we are doing cross jumping
844 and if we are whether we will be paying attention to
845 death notes or not. */
847 static void
848 mark_all_labels (f, cross_jump)
849 rtx f;
850 int cross_jump;
852 rtx insn;
854 for (insn = f; insn; insn = NEXT_INSN (insn))
855 if (INSN_P (insn))
/* A CALL_PLACEHOLDER holds alternative insn sequences in its
operands; recurse into each of them.  */
857 if (GET_CODE (insn) == CALL_INSN
858 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
860 mark_all_labels (XEXP (PATTERN (insn), 0), cross_jump);
861 mark_all_labels (XEXP (PATTERN (insn), 1), cross_jump);
862 mark_all_labels (XEXP (PATTERN (insn), 2), cross_jump);
864 /* Canonicalize the tail recursion label attached to the
865 CALL_PLACEHOLDER insn. */
866 if (XEXP (PATTERN (insn), 3))
868 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
869 XEXP (PATTERN (insn), 3));
870 mark_jump_label (label_ref, insn, cross_jump, 0);
871 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
874 continue;
877 mark_jump_label (PATTERN (insn), insn, cross_jump, 0);
878 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
880 /* When we know the LABEL_REF contained in a REG used in
881 an indirect jump, we'll have a REG_LABEL note so that
882 flow can tell where it's going. */
883 if (JUMP_LABEL (insn) == 0)
885 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
886 if (label_note)
888 /* Put a LABEL_REF around the REG_LABEL note, so
889 that we can canonicalize it. */
890 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
891 XEXP (label_note, 0));
893 mark_jump_label (label_ref, insn, cross_jump, 0);
894 XEXP (label_note, 0) = XEXP (label_ref, 0);
895 JUMP_LABEL (insn) = XEXP (label_note, 0);
/* Chain this unconditional jump onto its label's list in
jump_chain (see the comment at jump_chain's definition).  */
898 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
900 jump_chain[INSN_UID (insn)]
901 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
902 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
/* Returns all chain through element 0.  */
904 if (GET_CODE (PATTERN (insn)) == RETURN)
906 jump_chain[INSN_UID (insn)] = jump_chain[0];
907 jump_chain[0] = insn;
913 /* Delete all labels already not referenced.
914 Also find and return the last insn. */
916 static rtx
917 delete_unreferenced_labels (f)
918 rtx f;
920 rtx final = NULL_RTX;
921 rtx insn;
923 for (insn = f; insn;)
/* Labels with an alternate name are kept even when unreferenced;
delete_insn returns the following insn, keeping the walk valid.  */
925 if (GET_CODE (insn) == CODE_LABEL
926 && LABEL_NUSES (insn) == 0
927 && LABEL_ALTERNATE_NAME (insn) == NULL)
928 insn = delete_insn (insn);
929 else
931 final = insn;
932 insn = NEXT_INSN (insn);
936 return final;
939 /* Delete various simple forms of moves which have no necessary
940 side effect. */
942 static void
943 delete_noop_moves (f)
944 rtx f;
946 rtx insn, next;
948 for (insn = f; insn;)
950 next = NEXT_INSN (insn);
952 if (GET_CODE (insn) == INSN)
954 register rtx body = PATTERN (insn);
956 /* Detect and delete no-op move instructions
957 resulting from not allocating a parameter in a register. */
959 if (GET_CODE (body) == SET && set_noop_p (body))
960 delete_computation (insn);
962 /* Detect and ignore no-op move instructions
963 resulting from smart or fortuitous register allocation. */
965 else if (GET_CODE (body) == SET)
967 int sreg = true_regnum (SET_SRC (body));
968 int dreg = true_regnum (SET_DEST (body));
970 if (sreg == dreg && sreg >= 0)
971 delete_insn (insn);
972 else if (sreg >= 0 && dreg >= 0)
/* SREG and DREG differ but DREG may already hold SREG's value;
find_equiv_reg tells us whether the copy is redundant.  */
974 rtx trial;
975 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
976 sreg, NULL, dreg,
977 GET_MODE (SET_SRC (body)));
979 if (tem != 0
980 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
982 /* DREG may have been the target of a REG_DEAD note in
983 the insn which makes INSN redundant. If so, reorg
984 would still think it is dead. So search for such a
985 note and delete it if we find it. */
986 if (! find_regno_note (insn, REG_UNUSED, dreg))
987 for (trial = prev_nonnote_insn (insn);
988 trial && GET_CODE (trial) != CODE_LABEL;
989 trial = prev_nonnote_insn (trial))
990 if (find_regno_note (trial, REG_DEAD, dreg))
992 remove_death (dreg, trial);
993 break;
996 /* Deleting insn could lose a death-note for SREG. */
997 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
999 /* Change this into a USE so that we won't emit
1000 code for it, but still can keep the note. */
1001 PATTERN (insn)
1002 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
1003 INSN_CODE (insn) = -1;
1004 /* Remove all reg notes but the REG_DEAD one. */
1005 REG_NOTES (insn) = trial;
1006 XEXP (trial, 1) = NULL_RTX;
1008 else
1009 delete_insn (insn);
1012 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
1013 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
1014 NULL, 0, GET_MODE (SET_DEST (body))))
1016 /* This handles the case where we have two consecutive
1017 assignments of the same constant to pseudos that didn't
1018 get a hard reg. Each SET from the constant will be
1019 converted into a SET of the spill register and an
1020 output reload will be made following it. This produces
1021 two loads of the same constant into the same spill
1022 register. */
1024 rtx in_insn = insn;
1026 /* Look back for a death note for the first reg.
1027 If there is one, it is no longer accurate. */
1028 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
1030 if ((GET_CODE (in_insn) == INSN
1031 || GET_CODE (in_insn) == JUMP_INSN)
1032 && find_regno_note (in_insn, REG_DEAD, dreg))
1034 remove_death (dreg, in_insn);
1035 break;
1037 in_insn = PREV_INSN (in_insn);
1040 /* Delete the second load of the value. */
1041 delete_insn (insn);
1044 else if (GET_CODE (body) == PARALLEL)
1046 /* If each part is a set between two identical registers or
1047 a USE or CLOBBER, delete the insn. */
1048 int i, sreg, dreg;
1049 rtx tem;
1051 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1053 tem = XVECEXP (body, 0, i);
1054 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
1055 continue;
1057 if (GET_CODE (tem) != SET
1058 || (sreg = true_regnum (SET_SRC (tem))) < 0
1059 || (dreg = true_regnum (SET_DEST (tem))) < 0
1060 || dreg != sreg)
1061 break;
/* i < 0 means the scan ran off the front: every element was a
no-op set, USE, or CLOBBER, so the whole insn can go.  */
1064 if (i < 0)
1065 delete_insn (insn);
1068 insn = next;
1072 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
1073 jump. Assume that this unconditional jump is to the exit test code. If
1074 the code is sufficiently simple, make a copy of it before INSN,
1075 followed by a jump to the exit of the loop. Then delete the unconditional
1076 jump after INSN.
1078 Return 1 if we made the change, else 0.
1080 This is only safe immediately after a regscan pass because it uses the
1081 values of regno_first_uid and regno_last_uid. */
1083 static int
1084 duplicate_loop_exit_test (loop_start)
1085 rtx loop_start;
1087 rtx insn, set, reg, p, link;
1088 rtx copy = 0, first_copy = 0;
1089 int num_insns = 0;
/* EXITCODE is the first insn after the label that the unconditional jump
   following LOOP_START targets -- i.e. the start of the exit test.  */
1090 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
1091 rtx lastexit;
1092 int max_reg = max_reg_num ();
1093 rtx *reg_map = 0;
1095 /* Scan the exit code. We do not perform this optimization if any insn:
1097 is a CALL_INSN
1098 is a CODE_LABEL
1099 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
1100 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
1101 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
1102 is not valid.
1104 We also do not do this if we find an insn with ASM_OPERANDS. While
1105 this restriction should not be necessary, copying an insn with
1106 ASM_OPERANDS can confuse asm_noperands in some cases.
1108 Also, don't do this if the exit code is more than 20 insns. */
1110 for (insn = exitcode;
1111 insn
1112 && ! (GET_CODE (insn) == NOTE
1113 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
1114 insn = NEXT_INSN (insn))
1116 switch (GET_CODE (insn))
1118 case CODE_LABEL:
1119 case CALL_INSN:
1120 return 0;
1121 case NOTE:
1122 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
1123 a jump immediately after the loop start that branches outside
1124 the loop but within an outer loop, near the exit test.
1125 If we copied this exit test and created a phony
1126 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
1127 before the exit test look like these could be safely moved
1128 out of the loop even if they actually may be never executed.
1129 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
1131 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1132 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
1133 return 0;
1135 if (optimize < 2
1136 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1137 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
1138 /* If we were to duplicate this code, we would not move
1139 the BLOCK notes, and so debugging the moved code would
1140 be difficult. Thus, we only move the code with -O2 or
1141 higher. */
1142 return 0;
1144 break;
1145 case JUMP_INSN:
1146 case INSN:
1147 /* The code below would grossly mishandle REG_WAS_0 notes,
1148 so get rid of them here. */
1149 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
1150 remove_note (insn, p);
1151 if (++num_insns > 20
1152 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
1153 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
1154 return 0;
1155 break;
1156 default:
1157 break;
1161 /* Unless INSN is zero, we can do the optimization. */
1162 if (insn == 0)
1163 return 0;
1165 lastexit = insn;
1167 /* See if any insn sets a register only used in the loop exit code and
1168 not a user variable. If so, replace it with a new register. */
/* A register qualifies when its first use is this SET and its last use
   falls before LASTEXIT; such registers are renamed via REG_MAP so the
   copied exit test does not clobber the originals.  */
1169 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1170 if (GET_CODE (insn) == INSN
1171 && (set = single_set (insn)) != 0
1172 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
1173 || (GET_CODE (reg) == SUBREG
1174 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
1175 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
1176 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
1178 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
1179 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
1180 break;
1182 if (p != lastexit)
1184 /* We can do the replacement. Allocate reg_map if this is the
1185 first replacement we found. */
1186 if (reg_map == 0)
1187 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
1189 REG_LOOP_TEST_P (reg) = 1;
1191 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
1195 /* Now copy each insn. */
1196 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
1198 switch (GET_CODE (insn))
1200 case BARRIER:
1201 copy = emit_barrier_before (loop_start);
1202 break;
1203 case NOTE:
1204 /* Only copy line-number notes. */
1205 if (NOTE_LINE_NUMBER (insn) >= 0)
1207 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
1208 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
1210 break;
1212 case INSN:
1213 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
1214 if (reg_map)
1215 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1217 mark_jump_label (PATTERN (copy), copy, 0, 0);
1219 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
1220 make them. */
1221 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1222 if (REG_NOTE_KIND (link) != REG_LABEL)
1224 if (GET_CODE (link) == EXPR_LIST)
1225 REG_NOTES (copy)
1226 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
1227 XEXP (link, 0),
1228 REG_NOTES (copy)));
1229 else
1230 REG_NOTES (copy)
1231 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
1232 XEXP (link, 0),
1233 REG_NOTES (copy)));
1236 if (reg_map && REG_NOTES (copy))
1237 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1238 break;
1240 case JUMP_INSN:
1241 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
1242 loop_start);
1243 if (reg_map)
1244 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
1245 mark_jump_label (PATTERN (copy), copy, 0, 0);
1246 if (REG_NOTES (insn))
1248 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
1249 if (reg_map)
1250 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
1253 /* If this is a simple jump, add it to the jump chain. */
1255 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
1256 && simplejump_p (copy))
1258 jump_chain[INSN_UID (copy)]
1259 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1260 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1262 break;
1264 default:
1265 abort ();
1268 /* Record the first insn we copied. We need it so that we can
1269 scan the copied insns for new pseudo registers. */
1270 if (! first_copy)
1271 first_copy = copy;
1274 /* Now clean up by emitting a jump to the end label and deleting the jump
1275 at the start of the loop. */
1276 if (! copy || GET_CODE (copy) != BARRIER)
1278 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
1279 loop_start);
1281 /* Record the first insn we copied. We need it so that we can
1282 scan the copied insns for new pseudo registers. This may not
1283 be strictly necessary since we should have copied at least one
1284 insn above. But I am going to be safe. */
1285 if (! first_copy)
1286 first_copy = copy;
1288 mark_jump_label (PATTERN (copy), copy, 0, 0);
1289 if (INSN_UID (copy) < max_jump_chain
1290 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
1292 jump_chain[INSN_UID (copy)]
1293 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
1294 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
1296 emit_barrier_before (loop_start);
1299 /* Now scan from the first insn we copied to the last insn we copied
1300 (copy) for new pseudo registers. Do this after the code to jump to
1301 the end label since that might create a new pseudo too. */
1302 reg_scan_update (first_copy, copy, max_reg);
1304 /* Mark the exit code as the virtual top of the converted loop. */
1305 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
1307 delete_insn (next_nonnote_insn (loop_start));
1309 /* Clean up. */
1310 if (reg_map)
1311 free (reg_map);
1313 return 1;
1316 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
1317 notes between START and END out before START. Assume that END is not
1318 such a note. START may be such a note. Returns the value of the new
1319 starting insn, which may be different if the original start was such a
1320 note. */
1323 squeeze_notes (start, end)
1324 rtx start, end;
1326 rtx insn;
1327 rtx next;
1329 for (insn = start; insn != end; insn = next)
1331 next = NEXT_INSN (insn);
1332 if (GET_CODE (insn) == NOTE
1333 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
1334 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
1335 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
1336 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
1337 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
1338 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
1340 if (insn == start)
1341 start = next;
1342 else
1344 rtx prev = PREV_INSN (insn);
1345 PREV_INSN (insn) = PREV_INSN (start);
1346 NEXT_INSN (insn) = start;
1347 NEXT_INSN (PREV_INSN (insn)) = insn;
1348 PREV_INSN (NEXT_INSN (insn)) = insn;
1349 NEXT_INSN (prev) = next;
1350 PREV_INSN (next) = prev;
1355 return start;
1358 /* Compare the instructions before insn E1 with those before E2
1359 to find an opportunity for cross jumping.
1360 (This means detecting identical sequences of insns followed by
1361 jumps to the same place, or followed by a label and a jump
1362 to that label, and replacing one with a jump to the other.)
1364 Assume E1 is a jump that jumps to label E2
1365 (that is not always true but it might as well be).
1366 Find the longest possible equivalent sequences
1367 and store the first insns of those sequences into *F1 and *F2.
1368 Store zero there if no equivalent preceding instructions are found.
1370 We give up if we find a label in stream 1.
1371 Actually we could transfer that label into stream 2. */
1373 static void
1374 find_cross_jump (e1, e2, minimum, f1, f2)
1375 rtx e1, e2;
1376 int minimum;
1377 rtx *f1, *f2;
1379 register rtx i1 = e1, i2 = e2;
1380 register rtx p1, p2;
1381 int lose = 0;
/* LAST1/LAST2: first insns of the equivalent sequences found so far.
   AFTERLAST1/AFTERLAST2 trail one accepted match behind, so a trailing
   cc0-setting insn can be backed out (see the sets_cc0_p code below).  */
1383 rtx last1 = 0, last2 = 0;
1384 rtx afterlast1 = 0, afterlast2 = 0;
1386 *f1 = 0;
1387 *f2 = 0;
1389 while (1)
1391 i1 = prev_nonnote_insn (i1);
/* In stream 2 we also skip CODE_LABELs, not just notes.  */
1393 i2 = PREV_INSN (i2);
1394 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
1395 i2 = PREV_INSN (i2);
1397 if (i1 == 0)
1398 break;
1400 /* Don't allow the range of insns preceding E1 or E2
1401 to include the other (E2 or E1). */
1402 if (i2 == e1 || i1 == e2)
1403 break;
1405 /* If we will get to this code by jumping, those jumps will be
1406 tensioned to go directly to the new label (before I2),
1407 so this cross-jumping won't cost extra. So reduce the minimum. */
1408 if (GET_CODE (i1) == CODE_LABEL)
1410 --minimum;
1411 break;
1414 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
1415 break;
1417 p1 = PATTERN (i1);
1418 p2 = PATTERN (i2);
1420 /* If this is a CALL_INSN, compare register usage information.
1421 If we don't check this on stack register machines, the two
1422 CALL_INSNs might be merged leaving reg-stack.c with mismatching
1423 numbers of stack registers in the same basic block.
1424 If we don't check this on machines with delay slots, a delay slot may
1425 be filled that clobbers a parameter expected by the subroutine.
1427 ??? We take the simple route for now and assume that if they're
1428 equal, they were constructed identically. */
1430 if (GET_CODE (i1) == CALL_INSN
1431 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
1432 CALL_INSN_FUNCTION_USAGE (i2)))
1433 lose = 1;
1435 #ifdef STACK_REGS
1436 /* If cross_jump_death_matters is not 0, the insn's mode
1437 indicates whether or not the insn contains any stack-like
1438 regs. */
1440 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
1442 /* If register stack conversion has already been done, then
1443 death notes must also be compared before it is certain that
1444 the two instruction streams match. */
1446 rtx note;
1447 HARD_REG_SET i1_regset, i2_regset;
1449 CLEAR_HARD_REG_SET (i1_regset);
1450 CLEAR_HARD_REG_SET (i2_regset);
1452 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
1453 if (REG_NOTE_KIND (note) == REG_DEAD
1454 && STACK_REG_P (XEXP (note, 0)))
1455 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
1457 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
1458 if (REG_NOTE_KIND (note) == REG_DEAD
1459 && STACK_REG_P (XEXP (note, 0)))
1460 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
1462 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
1464 lose = 1;
1466 done:
1469 #endif
1471 /* Don't allow old-style asm or volatile extended asms to be accepted
1472 for cross jumping purposes. It is conceptually correct to allow
1473 them, since cross-jumping preserves the dynamic instruction order
1474 even though it is changing the static instruction order. However,
1475 if an asm is being used to emit an assembler pseudo-op, such as
1476 the MIPS `.set reorder' pseudo-op, then the static instruction order
1477 matters and it must be preserved. */
1478 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
1479 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
1480 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
1481 lose = 1;
1483 if (lose || GET_CODE (p1) != GET_CODE (p2)
1484 || ! rtx_renumbered_equal_p (p1, p2))
1486 /* The following code helps take care of G++ cleanups. */
1487 rtx equiv1;
1488 rtx equiv2;
/* Patterns differ textually but may still compute the same constant:
   if both insns carry matching constant REG_EQUAL/REG_EQUIV notes,
   try substituting the constant for each SET_SRC and re-comparing.  */
1490 if (!lose && GET_CODE (p1) == GET_CODE (p2)
1491 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
1492 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
1493 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
1494 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
1495 /* If the equivalences are not to a constant, they may
1496 reference pseudos that no longer exist, so we can't
1497 use them. */
1498 && CONSTANT_P (XEXP (equiv1, 0))
1499 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
1501 rtx s1 = single_set (i1);
1502 rtx s2 = single_set (i2);
1503 if (s1 != 0 && s2 != 0
1504 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
1506 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
1507 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
1508 if (! rtx_renumbered_equal_p (p1, p2))
1509 cancel_changes (0);
1510 else if (apply_change_group ())
1511 goto win;
1515 /* Insns fail to match; cross jumping is limited to the following
1516 insns. */
1518 #ifdef HAVE_cc0
1519 /* Don't allow the insn after a compare to be shared by
1520 cross-jumping unless the compare is also shared.
1521 Here, if either of these non-matching insns is a compare,
1522 exclude the following insn from possible cross-jumping. */
1523 if (sets_cc0_p (p1) || sets_cc0_p (p2))
1524 last1 = afterlast1, last2 = afterlast2, ++minimum;
1525 #endif
1527 /* If cross-jumping here will feed a jump-around-jump
1528 optimization, this jump won't cost extra, so reduce
1529 the minimum. */
1530 if (GET_CODE (i1) == JUMP_INSN
1531 && JUMP_LABEL (i1)
1532 && prev_real_insn (JUMP_LABEL (i1)) == e1
1533 --minimum;
1534 break;
1537 win:
1538 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
1540 /* Ok, this insn is potentially includable in a cross-jump here. */
1541 afterlast1 = last1, afterlast2 = last2;
1542 last1 = i1, last2 = i2, --minimum;
/* Only report a match once it is at least MINIMUM insns long and
   contains something besides E1 itself.  */
1546 if (minimum <= 0 && last1 != 0 && last1 != e1)
1547 *f1 = last1, *f2 = last2;
1550 static void
1551 do_cross_jump (insn, newjpos, newlpos)
1552 rtx insn, newjpos, newlpos;
1554 /* Find an existing label at this point
1555 or make a new one if there is none. */
1556 register rtx label = get_label_before (newlpos);
1558 /* Make the same jump insn jump to the new point. */
1559 if (GET_CODE (PATTERN (insn)) == RETURN)
1561 /* Remove from jump chain of returns. */
1562 delete_from_jump_chain (insn);
1563 /* Change the insn. */
1564 PATTERN (insn) = gen_jump (label);
1565 INSN_CODE (insn) = -1;
1566 JUMP_LABEL (insn) = label;
1567 LABEL_NUSES (label)++;
1568 /* Add to new the jump chain. */
1569 if (INSN_UID (label) < max_jump_chain
1570 && INSN_UID (insn) < max_jump_chain)
1572 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
1573 jump_chain[INSN_UID (label)] = insn;
1576 else
1577 redirect_jump (insn, label, 1);
1579 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
1580 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
1581 the NEWJPOS stream. */
1583 while (newjpos != insn)
1585 rtx lnote;
1587 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
1588 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
1589 || REG_NOTE_KIND (lnote) == REG_EQUIV)
1590 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
1591 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
1592 remove_note (newlpos, lnote);
1594 delete_insn (newjpos);
1595 newjpos = next_real_insn (newjpos);
1596 newlpos = next_real_insn (newlpos);
1600 /* Return the label before INSN, or put a new label there. */
1603 get_label_before (insn)
1604 rtx insn;
1606 rtx label;
1608 /* Find an existing label at this point
1609 or make a new one if there is none. */
1610 label = prev_nonnote_insn (insn);
1612 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1614 rtx prev = PREV_INSN (insn);
1616 label = gen_label_rtx ();
1617 emit_label_after (label, prev);
1618 LABEL_NUSES (label) = 0;
1620 return label;
1623 /* Return the label after INSN, or put a new label there. */
1626 get_label_after (insn)
1627 rtx insn;
1629 rtx label;
1631 /* Find an existing label at this point
1632 or make a new one if there is none. */
1633 label = next_nonnote_insn (insn);
1635 if (label == 0 || GET_CODE (label) != CODE_LABEL)
1637 label = gen_label_rtx ();
1638 emit_label_after (label, insn);
1639 LABEL_NUSES (label) = 0;
1641 return label;
1644 /* Return 1 if INSN is a jump that jumps to right after TARGET
1645 only on the condition that TARGET itself would drop through.
1646 Assumes that TARGET is a conditional jump. */
1648 static int
1649 jump_back_p (insn, target)
1650 rtx insn, target;
1652 rtx cinsn, ctarget;
1653 enum rtx_code codei, codet;
1654 rtx set, tset;
1656 if (! any_condjump_p (insn)
1657 || any_uncondjump_p (target)
1658 || target != prev_real_insn (JUMP_LABEL (insn)))
1659 return 0;
1660 set = pc_set (insn);
1661 tset = pc_set (target);
1663 cinsn = XEXP (SET_SRC (set), 0);
1664 ctarget = XEXP (SET_SRC (tset), 0);
1666 codei = GET_CODE (cinsn);
1667 codet = GET_CODE (ctarget);
1669 if (XEXP (SET_SRC (set), 1) == pc_rtx)
1671 codei = reversed_comparison_code (cinsn, insn);
1672 if (codei == UNKNOWN)
1673 return 0;
1676 if (XEXP (SET_SRC (tset), 2) == pc_rtx)
1678 codet = reversed_comparison_code (ctarget, target);
1679 if (codei == UNKNOWN)
1680 return 0;
1683 return (codei == codet
1684 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
1685 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
1688 /* Given a comparison (CODE ARG0 ARG1), inside a insn, INSN, return an code
1689 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
1690 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
1691 know whether it's source is floating point or integer comparison. Machine
1692 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
1693 to help this function avoid overhead in these cases. */
1694 enum rtx_code
1695 reversed_comparison_code_parts (code, arg0, arg1, insn)
1696 rtx insn, arg0, arg1;
1697 enum rtx_code code;
1699 enum machine_mode mode;
1701 /* If this is not actually a comparison, we can't reverse it. */
1702 if (GET_RTX_CLASS (code) != '<')
1703 return UNKNOWN;
1705 mode = GET_MODE (arg0);
1706 if (mode == VOIDmode)
1707 mode = GET_MODE (arg1);
1709 /* First see if machine description supply us way to reverse the comparison.
1710 Give it priority over everything else to allow machine description to do
1711 tricks. */
1712 #ifdef REVERSIBLE_CC_MODE
1713 if (GET_MODE_CLASS (mode) == MODE_CC
1714 && REVERSIBLE_CC_MODE (mode))
1716 #ifdef REVERSE_CONDITION
1717 return REVERSE_CONDITION (code, mode);
1718 #endif
1719 return reverse_condition (code);
1721 #endif
1723 /* Try few special cases based on the comparison code. */
1724 switch (code)
1726 case GEU:
1727 case GTU:
1728 case LEU:
1729 case LTU:
1730 case NE:
1731 case EQ:
1732 /* It is always safe to reverse EQ and NE, even for the floating
1733 point. Similary the unsigned comparisons are never used for
1734 floating point so we can reverse them in the default way. */
1735 return reverse_condition (code);
1736 case ORDERED:
1737 case UNORDERED:
1738 case LTGT:
1739 case UNEQ:
1740 /* In case we already see unordered comparison, we can be sure to
1741 be dealing with floating point so we don't need any more tests. */
1742 return reverse_condition_maybe_unordered (code);
1743 case UNLT:
1744 case UNLE:
1745 case UNGT:
1746 case UNGE:
1747 /* We don't have safe way to reverse these yet. */
1748 return UNKNOWN;
1749 default:
1750 break;
1753 /* In case we give up IEEE compatibility, all comparisons are reversible. */
1754 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
1755 || flag_unsafe_math_optimizations)
1756 return reverse_condition (code);
/* A CC-mode (or cc0) comparison: the real operand mode is hidden behind
   the flags register, so walk backwards to find the COMPARE that set it.  */
1758 if (GET_MODE_CLASS (mode) == MODE_CC
1759 #ifdef HAVE_cc0
1760 || arg0 == cc0_rtx
1761 #endif
1764 rtx prev;
1765 /* Try to search for the comparison to determine the real mode.
1766 This code is expensive, but with sane machine description it
1767 will be never used, since REVERSIBLE_CC_MODE will return true
1768 in all cases. */
1769 if (! insn)
1770 return UNKNOWN;
/* Scan back within the basic block (stop at a CODE_LABEL).  */
1772 for (prev = prev_nonnote_insn (insn);
1773 prev != 0 && GET_CODE (prev) != CODE_LABEL;
1774 prev = prev_nonnote_insn (prev))
1776 rtx set = set_of (arg0, prev);
1777 if (set && GET_CODE (set) == SET
1778 && rtx_equal_p (SET_DEST (set), arg0))
1780 rtx src = SET_SRC (set);
1782 if (GET_CODE (src) == COMPARE)
1784 rtx comparison = src;
1785 arg0 = XEXP (src, 0);
1786 mode = GET_MODE (arg0);
1787 if (mode == VOIDmode)
1788 mode = GET_MODE (XEXP (comparison, 1));
1789 break;
1791 /* We can get past reg-reg moves. This may be usefull for model
1792 of i387 comparisons that first move flag registers around. */
1793 if (REG_P (src))
1795 arg0 = src;
1796 continue;
1799 /* If register is clobbered in some ununderstandable way,
1800 give up. */
1801 if (set)
1802 return UNKNOWN;
1806 /* An integer condition. */
1807 if (GET_CODE (arg0) == CONST_INT
1808 || (GET_MODE (arg0) != VOIDmode
1809 && GET_MODE_CLASS (mode) != MODE_CC
1810 && ! FLOAT_MODE_P (mode)))
1811 return reverse_condition (code);
1813 return UNKNOWN;
1816 /* An wrapper around the previous function to take COMPARISON as rtx
1817 expression. This simplifies many callers. */
1818 enum rtx_code
1819 reversed_comparison_code (comparison, insn)
1820 rtx comparison, insn;
1822 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
1823 return UNKNOWN;
1824 return reversed_comparison_code_parts (GET_CODE (comparison),
1825 XEXP (comparison, 0),
1826 XEXP (comparison, 1), insn);
1829 /* Given an rtx-code for a comparison, return the code for the negated
1830 comparison. If no such code exists, return UNKNOWN.
1832 WATCH OUT! reverse_condition is not safe to use on a jump that might
1833 be acting on the results of an IEEE floating point comparison, because
1834 of the special treatment of non-signaling nans in comparisons.
1835 Use reversed_comparison_code instead. */
1837 enum rtx_code
1838 reverse_condition (code)
1839 enum rtx_code code;
1841 switch (code)
1843 case EQ:
1844 return NE;
1845 case NE:
1846 return EQ;
1847 case GT:
1848 return LE;
1849 case GE:
1850 return LT;
1851 case LT:
1852 return GE;
1853 case LE:
1854 return GT;
1855 case GTU:
1856 return LEU;
1857 case GEU:
1858 return LTU;
1859 case LTU:
1860 return GEU;
1861 case LEU:
1862 return GTU;
1863 case UNORDERED:
1864 return ORDERED;
1865 case ORDERED:
1866 return UNORDERED;
1868 case UNLT:
1869 case UNLE:
1870 case UNGT:
1871 case UNGE:
1872 case UNEQ:
1873 case LTGT:
1874 return UNKNOWN;
1876 default:
1877 abort ();
1881 /* Similar, but we're allowed to generate unordered comparisons, which
1882 makes it safe for IEEE floating-point. Of course, we have to recognize
1883 that the target will support them too... */
1885 enum rtx_code
1886 reverse_condition_maybe_unordered (code)
1887 enum rtx_code code;
1889 /* Non-IEEE formats don't have unordered conditions. */
1890 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
1891 return reverse_condition (code);
1893 switch (code)
1895 case EQ:
1896 return NE;
1897 case NE:
1898 return EQ;
1899 case GT:
1900 return UNLE;
1901 case GE:
1902 return UNLT;
1903 case LT:
1904 return UNGE;
1905 case LE:
1906 return UNGT;
1907 case LTGT:
1908 return UNEQ;
1909 case UNORDERED:
1910 return ORDERED;
1911 case ORDERED:
1912 return UNORDERED;
1913 case UNLT:
1914 return GE;
1915 case UNLE:
1916 return GT;
1917 case UNGT:
1918 return LE;
1919 case UNGE:
1920 return LT;
1921 case UNEQ:
1922 return LTGT;
1924 default:
1925 abort ();
1929 /* Similar, but return the code when two operands of a comparison are swapped.
1930 This IS safe for IEEE floating-point. */
1932 enum rtx_code
1933 swap_condition (code)
1934 enum rtx_code code;
1936 switch (code)
1938 case EQ:
1939 case NE:
1940 case UNORDERED:
1941 case ORDERED:
1942 case UNEQ:
1943 case LTGT:
1944 return code;
1946 case GT:
1947 return LT;
1948 case GE:
1949 return LE;
1950 case LT:
1951 return GT;
1952 case LE:
1953 return GE;
1954 case GTU:
1955 return LTU;
1956 case GEU:
1957 return LEU;
1958 case LTU:
1959 return GTU;
1960 case LEU:
1961 return GEU;
1962 case UNLT:
1963 return UNGT;
1964 case UNLE:
1965 return UNGE;
1966 case UNGT:
1967 return UNLT;
1968 case UNGE:
1969 return UNLE;
1971 default:
1972 abort ();
1976 /* Given a comparison CODE, return the corresponding unsigned comparison.
1977 If CODE is an equality comparison or already an unsigned comparison,
1978 CODE is returned. */
1980 enum rtx_code
1981 unsigned_condition (code)
1982 enum rtx_code code;
1984 switch (code)
1986 case EQ:
1987 case NE:
1988 case GTU:
1989 case GEU:
1990 case LTU:
1991 case LEU:
1992 return code;
1994 case GT:
1995 return GTU;
1996 case GE:
1997 return GEU;
1998 case LT:
1999 return LTU;
2000 case LE:
2001 return LEU;
2003 default:
2004 abort ();
2008 /* Similarly, return the signed version of a comparison. */
2010 enum rtx_code
2011 signed_condition (code)
2012 enum rtx_code code;
2014 switch (code)
2016 case EQ:
2017 case NE:
2018 case GT:
2019 case GE:
2020 case LT:
2021 case LE:
2022 return code;
2024 case GTU:
2025 return GT;
2026 case GEU:
2027 return GE;
2028 case LTU:
2029 return LT;
2030 case LEU:
2031 return LE;
2033 default:
2034 abort ();
2038 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
2039 truth of CODE1 implies the truth of CODE2. */
2042 comparison_dominates_p (code1, code2)
2043 enum rtx_code code1, code2;
2045 /* UNKNOWN comparison codes can happen as a result of trying to revert
2046 comparison codes.
2047 They can't match anything, so we have to reject them here. */
2048 if (code1 == UNKNOWN || code2 == UNKNOWN)
2049 return 0;
2051 if (code1 == code2)
2052 return 1;
2054 switch (code1)
2056 case UNEQ:
2057 if (code2 == UNLE || code2 == UNGE)
2058 return 1;
2059 break;
2061 case EQ:
2062 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
2063 || code2 == ORDERED)
2064 return 1;
2065 break;
2067 case UNLT:
2068 if (code2 == UNLE || code2 == NE)
2069 return 1;
2070 break;
2072 case LT:
2073 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2074 return 1;
2075 break;
2077 case UNGT:
2078 if (code2 == UNGE || code2 == NE)
2079 return 1;
2080 break;
2082 case GT:
2083 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
2084 return 1;
2085 break;
2087 case GE:
2088 case LE:
2089 if (code2 == ORDERED)
2090 return 1;
2091 break;
2093 case LTGT:
2094 if (code2 == NE || code2 == ORDERED)
2095 return 1;
2096 break;
2098 case LTU:
2099 if (code2 == LEU || code2 == NE)
2100 return 1;
2101 break;
2103 case GTU:
2104 if (code2 == GEU || code2 == NE)
2105 return 1;
2106 break;
2108 case UNORDERED:
2109 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
2110 || code2 == UNGE || code2 == UNGT)
2111 return 1;
2112 break;
2114 default:
2115 break;
2118 return 0;
2121 /* Return 1 if INSN is an unconditional jump and nothing else. */
2124 simplejump_p (insn)
2125 rtx insn;
2127 return (GET_CODE (insn) == JUMP_INSN
2128 && GET_CODE (PATTERN (insn)) == SET
2129 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
2130 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
2133 /* Return nonzero if INSN is a (possibly) conditional jump
2134 and nothing more.
2136 Use this function is deprecated, since we need to support combined
2137 branch and compare insns. Use any_condjump_p instead whenever possible. */
2140 condjump_p (insn)
2141 rtx insn;
2143 register rtx x = PATTERN (insn);
2145 if (GET_CODE (x) != SET
2146 || GET_CODE (SET_DEST (x)) != PC)
2147 return 0;
2149 x = SET_SRC (x);
2150 if (GET_CODE (x) == LABEL_REF)
2151 return 1;
2152 else
2153 return (GET_CODE (x) == IF_THEN_ELSE
2154 && ((GET_CODE (XEXP (x, 2)) == PC
2155 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
2156 || GET_CODE (XEXP (x, 1)) == RETURN))
2157 || (GET_CODE (XEXP (x, 1)) == PC
2158 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
2159 || GET_CODE (XEXP (x, 2)) == RETURN))));
2161 return 0;
2164 /* Return nonzero if INSN is a (possibly) conditional jump inside a
2165 PARALLEL.
2167 Use this function is deprecated, since we need to support combined
2168 branch and compare insns. Use any_condjump_p instead whenever possible. */
2171 condjump_in_parallel_p (insn)
2172 rtx insn;
2174 register rtx x = PATTERN (insn);
2176 if (GET_CODE (x) != PARALLEL)
2177 return 0;
2178 else
2179 x = XVECEXP (x, 0, 0);
2181 if (GET_CODE (x) != SET)
2182 return 0;
2183 if (GET_CODE (SET_DEST (x)) != PC)
2184 return 0;
2185 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
2186 return 1;
2187 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2188 return 0;
2189 if (XEXP (SET_SRC (x), 2) == pc_rtx
2190 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
2191 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
2192 return 1;
2193 if (XEXP (SET_SRC (x), 1) == pc_rtx
2194 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
2195 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
2196 return 1;
2197 return 0;
2200 /* Return set of PC, otherwise NULL. */
2203 pc_set (insn)
2204 rtx insn;
2206 rtx pat;
2207 if (GET_CODE (insn) != JUMP_INSN)
2208 return NULL_RTX;
2209 pat = PATTERN (insn);
2211 /* The set is allowed to appear either as the insn pattern or
2212 the first set in a PARALLEL. */
2213 if (GET_CODE (pat) == PARALLEL)
2214 pat = XVECEXP (pat, 0, 0);
2215 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
2216 return pat;
2218 return NULL_RTX;
2221 /* Return true when insn is an unconditional direct jump,
2222 possibly bundled inside a PARALLEL. */
2225 any_uncondjump_p (insn)
2226 rtx insn;
2228 rtx x = pc_set (insn);
2229 if (!x)
2230 return 0;
2231 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
2232 return 0;
2233 return 1;
2236 /* Return true when insn is a conditional jump. This function works for
2237 instructions containing PC sets in PARALLELs. The instruction may have
2238 various other effects so before removing the jump you must verify
2239 onlyjump_p.
2241 Note that unlike condjump_p it returns false for unconditional jumps. */
2244 any_condjump_p (insn)
2245 rtx insn;
2247 rtx x = pc_set (insn);
2248 enum rtx_code a, b;
2250 if (!x)
2251 return 0;
2252 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
2253 return 0;
2255 a = GET_CODE (XEXP (SET_SRC (x), 1));
2256 b = GET_CODE (XEXP (SET_SRC (x), 2));
2258 return ((b == PC && (a == LABEL_REF || a == RETURN))
2259 || (a == PC && (b == LABEL_REF || b == RETURN)));
2262 /* Return the label of a conditional jump. */
2265 condjump_label (insn)
2266 rtx insn;
2268 rtx x = pc_set (insn);
2270 if (!x)
2271 return NULL_RTX;
2272 x = SET_SRC (x);
2273 if (GET_CODE (x) == LABEL_REF)
2274 return x;
2275 if (GET_CODE (x) != IF_THEN_ELSE)
2276 return NULL_RTX;
2277 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
2278 return XEXP (x, 1);
2279 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
2280 return XEXP (x, 2);
2281 return NULL_RTX;
2284 /* Return true if INSN is a (possibly conditional) return insn. */
2286 static int
2287 returnjump_p_1 (loc, data)
2288 rtx *loc;
2289 void *data ATTRIBUTE_UNUSED;
2291 rtx x = *loc;
2292 return x && GET_CODE (x) == RETURN;
2296 returnjump_p (insn)
2297 rtx insn;
2299 if (GET_CODE (insn) != JUMP_INSN)
2300 return 0;
2301 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
2304 /* Return true if INSN is a jump that only transfers control and
2305 nothing more. */
2308 onlyjump_p (insn)
2309 rtx insn;
2311 rtx set;
2313 if (GET_CODE (insn) != JUMP_INSN)
2314 return 0;
2316 set = single_set (insn);
2317 if (set == NULL)
2318 return 0;
2319 if (GET_CODE (SET_DEST (set)) != PC)
2320 return 0;
2321 if (side_effects_p (SET_SRC (set)))
2322 return 0;
2324 return 1;
#ifdef HAVE_cc0

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.
   Return 0 if X does not set cc0 at all.  */

int
sets_cc0_p (x)
     rtx x ATTRIBUTE_UNUSED;
{
  /* The simple case: a lone SET whose destination is cc0.  */
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;		/* Saw a SET of cc0.  */
      int other_things = 0;	/* Saw a SET of something else.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      /* Non-SET elements (USE, CLOBBER) are deliberately ignored.  */
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif /* HAVE_cc0 */
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (label)
     rtx label;
{
  register rtx insn;
  register rtx next;
  register rtx value = label;	/* Best label reached so far.  */
  register int depth;

  /* Follow at most 10 links; this bounds the work and also guards
     against cycles the explicit cycle check below might miss.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
	     && onlyjump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      /* Don't chain to a label immediately followed by a dispatch
	 table (ADDR_VEC / ADDR_DIFF_VEC).  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  if (depth == 10)
    return label;
  return value;
}
/* Assuming that field IDX of X is a vector of label_refs,
   replace each of them by the ultimate label reached by it.
   Return nonzero if a change is made.
   (NOTE(review): an older comment mentioned an IGNORE_LOOPS parameter;
   no such parameter exists — follow_jumps itself checks
   reload_completed before chaining across NOTE_INSN_LOOP_BEG.)  */

static int
tension_vector_labels (x, idx)
     register rtx x;
     register int idx;
{
  int changed = 0;
  register int i;
  for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
    {
      register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
      register rtx nlabel = follow_jumps (olabel);
      if (nlabel && nlabel != olabel)
	{
	  /* Retarget the vector element and fix up the reference
	     counts; the old label dies if this was its last use.  */
	  XEXP (XVECEXP (x, idx, i), 0) = nlabel;
	  ++LABEL_NUSES (nlabel);
	  if (--LABEL_NUSES (olabel) == 0)
	    delete_insn (olabel);
	  changed = 1;
	}
    }
  return changed;
}
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.

   Once reload has completed (CROSS_JUMP non-zero), we need not consider
   two labels distinct if they are separated by only USE or CLOBBER insns.  */

void
mark_jump_label (x, insn, cross_jump, in_mem)
     register rtx x;
     rtx insn;
     int cross_jump;
     int in_mem;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register const char *fmt;

  switch (code)
    {
    /* These codes can contain no label references; stop here.  */
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* Remember we are inside a MEM so SYMBOL_REFs below are
	 considered (they may be constant-pool references).  */
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, cross_jump, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);
	rtx olabel = label;
	rtx note;
	rtx next;

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (GET_CODE (label) == NOTE
	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	/* If there are other labels following this one,
	   replace it with the last of the consecutive labels.  */
	for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
	  {
	    if (GET_CODE (next) == CODE_LABEL)
	      label = next;
	    else if (cross_jump && GET_CODE (next) == INSN
		     && (GET_CODE (PATTERN (next)) == USE
			 || GET_CODE (PATTERN (next)) == CLOBBER))
	      continue;
	    else if (GET_CODE (next) != NOTE)
	      break;
	    else if (! cross_jump
		     && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
			 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
			 /* ??? Optional.  Disables some optimizations, but
			    makes gcov output more accurate with -O.  */
			 || (flag_test_coverage
			     && NOTE_LINE_NUMBER (next) > 0)))
	      break;
	  }

	/* Record the (possibly canonicalized) label and count the use,
	   unless INSN has already been deleted.  */
	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;

	    /* If we've changed OLABEL and we had a REG_LABEL note
	       for it, update it as well.  */
	    else if (label != olabel
		     && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
	      XEXP (note, 0) = label;

	    /* Otherwise, add a REG_LABEL note for LABEL unless there already
	       is one.  */
	    else if (! find_reg_note (insn, REG_LABEL, label))
	      {
		/* This code used to ignore labels which referred to dispatch
		   tables to avoid flow.c generating worse code.

		   However, in the presence of global optimizations like
		   gcse which call find_basic_blocks without calling
		   life_analysis, not recording such labels will lead
		   to compiler aborts because of inconsistencies in the
		   flow graph.  So we go ahead and record the label.

		   It may also be the case that the optimization argument
		   is no longer valid because of the more accurate cfg
		   we build in find_basic_blocks -- it no longer pessimizes
		   code when it finds a REG_LABEL note.  */
		REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
						      REG_NOTES (insn));
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX,
			     cross_jump, in_mem);
	}
      return;

    default:
      break;
    }

  /* Recurse into all subexpressions of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, cross_jump, in_mem);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, cross_jump, in_mem);
	}
    }
}
2611 /* If all INSN does is set the pc, delete it,
2612 and delete the insn that set the condition codes for it
2613 if that's what the previous thing was. */
2615 void
2616 delete_jump (insn)
2617 rtx insn;
2619 register rtx set = single_set (insn);
2621 if (set && GET_CODE (SET_DEST (set)) == PC)
2622 delete_computation (insn);
2625 /* Verify INSN is a BARRIER and delete it. */
2627 void
2628 delete_barrier (insn)
2629 rtx insn;
2631 if (GET_CODE (insn) != BARRIER)
2632 abort ();
2634 delete_insn (insn);
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (note, insn)
     rtx note;
     rtx insn;
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);	/* The register that dies in INSN.  */

  /* Scan backwards over ordinary insns and calls, looking for the
     insn that set REG.  */
  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (GET_CODE (our_prev) == INSN
		    || GET_CODE (our_prev) == CALL_INSN);
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (GET_CODE (our_prev) == CALL_INSN
	  && (! CONST_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && GET_CODE (XEXP (pat, 0)) == INSN)
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	break;

      if (reg_set_p (reg, pat))
	{
	  /* A call may also clobber REG; don't treat other side
	     effects as deletable.  */
	  if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && GET_CODE (SET_DEST (pat)) == REG)
	    {
	      /* Compute the hard-register ranges covered by the SET
		 destination and by REG (pseudos count as one reg).  */
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= (dest_regno
		   + (dest_regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (dest_regno,
					  GET_MODE (SET_DEST (pat))) : 1));
	      int regno = REGNO (reg);
	      int endregno
		= (regno
		   + (regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));

	      /* If the SET writes only within the dying range, the
		 whole computation is dead.  */
	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));

		  /* Only delete the setter once every word it sets is
		     marked unused.  */
		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  /* sets_cc0_p > 0 means PREV does nothing but set cc0.  */
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* Chase each register that dies here back to its setter.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      delete_prior_computation (note, insn);
    }

  delete_insn (insn);
}
/* Delete insn INSN from the chain of insns and update label ref counts.
   May delete some following insns as a consequence; may even delete
   a label elsewhere and insns that follow it.

   Returns the first insn after INSN that was not deleted.  */

rtx
delete_insn (insn)
     register rtx insn;
{
  register rtx next = NEXT_INSN (insn);
  register rtx prev = PREV_INSN (insn);
  register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  register int dont_really_delete = 0;
  rtx note;

  /* Skip past already-deleted followers.  */
  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  if (was_code_label)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* Don't delete user-declared labels.  When optimizing, convert them
     to special NOTEs instead.  When not optimizing, leave them alone.  */
  if (was_code_label && LABEL_NAME (insn) != 0)
    {
      if (optimize)
	{
	  const char *name = LABEL_NAME (insn);
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (insn) = name;
	}

      dont_really_delete = 1;
    }
  else
    /* Mark this insn as deleted.  */
    INSN_DELETED_P (insn) = 1;

  /* If this is an unconditional jump, delete it from the jump chain.  */
  if (simplejump_p (insn))
    delete_from_jump_chain (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    {
      INSN_DELETED_P (next) = 1;
      next = NEXT_INSN (next);
    }

  /* Patch out INSN (and the barrier if any) */

  if (! dont_really_delete)
    {
      if (prev)
	{
	  NEXT_INSN (prev) = next;
	  /* If PREV is a SEQUENCE, also patch the last insn inside it.  */
	  if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	    NEXT_INSN (XVECEXP (PATTERN (prev), 0,
				XVECLEN (PATTERN (prev), 0) - 1)) = next;
	}

      if (next)
	{
	  PREV_INSN (next) = prev;
	  /* Likewise for the first insn inside a following SEQUENCE.  */
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	    PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
	}

      if (prev && NEXT_INSN (prev) == 0)
	set_last_insn (prev);
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (--LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_insn (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if ((lab_next = next_nonnote_insn (lab)) != NULL
	       && GET_CODE (lab_next) == JUMP_INSN
	       && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
		   || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_insn (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Drop a reference from each entry of the table.  */
      for (i = 0; i < len; i++)
	if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
	if (--LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_insn (XEXP (note, 0));

  /* Find the preceding real insn, skipping deleted insns and notes.  */
  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_insn (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      register RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_insn (next);
	}
    }

  return next;
}
2996 /* Advance from INSN till reaching something not deleted
2997 then return that. May return INSN itself. */
3000 next_nondeleted_insn (insn)
3001 rtx insn;
3003 while (INSN_DELETED_P (insn))
3004 insn = NEXT_INSN (insn);
3005 return insn;
3008 /* Delete a range of insns from FROM to TO, inclusive.
3009 This is for the sake of peephole optimization, so assume
3010 that whatever these insns do will still be done by a new
3011 peephole insn that will replace them. */
3013 void
3014 delete_for_peephole (from, to)
3015 register rtx from, to;
3017 register rtx insn = from;
3019 while (1)
3021 register rtx next = NEXT_INSN (insn);
3022 register rtx prev = PREV_INSN (insn);
3024 if (GET_CODE (insn) != NOTE)
3026 INSN_DELETED_P (insn) = 1;
3028 /* Patch this insn out of the chain. */
3029 /* We don't do this all at once, because we
3030 must preserve all NOTEs. */
3031 if (prev)
3032 NEXT_INSN (prev) = next;
3034 if (next)
3035 PREV_INSN (next) = prev;
3038 if (insn == to)
3039 break;
3040 insn = next;
3043 /* Note that if TO is an unconditional jump
3044 we *do not* delete the BARRIER that follows,
3045 since the peephole that replaces this sequence
3046 is also an unconditional jump in that case. */
/* We have determined that INSN is never reached, and are about to
   delete it.  Print a warning if the user asked for one.

   To try to make this warning more useful, this should only be called
   once per basic block not reached, and it only warns when the basic
   block contains more than one line from the current function, and
   contains at least one operation.  CSE and inlining can duplicate insns,
   so it's possible to get spurious warnings from this.  */

void
never_reached_warning (avoided_insn)
     rtx avoided_insn;
{
  rtx insn;
  rtx a_line_note = NULL;	/* First line-number note encountered.  */
  int two_avoided_lines = 0;	/* Nonzero if >1 distinct lines skipped.  */
  int contains_insn = 0;	/* Nonzero if a real operation skipped.  */

  if (! warn_notreached)
    return;

  /* Scan forwards, looking at LINE_NUMBER notes, until
     we hit a LABEL or we run out of insns.  */

  for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	break;
      else if (GET_CODE (insn) == NOTE	/* A line number note?  */
	       && NOTE_LINE_NUMBER (insn) >= 0)
	{
	  if (a_line_note == NULL)
	    a_line_note = insn;
	  else
	    two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
				  != NOTE_LINE_NUMBER (insn));
	}
      else if (INSN_P (insn))
	contains_insn = 1;
    }

  /* TWO_AVOIDED_LINES can only be set after A_LINE_NOTE is, so the
     dereference below is safe.  */
  if (two_avoided_lines && contains_insn)
    warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
				NOTE_LINE_NUMBER (a_line_note),
				"will never be executed");
}
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  register rtx x = *loc;
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  if (nlabel)
	    n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
	  else
	    /* A null NLABEL means the reference becomes a RETURN.  */
	    n = gen_rtx_RETURN (VOIDmode);

	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Converting a RETURN into a jump to NLABEL.  When replacing the
	 whole pattern, wrap the label_ref in a SET of the PC.  */
      x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* (set (pc) (label_ref OLABEL)) with a null NLABEL collapses to a
     plain RETURN pattern.  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  /* Recurse into all subexpressions.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
3154 /* Similar, but apply the change group and report success or failure. */
3156 static int
3157 redirect_exp (olabel, nlabel, insn)
3158 rtx olabel, nlabel;
3159 rtx insn;
3161 rtx *loc;
3163 if (GET_CODE (PATTERN (insn)) == PARALLEL)
3164 loc = &XVECEXP (PATTERN (insn), 0, 0);
3165 else
3166 loc = &PATTERN (insn);
3168 redirect_exp_1 (loc, olabel, nlabel, insn);
3169 if (num_validated_changes () == 0)
3170 return 0;
3172 return apply_change_group ();
3175 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
3176 the modifications into the change group. Return false if we did
3177 not see how to do that. */
3180 redirect_jump_1 (jump, nlabel)
3181 rtx jump, nlabel;
3183 int ochanges = num_validated_changes ();
3184 rtx *loc;
3186 if (GET_CODE (PATTERN (jump)) == PARALLEL)
3187 loc = &XVECEXP (PATTERN (jump), 0, 0);
3188 else
3189 loc = &PATTERN (jump);
3191 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
3192 return num_validated_changes () > ochanges;
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel, delete_unused)
     rtx jump, nlabel;
     int delete_unused;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Nothing to do if the jump already goes to NLABEL.  */
  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (olabel, nlabel, jump))
    return 0;

  /* If this is an unconditional branch, delete it from the jump_chain of
     OLABEL and add it to the jump_chain of NLABEL (assuming both labels
     have UID's in range and JUMP_CHAIN is valid).  */
  if (jump_chain && (simplejump_p (jump)
		     || GET_CODE (PATTERN (jump)) == RETURN))
    {
      /* Chain slot 0 collects RETURN jumps.  */
      int label_index = nlabel ? INSN_UID (nlabel) : 0;

      delete_from_jump_chain (jump);
      if (label_index < max_jump_chain
	  && INSN_UID (jump) < max_jump_chain)
	{
	  jump_chain[INSN_UID (jump)] = jump_chain[label_index];
	  jump_chain[label_index] = jump;
	}
    }

  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && GET_CODE (NEXT_INSN (olabel)) == NOTE
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  /* Drop the reference to the old label; delete it if now unused.  */
  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
    delete_insn (olabel);

  return 1;
}
/* Invert the jump condition of the PC set contained in jump insn INSN.
   Accrue the modifications into the change group.  Aborts if INSN has
   no PC set or the set's source is not an IF_THEN_ELSE.  */

static void
invert_exp_1 (insn)
     rtx insn;
{
  register RTX_CODE code;
  rtx x = pc_set (insn);

  if (!x)
    abort ();
  x = SET_SRC (x);

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      register rtx comp = XEXP (x, 0);
      register rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return;
	}

      /* Fall back to swapping the two arms of the IF_THEN_ELSE.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
    }
  else
    abort ();
}
3300 /* Invert the jump condition of conditional jump insn, INSN.
3302 Return 1 if we can do so, 0 if we cannot find a way to do so that
3303 matches a pattern. */
3305 static int
3306 invert_exp (insn)
3307 rtx insn;
3309 invert_exp_1 (insn);
3310 if (num_validated_changes () == 0)
3311 return 0;
3313 return apply_change_group ();
3316 /* Invert the condition of the jump JUMP, and make it jump to label
3317 NLABEL instead of where it jumps now. Accrue changes into the
3318 change group. Return false if we didn't see how to perform the
3319 inversion and redirection. */
3322 invert_jump_1 (jump, nlabel)
3323 rtx jump, nlabel;
3325 int ochanges;
3327 ochanges = num_validated_changes ();
3328 invert_exp_1 (jump);
3329 if (num_validated_changes () == ochanges)
3330 return 0;
3332 return redirect_jump_1 (jump, nlabel);
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.
   DELETE_UNUSED is passed through to redirect_jump.  */

int
invert_jump (jump, nlabel, delete_unused)
     rtx jump, nlabel;
     int delete_unused;
{
  /* We have to either invert the condition and change the label or
     do neither.  Either operation could fail.  We first try to invert
     the jump.  If that succeeds, we try changing the label.  If that fails,
     we invert the jump back to what it was.  */

  if (! invert_exp (jump))
    return 0;

  if (redirect_jump (jump, nlabel, delete_unused))
    {
      /* An inverted jump means that a probability taken becomes a
	 probability not taken.  Subtract the branch probability from the
	 probability base to convert it back to a taken probability.  */

      rtx note = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
      if (note)
	XEXP (note, 0) = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (note, 0)));

      return 1;
    }

  /* Redirection failed: undo the inversion.  */
  if (! invert_exp (jump))
    /* This should just be putting it back the way it was.  */
    abort ();

  return 0;
}
/* Delete the instruction JUMP from any jump chain it might be on.  */

static void
delete_from_jump_chain (jump)
     rtx jump;
{
  int index;
  rtx olabel = JUMP_LABEL (jump);

  /* Handle unconditional jumps.  */
  if (jump_chain && olabel != 0
      && INSN_UID (olabel) < max_jump_chain
      && simplejump_p (jump))
    index = INSN_UID (olabel);
  /* Handle return insns.  */
  else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
    index = 0;			/* Chain slot 0 collects RETURN jumps.  */
  else
    return;

  /* Unlink JUMP: either it heads the chain, or we walk the chain to
     find its predecessor and splice it out.  */
  if (jump_chain[index] == jump)
    jump_chain[index] = jump_chain[INSN_UID (jump)];
  else
    {
      rtx insn;

      for (insn = jump_chain[index];
	   insn != 0;
	   insn = jump_chain[INSN_UID (insn)])
	if (jump_chain[INSN_UID (insn)] == jump)
	  {
	    jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
	    break;
	  }
    }
}
/* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.

   If the old jump target label (before the dispatch table) becomes unused,
   it and the dispatch table may be deleted.  In that case, find the insn
   before the jump references that label and delete it and logical successors
   too.  */

static void
redirect_tablejump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);
  rtx *notep, note, next;

  /* Add this jump to the jump_chain of NLABEL.  */
  if (jump_chain && INSN_UID (nlabel) < max_jump_chain
      && INSN_UID (jump) < max_jump_chain)
    {
      jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
      jump_chain[INSN_UID (nlabel)] = jump;
    }

  /* Registers that die in the tablejump and are mentioned in its old
     pattern (e.g. the table index) will not be used by the plain jump
     we are about to substitute; delete their prior computations and
     drop the notes.  All other notes are kept.  */
  for (notep = &REG_NOTES (jump), note = *notep; note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG
	  || ! reg_mentioned_p (XEXP (note, 0), PATTERN (jump)))
	notep = &XEXP (note, 1);
      else
	{
	  delete_prior_computation (note, jump);
	  *notep = next;
	}
    }

  /* Replace the tablejump pattern with an ordinary jump to NLABEL.  */
  PATTERN (jump) = gen_jump (nlabel);
  JUMP_LABEL (jump) = nlabel;
  ++LABEL_NUSES (nlabel);
  INSN_CODE (jump) = -1;	/* Force re-recognition of the new pattern.  */

  if (--LABEL_NUSES (olabel) == 0)
    {
      delete_labelref_insn (jump, olabel, 0);
      delete_insn (olabel);
    }
}
/* Find the insn referencing LABEL that is a logical predecessor of INSN.
   If we found one, delete it and then delete this insn if DELETE_THIS is
   non-zero.  Return non-zero if INSN or a predecessor references LABEL.  */

static int
delete_labelref_insn (insn, label, delete_this)
     rtx insn, label;
     int delete_this;
{
  int deleted = 0;
  rtx link;

  /* Does INSN itself reference LABEL?  */
  if (GET_CODE (insn) != NOTE
      && reg_mentioned_p (label, PATTERN (insn)))
    {
      if (delete_this)
	{
	  delete_insn (insn);
	  deleted = 1;
	}
      else
	return 1;
    }

  /* Otherwise search INSN's logical predecessors (LOG_LINKS)
     recursively, deleting any predecessor found to reference LABEL.  */
  for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
    if (delete_labelref_insn (XEXP (link, 0), label, 1))
      {
	if (delete_this)
	  {
	    delete_insn (insn);
	    deleted = 1;
	  }
	else
	  return 1;
      }

  return deleted;
}
3497 /* Like rtx_equal_p except that it considers two REGs as equal
3498 if they renumber to the same value and considers two commutative
3499 operations to be the same if the order of the operands has been
3500 reversed.
3502 ??? Addition is not commutative on the PA due to the weird implicit
3503 space register selection rules for memory addresses. Therefore, we
3504 don't consider a + b == b + a.
3506 We could/should make this test a little tighter. Possibly only
3507 disabling it on the PA via some backend macro or only disabling this
3508 case when the PLUS is inside a MEM. */
3511 rtx_renumbered_equal_p (x, y)
3512 rtx x, y;
3514 register int i;
3515 register RTX_CODE code = GET_CODE (x);
3516 register const char *fmt;
3518 if (x == y)
3519 return 1;
3521 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
3522 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
3523 && GET_CODE (SUBREG_REG (y)) == REG)))
3525 int reg_x = -1, reg_y = -1;
3526 int byte_x = 0, byte_y = 0;
3528 if (GET_MODE (x) != GET_MODE (y))
3529 return 0;
3531 /* If we haven't done any renumbering, don't
3532 make any assumptions. */
3533 if (reg_renumber == 0)
3534 return rtx_equal_p (x, y);
3536 if (code == SUBREG)
3538 reg_x = REGNO (SUBREG_REG (x));
3539 byte_x = SUBREG_BYTE (x);
3541 if (reg_renumber[reg_x] >= 0)
3543 reg_x = subreg_regno_offset (reg_renumber[reg_x],
3544 GET_MODE (SUBREG_REG (x)),
3545 byte_x,
3546 GET_MODE (x));
3547 byte_x = 0;
3550 else
3552 reg_x = REGNO (x);
3553 if (reg_renumber[reg_x] >= 0)
3554 reg_x = reg_renumber[reg_x];
3557 if (GET_CODE (y) == SUBREG)
3559 reg_y = REGNO (SUBREG_REG (y));
3560 byte_y = SUBREG_BYTE (y);
3562 if (reg_renumber[reg_y] >= 0)
3564 reg_y = subreg_regno_offset (reg_renumber[reg_y],
3565 GET_MODE (SUBREG_REG (y)),
3566 byte_y,
3567 GET_MODE (y));
3568 byte_y = 0;
3571 else
3573 reg_y = REGNO (y);
3574 if (reg_renumber[reg_y] >= 0)
3575 reg_y = reg_renumber[reg_y];
3578 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
3581 /* Now we have disposed of all the cases
3582 in which different rtx codes can match. */
3583 if (code != GET_CODE (y))
3584 return 0;
3586 switch (code)
3588 case PC:
3589 case CC0:
3590 case ADDR_VEC:
3591 case ADDR_DIFF_VEC:
3592 return 0;
3594 case CONST_INT:
3595 return INTVAL (x) == INTVAL (y);
3597 case LABEL_REF:
3598 /* We can't assume nonlocal labels have their following insns yet. */
3599 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
3600 return XEXP (x, 0) == XEXP (y, 0);
3602 /* Two label-refs are equivalent if they point at labels
3603 in the same position in the instruction stream. */
3604 return (next_real_insn (XEXP (x, 0))
3605 == next_real_insn (XEXP (y, 0)));
3607 case SYMBOL_REF:
3608 return XSTR (x, 0) == XSTR (y, 0);
3610 case CODE_LABEL:
3611 /* If we didn't match EQ equality above, they aren't the same. */
3612 return 0;
3614 default:
3615 break;
3618 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
3620 if (GET_MODE (x) != GET_MODE (y))
3621 return 0;
3623 /* For commutative operations, the RTX match if the operand match in any
3624 order. Also handle the simple binary and unary cases without a loop.
3626 ??? Don't consider PLUS a commutative operator; see comments above. */
3627 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
3628 && code != PLUS)
3629 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3630 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
3631 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
3632 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
3633 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
3634 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
3635 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
3636 else if (GET_RTX_CLASS (code) == '1')
3637 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
3639 /* Compare the elements. If any pair of corresponding elements
3640 fail to match, return 0 for the whole things. */
3642 fmt = GET_RTX_FORMAT (code);
3643 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3645 register int j;
3646 switch (fmt[i])
3648 case 'w':
3649 if (XWINT (x, i) != XWINT (y, i))
3650 return 0;
3651 break;
3653 case 'i':
3654 if (XINT (x, i) != XINT (y, i))
3655 return 0;
3656 break;
3658 case 's':
3659 if (strcmp (XSTR (x, i), XSTR (y, i)))
3660 return 0;
3661 break;
3663 case 'e':
3664 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
3665 return 0;
3666 break;
3668 case 'u':
3669 if (XEXP (x, i) != XEXP (y, i))
3670 return 0;
3671 /* fall through. */
3672 case '0':
3673 break;
3675 case 'E':
3676 if (XVECLEN (x, i) != XVECLEN (y, i))
3677 return 0;
3678 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3679 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
3680 return 0;
3681 break;
3683 default:
3684 abort ();
3687 return 1;
3690 /* If X is a hard register or equivalent to one or a subregister of one,
3691 return the hard register number. If X is a pseudo register that was not
3692 assigned a hard register, return the pseudo register number. Otherwise,
3693 return -1. Any rtx is valid for X. */
3696 true_regnum (x)
3697 rtx x;
3699 if (GET_CODE (x) == REG)
3701 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
3702 return reg_renumber[REGNO (x)];
3703 return REGNO (x);
3705 if (GET_CODE (x) == SUBREG)
3707 int base = true_regnum (SUBREG_REG (x));
3708 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
3709 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
3710 GET_MODE (SUBREG_REG (x)),
3711 SUBREG_BYTE (x), GET_MODE (x));
3713 return -1;
/* Optimize code of the form:

	for (x = a[i]; x; ...)
	  ...
	for (x = a[i]; x; ...)
	  ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

/* Indexed by pseudo register number; -1 means "no pending equivalence".
   Allocated and reset per candidate branch in thread_jumps.  */
static int *same_regs;

/* Count of entries in same_regs that are currently != -1.  */
static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

/* One flag byte per register number, set by mark_modified_reg.  */
static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
3759 /* Called via note_stores on each insn between the target of the first
3760 branch and the second branch. It marks any changed registers. */
3762 static void
3763 mark_modified_reg (dest, x, data)
3764 rtx dest;
3765 rtx x ATTRIBUTE_UNUSED;
3766 void *data ATTRIBUTE_UNUSED;
3768 int regno;
3769 unsigned int i;
3771 if (GET_CODE (dest) == SUBREG)
3772 dest = SUBREG_REG (dest);
3774 if (GET_CODE (dest) == MEM)
3775 modified_mem = 1;
3777 if (GET_CODE (dest) != REG)
3778 return;
3780 regno = REGNO (dest);
3781 if (regno >= FIRST_PSEUDO_REGISTER)
3782 modified_regs[regno] = 1;
3783 else
3784 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
3785 modified_regs[regno + i] = 1;
/* F is the first insn in the chain of insns.  */

void
thread_jumps (f, max_reg, flag_before_loop)
     rtx f;
     int max_reg;
     int flag_before_loop;
{
  /* Basic algorithm is to find a conditional branch,
     the label it may branch to, and the branch after
     that label.  If the two branches test the same condition,
     walk back from both branch paths until the insn patterns
     differ, or code labels are hit.  If we make it back to
     the target of the first branch, then we know that the first branch
     will either always succeed or always fail depending on the relative
     senses of the two branches.  So adjust the first branch accordingly
     in this case.  */

  rtx label, b1, b2, t1, t2;
  enum rtx_code code1, code2;
  rtx b1op0, b1op1, b2op0, b2op1;
  int changed = 1;
  int i;
  int *all_reset;
  enum rtx_code reversed_code1, reversed_code2;

  /* Allocate register tables and quick-reset table.  ALL_RESET is a
     preinitialized "-1 everywhere" image used to reset same_regs with
     a single memcpy per candidate branch.  */
  modified_regs = (char *) xmalloc (max_reg * sizeof (char));
  same_regs = (int *) xmalloc (max_reg * sizeof (int));
  all_reset = (int *) xmalloc (max_reg * sizeof (int));
  for (i = 0; i < max_reg; i++)
    all_reset[i] = -1;

  /* Iterate until no branch gets redirected, since one redirection can
     expose further opportunities.  */
  while (changed)
    {
      changed = 0;

      for (b1 = f; b1; b1 = NEXT_INSN (b1))
	{
	  rtx set;
	  rtx set2;

	  /* Get to a candidate branch insn.  */
	  if (GET_CODE (b1) != JUMP_INSN
	      || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
	    continue;

	  /* Reset the per-candidate modification and equivalence state.  */
	  memset (modified_regs, 0, max_reg * sizeof (char));
	  modified_mem = 0;

	  memcpy (same_regs, all_reset, max_reg * sizeof (int));
	  num_same_regs = 0;

	  label = JUMP_LABEL (b1);

	  /* Look for a branch after the target.  Record any registers and
	     memory modified between the target and the branch.  Stop when we
	     get to a label since we can't know what was changed there.  */
	  for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
	    {
	      if (GET_CODE (b2) == CODE_LABEL)
		break;

	      else if (GET_CODE (b2) == JUMP_INSN)
		{
		  /* If this is an unconditional jump and is the only use of
		     its target label, we can follow it.  */
		  if (any_uncondjump_p (b2)
		      && onlyjump_p (b2)
		      && JUMP_LABEL (b2) != 0
		      && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
		    {
		      b2 = JUMP_LABEL (b2);
		      continue;
		    }
		  else
		    break;
		}

	      if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
		continue;

	      /* A call clobbers memory and every call-used, non-fixed
		 hard register other than the stack/frame/arg pointers.  */
	      if (GET_CODE (b2) == CALL_INSN)
		{
		  modified_mem = 1;
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    if (call_used_regs[i] && ! fixed_regs[i]
			&& i != STACK_POINTER_REGNUM
			&& i != FRAME_POINTER_REGNUM
			&& i != HARD_FRAME_POINTER_REGNUM
			&& i != ARG_POINTER_REGNUM)
		      modified_regs[i] = 1;
		}

	      note_stores (PATTERN (b2), mark_modified_reg, NULL);
	    }

	  /* Check the next candidate branch insn from the label
	     of the first.  */
	  if (b2 == 0
	      || GET_CODE (b2) != JUMP_INSN
	      || b2 == b1
	      || !any_condjump_p (b2)
	      || !onlyjump_p (b2))
	    continue;
	  set = pc_set (b1);
	  set2 = pc_set (b2);

	  /* Get the comparison codes and operands, reversing the
	     codes if appropriate.  If we don't have comparison codes,
	     we can't do anything.  A branch of the form
	     (if_then_else cond (pc) (label)) falls through on COND, so
	     its effective condition is the reverse of COND.  */
	  b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
	  b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
	  code1 = GET_CODE (XEXP (SET_SRC (set), 0));
	  reversed_code1 = code1;
	  if (XEXP (SET_SRC (set), 1) == pc_rtx)
	    code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
	  else
	    reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);

	  b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
	  b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
	  code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
	  reversed_code2 = code2;
	  if (XEXP (SET_SRC (set2), 1) == pc_rtx)
	    code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
	  else
	    reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);

	  /* If they test the same things and knowing that B1 branches
	     tells us whether or not B2 branches, check if we
	     can thread the branch.  */
	  if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
	      && rtx_equal_for_thread_p (b1op1, b2op1, b2)
	      && (comparison_dominates_p (code1, code2)
		  || comparison_dominates_p (code1, reversed_code2)))
	    {
	      /* Walk the two condition computations backwards in lock
		 step, insisting on identical patterns.  */
	      t1 = prev_nonnote_insn (b1);
	      t2 = prev_nonnote_insn (b2);

	      while (t1 != 0 && t2 != 0)
		{
		  if (t2 == label)
		    {
		      /* We have reached the target of the first branch.
			 If there are no pending register equivalents,
			 we know that this branch will either always
			 succeed (if the senses of the two branches are
			 the same) or always fail (if not).  */
		      rtx new_label;

		      if (num_same_regs != 0)
			break;

		      if (comparison_dominates_p (code1, code2))
			new_label = JUMP_LABEL (b2);
		      else
			new_label = get_label_after (b2);

		      if (JUMP_LABEL (b1) != new_label)
			{
			  rtx prev = PREV_INSN (new_label);

			  if (flag_before_loop
			      && GET_CODE (prev) == NOTE
			      && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
			    {
			      /* Don't thread to the loop label.  If a loop
				 label is reused, loop optimization will
				 be disabled for that loop.  */
			      new_label = gen_label_rtx ();
			      emit_label_after (new_label, PREV_INSN (prev));
			    }
			  changed |= redirect_jump (b1, new_label, 1);
			}
		      break;
		    }

		  /* If either of these is not a normal insn (it might be
		     a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail.  (NOTEs
		     have already been skipped above.)  Similarly, fail
		     if the insns are different.  */
		  if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
		      || recog_memoized (t1) != recog_memoized (t2)
		      || ! rtx_equal_for_thread_p (PATTERN (t1),
						   PATTERN (t2), t2))
		    break;

		  t1 = prev_nonnote_insn (t1);
		  t2 = prev_nonnote_insn (t2);
		}
	    }
	}
    }

  /* Clean up.  */
  free (modified_regs);
  free (same_regs);
  free (all_reset);
}
/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.

   NOTE: this predicate has side effects -- it records pseudo-register
   equivalences in the file-scope same_regs table (see thread_jumps),
   so calls on the X/"first branch" and Y/"second branch" sides must
   not be swapped.  */

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register const char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For floating-point, consider everything unequal.  This is a bit
     pessimistic, but this pass would only rarely do anything for FP
     anyway.  */
  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	     && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
	    || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
		&& rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	    && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

  /* Handle special-cases first.  */
  switch (code)
    {
    case REG:
      /* Same register, and not modified between the two branches:
	 trivially equal.  */
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
	return 1;

      /* If neither is user variable or hard register, check for possible
	 equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  || REGNO (y) < FIRST_PSEUDO_REGISTER)
	return 0;

      if (same_regs[REGNO (x)] == -1)
	{
	  /* Record a new pending equivalence X <-> Y.  */
	  same_regs[REGNO (x)] = REGNO (y);
	  num_same_regs++;

	  /* If this is the first time we are seeing a register on the `Y'
	     side, see if it is the last use.  If not, we can't thread the
	     jump, so mark it as not equivalent.  */
	  if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
	    return 0;

	  return 1;
	}
      else
	return (same_regs[REGNO (x)] == (int) REGNO (y));

      break;

    case MEM:
      /* If memory modified or either volatile, not equivalent.
	 Else, check address.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
	 Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
	  && GET_CODE (SET_DEST (y)) == REG)
	{
	  if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
	    {
	      same_regs[REGNO (SET_DEST (x))] = -1;
	      num_same_regs--;
	    }
	  else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
	    return 0;
	}
      else
	{
	  if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	    return 0;
	}

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  if (x == y)
    return 1;

  /* Generic structural walk for everything not special-cased above.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j), yinsn) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}