/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 88, 89, 91-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This is the jump-optimization pass of the compiler.
   It is run two or three times: once before cse, sometimes once after cse,
   and once after reload (before final).

   jump_optimize deletes unreachable code and labels that are not used.
   It also deletes jumps that jump to the following insn,
   and simplifies jumps around unconditional jumps and jumps
   to unconditional jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   Optionally, cross-jumping can be done.  Currently it is done
   only the last time (when after reload and before final).
   In fact, the code for cross-jumping now assumes that register
   allocation has been done, since it uses `rtx_renumbered_equal_p'.

   Jump optimization is done after cse when cse's constant-propagation
   causes jumps to become unconditional or to be deleted.

   Unreachable loops are not detected here, because the labels
   have references and the insns appear reachable from the labels.
   find_basic_blocks in flow.c finds and deletes such loops.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"
/* ??? Eventually must record somehow the labels used by jumps
   from nested functions.  */
/* Pre-record the next or previous real insn for each label?
   No, this pass is very fast anyway.  */
/* Condense consecutive labels?
   This would make life analysis faster, maybe.  */
/* Optimize  jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */
/* Vector indexed by uid.
   For each CODE_LABEL, index by its uid to get first unconditional jump
   that jumps to the label.
   For each JUMP_INSN, index by its uid to get the next unconditional jump
   that jumps to the same label.
   Element 0 is the start of a chain of all return insns.
   (It is safe to use element 0 because insn uid 0 is not used.)  */
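/* For instance (an illustrative sketch, not from the original comment):
   if unconditional jumps J1 and J2 both go to label L, then
   jump_chain[INSN_UID (L)] == J1 and jump_chain[INSN_UID (J1)] == J2,
   and the chain ends at the first zero entry.  */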
static rtx *jump_chain;

/* Maximum index in jump_chain.  */

static int max_jump_chain;

/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
int can_reach_end;
/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */
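/* For example (a hypothetical stack-register scenario): on a machine where
   floating point values live on a register stack, whether the value in a
   stack reg dies at an insn decides whether that insn pops the reg.  Two
   textually identical sequences with different REG_DEAD notes are then not
   interchangeable, so cross-jumping must compare the notes.  */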
static int cross_jump_death_matters = 0;

static int init_label_info PROTO((rtx));
static void delete_barrier_successors PROTO((rtx));
static void mark_all_labels PROTO((rtx, int));
static rtx delete_unreferenced_labels PROTO((rtx));
static void delete_noop_moves PROTO((rtx));
static int calculate_can_reach_end PROTO((rtx, int, int));
static int duplicate_loop_exit_test PROTO((rtx));
static void find_cross_jump PROTO((rtx, rtx, int, rtx *, rtx *));
static void do_cross_jump PROTO((rtx, rtx, rtx));
static int jump_back_p PROTO((rtx, rtx));
static int tension_vector_labels PROTO((rtx, int));
static void mark_jump_label PROTO((rtx, rtx, int));
static void delete_computation PROTO((rtx));
static void delete_from_jump_chain PROTO((rtx));
static int delete_labelref_insn PROTO((rtx, rtx, int));
static void mark_modified_reg PROTO((rtx, rtx));
static void redirect_tablejump PROTO((rtx, rtx));
static void jump_optimize_1 PROTO ((rtx, int, int, int, int));
#if ! defined(HAVE_cc0) && ! defined(HAVE_conditional_arithmetic)
static rtx find_insert_position PROTO((rtx, rtx));
#endif
static int returnjump_p_1 PROTO((rtx *, void *));
static void delete_prior_computation PROTO((rtx, rtx));
/* Main external entry point into the jump optimizer.  See comments before
   jump_optimize_1 for descriptions of the arguments.  */
void
jump_optimize (f, cross_jump, noop_moves, after_regscan)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
{
  jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, 0);
}

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  jump_optimize_1 (f, 0, 0, 0, 1);
}
/* Delete no-op jumps and optimize jumps to jumps
   and jumps around jumps.
   Delete unused labels and unreachable code.

   If CROSS_JUMP is 1, detect matching code
   before a jump and its destination and unify them.
   If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.

   If NOOP_MOVES is nonzero, delete no-op move insns.

   If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
   after regscan, and it is safe to use regno_first_uid and regno_last_uid.

   If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
   and JUMP_LABEL field for jumping insns.

   If `optimize' is zero, don't change any code,
   just determine whether control drops off the end of the function.
   This case occurs when we have -W and not -O.
   It works because `delete_insn' checks the value of `optimize'
   and refrains from actually deleting when that is 0.  */
static void
jump_optimize_1 (f, cross_jump, noop_moves, after_regscan, mark_labels_only)
     rtx f;
     int cross_jump;
     int noop_moves;
     int after_regscan;
     int mark_labels_only;
{
  register rtx insn, next;
  int changed;
  int old_max_reg;
  int first = 1;
  int max_uid = 0;
  rtx last_insn;

  cross_jump_death_matters = (cross_jump == 2);
  max_uid = init_label_info (f) + 1;

  /* If we are performing cross jump optimizations, then initialize
     tables mapping UIDs to EH regions to avoid incorrect movement
     of insns from one EH region to another.  */
  if (flag_exceptions && cross_jump)
    init_insn_eh_region (f, max_uid);

  delete_barrier_successors (f);

  /* Leave some extra room for labels and duplicate exit test insns
     we make.  */
  max_jump_chain = max_uid * 14 / 10;
  jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
  bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));

  mark_all_labels (f, cross_jump);

  /* Keep track of labels used from static data;
     they cannot ever be deleted.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  check_exception_handler_labels ();

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    LABEL_NUSES (XEXP (insn, 0))++;

  /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
     notes and recompute LABEL_NUSES.  */
  if (mark_labels_only)
    return;

  exception_optimize ();

  last_insn = delete_unreferenced_labels (f);

  if (optimize == 0)
    {
      /* CAN_REACH_END is persistent for each function.  Once set it should
	 not be cleared.  This is especially true for the case where we
	 delete the NOTE_FUNCTION_END note.  CAN_REACH_END is cleared by
	 the front-end before compiling each function.  */
      if (calculate_can_reach_end (last_insn, 1, 0))
	can_reach_end = 1;

      /* Zero the "deleted" flag of all the "deleted" insns.  */
      for (insn = f; insn; insn = NEXT_INSN (insn))
	INSN_DELETED_P (insn) = 0;

      /* Show that the jump chain is not valid.  */
      jump_chain = 0;
      return;
    }

#ifdef HAVE_return
  if (HAVE_return)
    {
      /* If we fall through to the epilogue, see if we can insert a RETURN insn
	 in front of it.  If the machine allows it at this point (we might be
	 after reload for a leaf routine), it will improve optimization for it
	 to be there.  */
      insn = get_last_insn ();
      while (insn && GET_CODE (insn) == NOTE)
	insn = PREV_INSN (insn);

      if (insn && GET_CODE (insn) != BARRIER)
	{
	  emit_jump_insn (gen_return ());
	  emit_barrier ();
	}
    }
#endif

  if (noop_moves)
    delete_noop_moves (f);

  /* If we haven't yet gotten to reload and we have just run regscan,
     delete any insn that sets a register that isn't used elsewhere.
     This helps some of the optimizations below by having fewer insns
     being jumped around.  */

  if (! reload_completed && after_regscan)
    for (insn = f; insn; insn = next)
      {
	rtx set = single_set (insn);

	next = NEXT_INSN (insn);

	if (set && GET_CODE (SET_DEST (set)) == REG
	    && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
	    && REGNO_FIRST_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    /* We use regno_last_note_uid so as not to delete the setting
	       of a reg that's used in notes.  A subsequent optimization
	       might arrange to use that reg for real.  */
	    && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set))) == INSN_UID (insn)
	    && ! side_effects_p (SET_SRC (set))
	    && ! find_reg_note (insn, REG_RETVAL, 0)
	    /* An ADDRESSOF expression can turn into a use of the internal arg
	       pointer, so do not delete the initialization of the internal
	       arg pointer yet.  If it is truly dead, flow will delete the
	       initializing insn.  */
	    && SET_DEST (set) != current_function_internal_arg_pointer)
	  delete_insn (insn);
      }
  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  changed = 1;
  old_max_reg = max_reg_num ();
  while (changed)
    {
      changed = 0;

      for (insn = f; insn; insn = next)
	{
	  rtx reallabelprev;
	  rtx temp, temp1, temp2, temp3, temp4, temp5, temp6;
	  rtx nlabel;
	  int this_is_simplejump, this_is_condjump, reversep = 0;
	  int this_is_condjump_in_parallel;

	  next = NEXT_INSN (insn);

	  /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
	     jump.  Try to optimize by duplicating the loop exit test if so.
	     This is only safe immediately after regscan, because it uses
	     the values of regno_first_uid and regno_last_uid.  */
	  if (after_regscan && GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      && (temp1 = next_nonnote_insn (insn)) != 0
	      && simplejump_p (temp1))
	    {
	      temp = PREV_INSN (insn);
	      if (duplicate_loop_exit_test (insn))
		{
		  changed = 1;
		  next = NEXT_INSN (temp);
		  continue;
		}
	    }

	  if (GET_CODE (insn) != JUMP_INSN)
	    continue;

	  this_is_simplejump = simplejump_p (insn);
	  this_is_condjump = condjump_p (insn);
	  this_is_condjump_in_parallel = condjump_in_parallel_p (insn);

	  /* Tension the labels in dispatch tables.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 0);
	  if (GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    changed |= tension_vector_labels (PATTERN (insn), 1);

	  /* See if this jump goes to another jump and redirect if so.  */
	  nlabel = follow_jumps (JUMP_LABEL (insn));
	  if (nlabel != JUMP_LABEL (insn))
	    changed |= redirect_jump (insn, nlabel);

	  /* If a dispatch table always goes to the same place,
	     get rid of it and replace the insn that uses it.  */

	  if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	    {
	      int i;
	      rtx pat = PATTERN (insn);
	      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
	      int len = XVECLEN (pat, diff_vec_p);
	      rtx dispatch = prev_real_insn (insn);

	      for (i = 0; i < len; i++)
		if (XEXP (XVECEXP (pat, diff_vec_p, i), 0)
		    != XEXP (XVECEXP (pat, diff_vec_p, 0), 0))
		  break;
	      if (i == len
		  && dispatch != 0
		  && GET_CODE (dispatch) == JUMP_INSN
		  && JUMP_LABEL (dispatch) != 0
		  /* Don't mess with a casesi insn.  */
		  && !(GET_CODE (PATTERN (dispatch)) == SET
		       && (GET_CODE (SET_SRC (PATTERN (dispatch)))
			   == IF_THEN_ELSE))
		  && next_real_insn (JUMP_LABEL (dispatch)) == insn)
		{
		  redirect_tablejump (dispatch,
				      XEXP (XVECEXP (pat, diff_vec_p, 0), 0));
		  changed = 1;
		}
	    }

	  /* If a jump references the end of the function, try to turn
	     it into a RETURN insn, possibly a conditional one.  */
	  if (JUMP_LABEL (insn) != 0
	      && (next_active_insn (JUMP_LABEL (insn)) == 0
		  || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn))))
		     == RETURN))
	    changed |= redirect_jump (insn, NULL_RTX);

	  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

	  /* Detect jump to following insn.  */
	  if (reallabelprev == insn && this_is_condjump)
	    {
	      next = next_real_insn (JUMP_LABEL (insn));
	      delete_jump (insn);
	      changed = 1;
	      continue;
	    }

	  /* Detect a conditional jump going to the same place
	     as an immediately following unconditional jump.  */
	  else if (this_is_condjump
		   && (temp = next_active_insn (insn)) != 0
		   && simplejump_p (temp)
		   && (next_active_insn (JUMP_LABEL (insn))
		       == next_active_insn (JUMP_LABEL (temp))))
	    {
	      /* Don't mess up test coverage analysis.  */
	      temp2 = temp;
	      if (flag_test_coverage && !reload_completed)
		for (temp2 = insn; temp2 != temp; temp2 = NEXT_INSN (temp2))
		  if (GET_CODE (temp2) == NOTE && NOTE_LINE_NUMBER (temp2) > 0)
		    break;

	      if (temp2 == temp)
		{
		  delete_jump (insn);
		  changed = 1;
		  continue;
		}
	    }

	  /* Detect a conditional jump jumping over an unconditional jump.  */

	  else if ((this_is_condjump || this_is_condjump_in_parallel)
		   && ! this_is_simplejump
		   && reallabelprev != 0
		   && GET_CODE (reallabelprev) == JUMP_INSN
		   && prev_active_insn (reallabelprev) == insn
		   && no_labels_between_p (insn, reallabelprev)
		   && simplejump_p (reallabelprev))
	    {
	      /* When we invert the unconditional jump, we will be
		 decrementing the usage count of its old label.
		 Make sure that we don't delete it now because that
		 might cause the following code to be deleted.  */
	      rtx prev_uses = prev_nonnote_insn (reallabelprev);
	      rtx prev_label = JUMP_LABEL (insn);

	      if (prev_label)
		++LABEL_NUSES (prev_label);

	      if (invert_jump (insn, JUMP_LABEL (reallabelprev)))
		{
		  /* It is very likely that if there are USE insns before
		     this jump, they hold REG_DEAD notes.  These REG_DEAD
		     notes are no longer valid due to this optimization,
		     and will cause the life analysis run by following
		     passes (notably delayed-branch scheduling) to think
		     that these registers are dead when they are not.

		     To prevent this trouble, we just remove the USE insns
		     from the insn chain.  */

		  while (prev_uses && GET_CODE (prev_uses) == INSN
			 && GET_CODE (PATTERN (prev_uses)) == USE)
		    {
		      rtx useless = prev_uses;
		      prev_uses = prev_nonnote_insn (prev_uses);
		      delete_insn (useless);
		    }

		  delete_insn (reallabelprev);
		  changed = 1;
		}

	      /* We can now safely delete the label if it is unreferenced
		 since the delete_insn above has deleted the BARRIER.  */
	      if (prev_label && --LABEL_NUSES (prev_label) == 0)
		delete_insn (prev_label);

	      next = NEXT_INSN (insn);
	    }

	  /* If we have an unconditional jump preceded by a USE, try to put
	     the USE before the target and jump there.  This simplifies many
	     of the optimizations below since we don't have to worry about
	     dealing with these USE insns.  We only do this if the label
	     being branched to already has the identical USE or if code
	     never falls through to that label.  */

	  else if (this_is_simplejump
		   && (temp = prev_nonnote_insn (insn)) != 0
		   && GET_CODE (temp) == INSN
		   && GET_CODE (PATTERN (temp)) == USE
		   && (temp1 = prev_nonnote_insn (JUMP_LABEL (insn))) != 0
		   && (GET_CODE (temp1) == BARRIER
		       || (GET_CODE (temp1) == INSN
			   && rtx_equal_p (PATTERN (temp), PATTERN (temp1))))
		   /* Don't do this optimization if we have a loop containing
		      only the USE instruction, and the loop start label has
		      a usage count of 1.  This is because we will redo this
		      optimization every time through the outer loop, and jump
		      opt will never exit.  */
		   && ! ((temp2 = prev_nonnote_insn (temp)) != 0
			 && temp2 == JUMP_LABEL (insn)
			 && LABEL_NUSES (temp2) == 1))
	    {
	      if (GET_CODE (temp1) == BARRIER)
		{
		  emit_insn_after (PATTERN (temp), temp1);
		  temp1 = NEXT_INSN (temp1);
		}

	      delete_insn (temp);
	      redirect_jump (insn, get_label_before (temp1));
	      reallabelprev = prev_real_insn (temp1);
	      changed = 1;
	      next = NEXT_INSN (insn);
	    }

	  /* Simplify   if (...) x = a; else x = b; by converting it
	     to         x = b; if (...) x = a;
	     if B is sufficiently simple, the test doesn't involve X,
	     and nothing in the test modifies B or X.

	     If we have small register classes, we also can't do this if X
	     is a hard register.

	     If the "x = b;" insn has any REG_NOTES, we don't do this because
	     of the possibility that we are running after CSE and there is a
	     REG_EQUAL note that is only valid if the branch has already been
	     taken.  If we move the insn with the REG_EQUAL note, we may
	     fold the comparison to always be false in a later CSE pass.
	     (We could also delete the REG_NOTES when moving the insn, but it
	     seems simpler to not move it.)  An exception is that we can move
	     the insn if the only note is a REG_EQUAL or REG_EQUIV whose
	     value is the same as "b".

	     INSN is the branch over the `else' part.

	     We set:

	     TEMP to the jump insn preceding "x = a;"
	     TEMP1 to X
	     TEMP2 to the insn that sets "x = b;"
	     TEMP3 to the insn that sets "x = a;"
	     TEMP4 to the set of "x = b";  */
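	  /* As an illustration (insn roles as above, labels invented for
	     this sketch), the stream being matched is roughly

		TEMP:	if (cond) goto L1	-- skips over "x = a;"
		TEMP3:	x = a
		INSN:	goto L2
	     L1:
		TEMP2:	x = b
	     L2:	...

	     and the transformation moves TEMP2 up in front of TEMP and
	     deletes INSN, yielding "x = b; if (cond) skip x = a;".  */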
	  if (this_is_simplejump
	      && (temp3 = prev_active_insn (insn)) != 0
	      && GET_CODE (temp3) == INSN
	      && (temp4 = single_set (temp3)) != 0
	      && GET_CODE (temp1 = SET_DEST (temp4)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && (temp2 = next_active_insn (insn)) != 0
	      && GET_CODE (temp2) == INSN
	      && (temp4 = single_set (temp2)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp1)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && (REG_NOTES (temp2) == 0
		  || ((REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUAL
		       || REG_NOTE_KIND (REG_NOTES (temp2)) == REG_EQUIV)
		      && XEXP (REG_NOTES (temp2), 1) == 0
		      && rtx_equal_p (XEXP (REG_NOTES (temp2), 0),
				      SET_SRC (temp4))))
	      && (temp = prev_active_insn (temp3)) != 0
	      && condjump_p (temp) && ! simplejump_p (temp)
	      /* TEMP must skip over the "x = a;" insn.  */
	      && prev_real_insn (JUMP_LABEL (temp)) == insn
	      && no_labels_between_p (insn, JUMP_LABEL (temp))
	      /* There must be no other entries to the "x = b;" insn.  */
	      && no_labels_between_p (JUMP_LABEL (temp), temp2)
	      /* INSN must either branch to the insn after TEMP2 or the insn
		 after TEMP2 must branch to the same place as INSN.  */
	      && (reallabelprev == temp2
		  || ((temp5 = next_active_insn (temp2)) != 0
		      && simplejump_p (temp5)
		      && JUMP_LABEL (temp5) == JUMP_LABEL (insn))))
	    {
	      /* The test expression, X, may be a complicated test with
		 multiple branches.  See if we can find all the uses of
		 the label that TEMP branches to without hitting a CALL_INSN
		 or a jump to somewhere else.  */
	      rtx target = JUMP_LABEL (temp);
	      int nuses = LABEL_NUSES (target);
	      rtx p;
#ifdef HAVE_cc0
	      rtx q;
#endif

	      /* Set P to the first jump insn that goes around "x = a;".  */
	      for (p = temp; nuses && p; p = prev_nonnote_insn (p))
		{
		  if (GET_CODE (p) == JUMP_INSN)
		    {
		      if (condjump_p (p) && ! simplejump_p (p)
			  && JUMP_LABEL (p) == target)
			{
			  nuses--;
			  if (nuses == 0)
			    break;
			}
		      else
			break;
		    }
		  else if (GET_CODE (p) == CALL_INSN)
		    break;
		}

#ifdef HAVE_cc0
	      /* We cannot insert anything between a set of cc and its use
		 so if P uses cc0, we must back up to the previous insn.  */
	      q = prev_nonnote_insn (p);
	      if (q && GET_RTX_CLASS (GET_CODE (q)) == 'i'
		  && sets_cc0_p (PATTERN (q)))
		p = q;
#endif

	      if (p)
		p = PREV_INSN (p);

	      /* If we found all the uses and there was no data conflict, we
		 can move the assignment unless we can branch into the middle
		 from somewhere.  */
	      if (nuses == 0 && p
		  && no_labels_between_p (p, insn)
		  && ! reg_referenced_between_p (temp1, p, NEXT_INSN (temp3))
		  && ! reg_set_between_p (temp1, p, temp3)
		  && (GET_CODE (SET_SRC (temp4)) == CONST_INT
		      || ! modified_between_p (SET_SRC (temp4), p, temp2))
		  /* Verify that registers used by the jump are not clobbered
		     by the instruction being moved.  */
		  && ! regs_set_between_p (PATTERN (temp),
					   PREV_INSN (temp2),
					   NEXT_INSN (temp2)))
		{
		  emit_insn_after_with_line_notes (PATTERN (temp2), p, temp2);
		  delete_insn (temp2);

		  /* Set NEXT to an insn that we know won't go away.  */
		  next = next_active_insn (insn);

		  /* Delete the jump around the set.  Note that we must do
		     this before we redirect the test jumps so that it won't
		     delete the code immediately following the assignment
		     we moved (which might be a jump).  */

		  delete_insn (insn);

		  /* We either have two consecutive labels or a jump to
		     a jump, so adjust all the JUMP_INSNs to branch to where
		     INSN branches to.  */
		  for (p = NEXT_INSN (p); p != next; p = NEXT_INSN (p))
		    if (GET_CODE (p) == JUMP_INSN)
		      redirect_jump (p, target);

		  changed = 1;
		  next = NEXT_INSN (insn);
		  continue;
		}
	    }

	  /* Simplify   if (...) { x = a; goto l; } x = b; by converting it
	     to         x = a; if (...) goto l; x = b;
	     if A is sufficiently simple, the test doesn't involve X,
	     and nothing in the test modifies A or X.

	     If we have small register classes, we also can't do this if X
	     is a hard register.

	     If the "x = a;" insn has any REG_NOTES, we don't do this because
	     of the possibility that we are running after CSE and there is a
	     REG_EQUAL note that is only valid if the branch has already been
	     taken.  If we move the insn with the REG_EQUAL note, we may
	     fold the comparison to always be false in a later CSE pass.
	     (We could also delete the REG_NOTES when moving the insn, but it
	     seems simpler to not move it.)  An exception is that we can move
	     the insn if the only note is a REG_EQUAL or REG_EQUIV whose
	     value is the same as "a".

	     INSN is the goto.

	     We set:

	     TEMP to the jump insn preceding "x = a;"
	     TEMP1 to X
	     TEMP2 to the insn that sets "x = b;"
	     TEMP3 to the insn that sets "x = a;"
	     TEMP4 to the set of "x = a";  */
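	  /* An illustrative sketch (labels invented):  here the matched
	     stream is roughly

		TEMP:	if (cond) goto L1	-- skips over "x = a;"
		TEMP3:	x = a
		INSN:	goto l
	     L1:
		TEMP2:	x = b

	     and we hoist "x = a;" above TEMP while inverting TEMP and
	     redirecting it to `l', giving "x = a; if (!cond) goto l;
	     x = b;".  */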
	  if (this_is_simplejump
	      && (temp2 = next_active_insn (insn)) != 0
	      && GET_CODE (temp2) == INSN
	      && (temp4 = single_set (temp2)) != 0
	      && GET_CODE (temp1 = SET_DEST (temp4)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && (temp3 = prev_active_insn (insn)) != 0
	      && GET_CODE (temp3) == INSN
	      && (temp4 = single_set (temp3)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp1)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && (REG_NOTES (temp3) == 0
		  || ((REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUAL
		       || REG_NOTE_KIND (REG_NOTES (temp3)) == REG_EQUIV)
		      && XEXP (REG_NOTES (temp3), 1) == 0
		      && rtx_equal_p (XEXP (REG_NOTES (temp3), 0),
				      SET_SRC (temp4))))
	      && (temp = prev_active_insn (temp3)) != 0
	      && condjump_p (temp) && ! simplejump_p (temp)
	      /* TEMP must skip over the "x = a;" insn.  */
	      && prev_real_insn (JUMP_LABEL (temp)) == insn
	      && no_labels_between_p (temp, insn))
	    {
	      rtx prev_label = JUMP_LABEL (temp);
	      rtx insert_after = prev_nonnote_insn (temp);

#ifdef HAVE_cc0
	      /* We cannot insert anything between a set of cc and its use.  */
	      if (insert_after && GET_RTX_CLASS (GET_CODE (insert_after)) == 'i'
		  && sets_cc0_p (PATTERN (insert_after)))
		insert_after = prev_nonnote_insn (insert_after);
#endif
	      ++LABEL_NUSES (prev_label);

	      if (insert_after
		  && no_labels_between_p (insert_after, temp)
		  && ! reg_referenced_between_p (temp1, insert_after, temp3)
		  && ! reg_referenced_between_p (temp1, temp3,
						 NEXT_INSN (temp2))
		  && ! reg_set_between_p (temp1, insert_after, temp)
		  && ! modified_between_p (SET_SRC (temp4), insert_after, temp)
		  /* Verify that registers used by the jump are not clobbered
		     by the instruction being moved.  */
		  && ! regs_set_between_p (PATTERN (temp),
					   PREV_INSN (temp3),
					   NEXT_INSN (temp3))
		  && invert_jump (temp, JUMP_LABEL (insn)))
		{
		  emit_insn_after_with_line_notes (PATTERN (temp3),
						   insert_after, temp3);
		  delete_insn (temp3);
		  delete_insn (insn);
		  /* Set NEXT to an insn that we know won't go away.  */
		  next = temp2;
		  changed = 1;
		}
	      if (prev_label && --LABEL_NUSES (prev_label) == 0)
		delete_insn (prev_label);
	      if (changed)
		continue;
	    }

#if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)

	  /* If we have if (...) x = exp;  and branches are expensive,
	     EXP is a single insn, does not have any side effects, cannot
	     trap, and is not too costly, convert this to
	     t = exp; if (...) x = t;

	     Don't do this when we have CC0 because it is unlikely to help
	     and we'd need to worry about where to place the new insn and
	     the potential for conflicts.  We also can't do this when we have
	     notes on the insn for the same reason as above.

	     If we have conditional arithmetic, this will make this
	     harder to optimize later and isn't needed, so don't do it
	     in that case either.

	     We set:

	     TEMP to the "x = exp;" insn.
	     TEMP1 to the single set in the "x = exp;" insn.
	     TEMP2 to "x".  */
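	  /* For instance (purely illustrative), with BRANCH_COST >= 3,

		if (cond) x = y + z;

	     is rewritten as

		t = y + z;		-- T is a fresh pseudo
		if (cond) x = t;

	     so the conditionalized part becomes a plain register copy,
	     which later code can turn into a conditional move or
	     store-flag sequence.  */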
	  if (! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 3
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (reallabelprev == temp
		  || ((temp2 = next_active_insn (temp)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && GET_CODE (SET_SRC (temp1)) != REG
	      && GET_CODE (SET_SRC (temp1)) != SUBREG
	      && GET_CODE (SET_SRC (temp1)) != CONST_INT
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp3 = find_insert_position (insn, temp))
		  && validate_change (temp, &SET_DEST (temp1), new, 0))
		{
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp3), temp);
		  delete_insn (temp);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp3, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }

	  /* Similarly, if it takes two insns to compute EXP but they
	     have the same destination.  Here TEMP3 will be the second
	     insn and TEMP4 the SET from that insn.  */

	  if (! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 4
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (temp3 = next_nonnote_insn (temp)) != 0
	      && GET_CODE (temp3) == INSN
	      && REG_NOTES (temp3) == 0
	      && (reallabelprev == temp3
		  || ((temp2 = next_active_insn (temp3)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1), GET_CODE (temp2) == REG)
	      && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10
	      && (temp4 = single_set (temp3)) != 0
	      && rtx_equal_p (SET_DEST (temp4), temp2)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && rtx_cost (SET_SRC (temp4), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp5 = find_insert_position (insn, temp))
		  && (temp6 = find_insert_position (insn, temp3))
		  && validate_change (temp, &SET_DEST (temp1), new, 0))
		{
		  /* Use the earliest of temp5 and temp6.  */
		  if (temp5 != insn)
		    temp6 = temp5;
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp6), temp);
		  emit_insn_after_with_line_notes
		    (replace_rtx (PATTERN (temp3), temp2, new),
		     PREV_INSN (temp6), temp3);
		  delete_insn (temp);
		  delete_insn (temp3);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }

	  /* Finally, handle the case where two insns are used to
	     compute EXP but a temporary register is used.  Here we must
	     ensure that the temporary register is not used anywhere else.  */

	  if (! reload_completed
	      && after_regscan
	      && this_is_condjump && ! this_is_simplejump
	      && BRANCH_COST >= 4
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && REG_NOTES (temp) == 0
	      && (temp3 = next_nonnote_insn (temp)) != 0
	      && GET_CODE (temp3) == INSN
	      && REG_NOTES (temp3) == 0
	      && (reallabelprev == temp3
		  || ((temp2 = next_active_insn (temp3)) != 0
		      && simplejump_p (temp2)
		      && JUMP_LABEL (temp2) == JUMP_LABEL (insn)))
	      && (temp1 = single_set (temp)) != 0
	      && (temp5 = SET_DEST (temp1),
		  (GET_CODE (temp5) == REG
		   || (GET_CODE (temp5) == SUBREG
		       && (temp5 = SUBREG_REG (temp5),
			   GET_CODE (temp5) == REG))))
	      && REGNO (temp5) >= FIRST_PSEUDO_REGISTER
	      && REGNO_FIRST_UID (REGNO (temp5)) == INSN_UID (temp)
	      && REGNO_LAST_UID (REGNO (temp5)) == INSN_UID (temp3)
	      && ! side_effects_p (SET_SRC (temp1))
	      && ! may_trap_p (SET_SRC (temp1))
	      && rtx_cost (SET_SRC (temp1), SET) < 10
	      && (temp4 = single_set (temp3)) != 0
	      && (temp2 = SET_DEST (temp4), GET_CODE (temp2) == REG)
	      && GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp2) >= FIRST_PSEUDO_REGISTER)
	      && rtx_equal_p (SET_DEST (temp4), temp2)
	      && ! side_effects_p (SET_SRC (temp4))
	      && ! may_trap_p (SET_SRC (temp4))
	      && rtx_cost (SET_SRC (temp4), SET) < 10)
	    {
	      rtx new = gen_reg_rtx (GET_MODE (temp2));

	      if ((temp5 = find_insert_position (insn, temp))
		  && (temp6 = find_insert_position (insn, temp3))
		  && validate_change (temp3, &SET_DEST (temp4), new, 0))
		{
		  /* Use the earliest of temp5 and temp6.  */
		  if (temp5 != insn)
		    temp6 = temp5;
		  next = emit_insn_after (gen_move_insn (temp2, new), insn);
		  emit_insn_after_with_line_notes (PATTERN (temp),
						   PREV_INSN (temp6), temp);
		  emit_insn_after_with_line_notes (PATTERN (temp3),
						   PREV_INSN (temp6), temp3);
		  delete_insn (temp);
		  delete_insn (temp3);
		  reallabelprev = prev_active_insn (JUMP_LABEL (insn));

		  if (after_regscan)
		    {
		      reg_scan_update (temp6, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }
		}
	    }
#endif /* ! HAVE_cc0 && ! HAVE_conditional_arithmetic */

#ifdef HAVE_conditional_arithmetic
	  /* ??? This is disabled in genconfig, as this simple-minded
	     transformation can incredibly lengthen register lifetimes.

	     Consider this example from cexp.c's yyparse:

		234 (set (pc)
		      (if_then_else (ne (reg:DI 149) (const_int 0 [0x0]))
			(label_ref 248) (pc)))
		237 (set (reg/i:DI 0 $0) (const_int 1 [0x1]))
		239 (set (pc) (label_ref 2382))
		248 (code_label ("yybackup"))

	     This will be transformed to:

		237 (set (reg/i:DI 0 $0)
		      (if_then_else:DI (eq (reg:DI 149) (const_int 0 [0x0]))
			(const_int 1 [0x1]) (reg/i:DI 0 $0)))
		239 (set (pc)
		      (if_then_else (eq (reg:DI 149) (const_int 0 [0x0]))
			(label_ref 2382) (pc)))

	     which, from this narrow viewpoint, looks fine.  Except that
	     between this and 3 other occurrences of the same pattern, $0
	     is now live for basically the entire function, and we'll
	     get an abort in caller_save.

	     Any replacement for this code should recall that a set of
	     a register that is not live need not, and indeed should not,
	     be conditionalized.  Either that, or delay the transformation
	     until after register allocation.  */

	  /* See if this is a conditional jump around a small number of
	     instructions that we can conditionalize.  Don't do this before
	     the initial CSE pass or after reload.

	     We reject any insns that have side effects or may trap.
	     Strictly speaking, this is not needed since the machine may
	     support conditionalizing these too, but we won't deal with that
	     now.  Specifically, this means that we can't conditionalize a
	     CALL_INSN, which some machines, such as the ARC, can do, but
	     this is a very minor optimization.  */
	  if (this_is_condjump && ! this_is_simplejump
	      && cse_not_expected && optimize > 0 && ! reload_completed
	      && BRANCH_COST > 2
	      && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (insn)), 0),
					   insn))
	    {
	      rtx ourcond = XEXP (SET_SRC (PATTERN (insn)), 0);
	      int num_insns = 0;
	      char *storage = (char *) oballoc (0);
	      int last_insn = 0, failed = 0;
	      rtx changed_jump = 0;

	      ourcond = gen_rtx (reverse_condition (GET_CODE (ourcond)),
				 VOIDmode, XEXP (ourcond, 0),
				 XEXP (ourcond, 1));

	      /* Scan forward BRANCH_COST real insns looking for the JUMP_LABEL
		 of this insn.  We see if we think we can conditionalize the
		 insns we pass.  For now, we only deal with insns that have
		 one SET.  We stop after an insn that modifies anything in
		 OURCOND, if we have too many insns, or if we have an insn
		 with a side effect or that may trap.  Note that we will
		 be modifying any unconditional jumps we encounter to be
		 conditional; this will have the effect of also doing this
		 optimization on the "else" the next time around.  */
	      for (temp1 = NEXT_INSN (insn);
		   num_insns <= BRANCH_COST && ! failed && temp1 != 0
		   && GET_CODE (temp1) != CODE_LABEL;
		   temp1 = NEXT_INSN (temp1))
		{
		  /* Ignore everything but an active insn.  */
		  if (GET_RTX_CLASS (GET_CODE (temp1)) != 'i'
		      || GET_CODE (PATTERN (temp1)) == USE
		      || GET_CODE (PATTERN (temp1)) == CLOBBER)
		    continue;

		  /* If this was an unconditional jump, record it since we'll
		     need to remove the BARRIER if we succeed.  We can only
		     have one such jump since there must be a label after
		     the BARRIER and it's either ours, in which case it's the
		     only one, or some other, in which case we'd fail.  */

		  if (simplejump_p (temp1))
		    changed_jump = temp1;

		  /* See if we are allowed another insn and if this insn
		     is one we think we may be able to handle.  */
		  if (++num_insns > BRANCH_COST
		      || last_insn
		      || (temp2 = single_set (temp1)) == 0
		      || side_effects_p (SET_SRC (temp2))
		      || may_trap_p (SET_SRC (temp2)))
		    failed = 1;
		  else
		    validate_change (temp1, &SET_SRC (temp2),
				     gen_rtx_IF_THEN_ELSE
				     (GET_MODE (SET_DEST (temp2)),
				      copy_rtx (ourcond),
				      SET_SRC (temp2), SET_DEST (temp2)),
				     1);

		  if (modified_in_p (ourcond, temp1))
		    last_insn = 1;
		}

	      /* If we've reached our jump label, haven't failed, and all
		 the changes above are valid, we can delete this jump
		 insn.  Also remove a BARRIER after any jump that used
		 to be unconditional and remove any REG_EQUAL or REG_EQUIV
		 that might have previously been present on insns we
		 made conditional.  */
	      if (temp1 == JUMP_LABEL (insn) && ! failed
		  && apply_change_group ())
		{
		  for (temp1 = NEXT_INSN (insn); temp1 != JUMP_LABEL (insn);
		       temp1 = NEXT_INSN (temp1))
		    if (GET_RTX_CLASS (GET_CODE (temp1)) == 'i')
		      for (temp2 = REG_NOTES (temp1); temp2 != 0;
			   temp2 = XEXP (temp2, 1))
			if (REG_NOTE_KIND (temp2) == REG_EQUAL
			    || REG_NOTE_KIND (temp2) == REG_EQUIV)
			  remove_note (temp1, temp2);

		  if (changed_jump != 0)
		    {
		      if (GET_CODE (NEXT_INSN (changed_jump)) != BARRIER)
			abort ();

		      delete_insn (NEXT_INSN (changed_jump));
		    }

		  delete_insn (insn);
		  changed = 1;
		  continue;
		}
	      else
		{
		  cancel_changes (0);
		  obfree (storage);
		}
	    }
#endif

	  /* Try to use a conditional move (if the target has them), or a
	     store-flag insn.  If the target has conditional arithmetic as
	     well as conditional move, the above code will have done something.
	     Note that we prefer the above code since it is more general: the
	     code below can make changes that require work to undo.

	     The general case here is:

	     1) x = a; if (...) x = b; and
	     2) if (...) x = b;

	     If the jump would be faster, the machine should not have defined
	     the movcc or scc insns!  These cases are often made by the
	     previous optimization.

	     The second case is treated as  x = x; if (...) x = b;.

	     INSN here is the jump around the store.  We set:

	     TEMP to the "x op= b;" insn.
	     TEMP1 to X.
	     TEMP2 to B.
	     TEMP3 to A (X in the second case).
	     TEMP4 to the condition being tested.
	     TEMP5 to the earliest insn used to find the condition.
	     TEMP6 to the SET of TEMP.  */
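	  /* A sketch of the intended result (illustrative only): with a
	     conditional-move target, case 1 above,

		x = a;
		if (cond) x = b;

	     ideally becomes the branch-free

		x = a;
		x = cond ? b : x;	-- one movcc-style insn

	     while the store-flag fallback below synthesizes the result
	     from the comparison value itself.  */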
	  if (/* We can't do this after reload has completed.  */
	      ! reload_completed
#ifdef HAVE_conditional_arithmetic
	      /* Defer this until after CSE so the above code gets the
		 first crack at it.  */
	      && cse_not_expected
#endif
	      && this_is_condjump && ! this_is_simplejump
	      /* Set TEMP to the "x = b;" insn.  */
	      && (temp = next_nonnote_insn (insn)) != 0
	      && GET_CODE (temp) == INSN
	      && (temp6 = single_set (temp)) != NULL_RTX
	      && GET_CODE (temp1 = SET_DEST (temp6)) == REG
	      && (! SMALL_REGISTER_CLASSES
		  || REGNO (temp1) >= FIRST_PSEUDO_REGISTER)
	      && ! side_effects_p (temp2 = SET_SRC (temp6))
	      && ! may_trap_p (temp2)
	      /* Allow either form, but prefer the former if both apply.
		 There is no point in using the old value of TEMP1 if
		 it is a register, since cse will alias them.  It can
		 lose if the old value were a hard register since CSE
		 won't replace hard registers.  Avoid using TEMP3 if
		 small register classes and it is a hard register.  */
	      && (((temp3 = reg_set_last (temp1, insn)) != 0
		   && ! (SMALL_REGISTER_CLASSES && GET_CODE (temp3) == REG
			 && REGNO (temp3) < FIRST_PSEUDO_REGISTER))
		  /* Make the latter case look like  x = x; if (...) x = b;  */
		  || (temp3 = temp1, 1))
	      /* INSN must either branch to the insn after TEMP or the insn
		 after TEMP must branch to the same place as INSN.  */
	      && (reallabelprev == temp
		  || ((temp4 = next_active_insn (temp)) != 0
		      && simplejump_p (temp4)
		      && JUMP_LABEL (temp4) == JUMP_LABEL (insn)))
	      && (temp4 = get_condition (insn, &temp5)) != 0
	      /* We must be comparing objects whose modes imply the size.
		 We could handle BLKmode if (1) emit_store_flag could
		 and (2) we could find the size reliably.  */
	      && GET_MODE (XEXP (temp4, 0)) != BLKmode
	      /* Even if branches are cheap, the store_flag optimization
		 can win when the operation to be performed can be
		 expressed directly.  */
#ifdef HAVE_cc0
	      /* If the previous insn sets CC0 and something else, we can't
		 do this since we are going to delete that insn.  */

	      && ! ((temp6 = prev_nonnote_insn (insn)) != 0
		    && GET_CODE (temp6) == INSN
		    && (sets_cc0_p (PATTERN (temp6)) == -1
			|| (sets_cc0_p (PATTERN (temp6)) == 1
			    && FIND_REG_INC_NOTE (temp6, NULL_RTX))))
#endif
	      )
	    {
#ifdef HAVE_conditional_move
	      /* First try a conditional move.  */
	      {
		enum rtx_code code = GET_CODE (temp4);
		rtx var = temp1;
		rtx cond0, cond1, aval, bval;
		rtx target, new_insn;

		/* Copy the compared variables into cond0 and cond1, so that
		   any side effects performed in or after the old comparison
		   will not affect our compare which will come later.  */
		/* ??? Is it possible to just use the comparison in the jump
		   insn?  After all, we're going to delete it.  We'd have
		   to modify emit_conditional_move to take a comparison rtx
		   instead or write a new function.  */
		cond0 = gen_reg_rtx (GET_MODE (XEXP (temp4, 0)));
		/* We want the target to be able to simplify comparisons with
		   zero (and maybe other constants as well), so don't create
		   pseudos for them.  There's no need to either.  */
		if (GET_CODE (XEXP (temp4, 1)) == CONST_INT
		    || GET_CODE (XEXP (temp4, 1)) == CONST_DOUBLE)
		  cond1 = XEXP (temp4, 1);
		else
		  cond1 = gen_reg_rtx (GET_MODE (XEXP (temp4, 1)));

		/* Careful about copying these values -- an IOR or the like
		   may need to do other things, like clobber flags.  */
		/* ??? Assume for the moment that AVAL is ok.  */
		aval = temp3;

		start_sequence ();

		/* We're dealing with a single_set insn with no side effects
		   on SET_SRC.  We do need to be reasonably certain that if
		   we need to force BVAL into a register that we won't
		   clobber the flags -- general_operand should suffice.  */
		if (general_operand (temp2, GET_MODE (var)))
		  bval = temp2;
		else
		  {
		    bval = gen_reg_rtx (GET_MODE (var));
		    new_insn = copy_rtx (temp);
		    temp6 = single_set (new_insn);
		    SET_DEST (temp6) = bval;
		    emit_insn (PATTERN (new_insn));
		  }

		target = emit_conditional_move (var, code,
						cond0, cond1, VOIDmode,
						aval, bval, GET_MODE (var),
						(code == LTU || code == GEU
						 || code == LEU || code == GTU));

		if (target)
		  {
		    rtx seq1, seq2, last;
		    int copy_ok;

		    /* Save the conditional move sequence but don't emit it
		       yet.  On some machines, like the alpha, it is possible
		       that temp5 == insn, so next generate the sequence that
		       saves the compared values and then emit both
		       sequences ensuring seq1 occurs before seq2.  */
		    seq2 = get_insns ();
		    end_sequence ();

		    /* "Now that we can't fail..."  Famous last words.
		       Generate the copy insns that preserve the compared
		       values.  */
		    start_sequence ();
		    emit_move_insn (cond0, XEXP (temp4, 0));
		    if (cond1 != XEXP (temp4, 1))
		      emit_move_insn (cond1, XEXP (temp4, 1));
		    seq1 = get_insns ();
		    end_sequence ();

		    /* Validate the sequence -- this may be some weird
		       bit-extract-and-test instruction for which there
		       exists no complementary bit-extract insn.  */
		    copy_ok = 1;
		    for (last = seq1; last; last = NEXT_INSN (last))
		      if (recog_memoized (last) < 0)
			{
			  copy_ok = 0;
			  break;
			}

		    if (copy_ok)
		      {
			emit_insns_before (seq1, temp5);

			/* Insert conditional move after insn, to be sure
			   that the jump and a possible compare won't be
			   separated.  */
			last = emit_insns_after (seq2, insn);

			/* ??? We can also delete the insn that sets X to A.
			   Flow will do it too though.  */
			delete_insn (temp);
			next = NEXT_INSN (insn);
			delete_jump (insn);

			if (after_regscan)
			  {
			    reg_scan_update (seq1, NEXT_INSN (last),
					     old_max_reg);
			    old_max_reg = max_reg_num ();
			  }

			changed = 1;
			continue;
		      }
		  }
		else
		  end_sequence ();
	      }
#endif

	      /* That didn't work, try a store-flag insn.

		 We further divide the cases into:

		 1) x = a; if (...) x = b; and either A or B is zero,
		 2) if (...) x = 0; and jumps are expensive,
		 3) x = a; if (...) x = b; and A and B are constants where all
		    the set bits in A are also set in B and jumps are expensive,
		 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
		    more expensive, and
		 5) if (...) x = b; if jumps are even more expensive.  */
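	      /* For example (illustrative only), case 1 with B == 0 on a
		 store-flag target:

		    x = 1;
		    if (a < b) x = 0;

		 can be computed without a branch as

		    x = (a >= b);

		 i.e. the reversed comparison result itself becomes X.  */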
	      if (GET_MODE_CLASS (GET_MODE (temp1)) == MODE_INT
		  && ((GET_CODE (temp3) == CONST_INT)
		      /* Make the latter case look like
			 x = x; if (...) x = 0;  */
		      || (temp3 = temp1,
			  ((BRANCH_COST >= 2
			    && temp2 == const0_rtx)
			   || BRANCH_COST >= 3)))
		  /* If B is zero, OK; if A is zero, can only do (1) if we
		     can reverse the condition.  See if (3) applies possibly
		     by reversing the condition.  Prefer reversing to (4) when
		     branches are very expensive.  */
		  && (((BRANCH_COST >= 2
			|| STORE_FLAG_VALUE == -1
			|| (STORE_FLAG_VALUE == 1
			    /* Check that the mask is a power of two,
			       so that it can probably be generated
			       with a shift.  */
			    && GET_CODE (temp3) == CONST_INT
			    && exact_log2 (INTVAL (temp3)) >= 0))
		       && (reversep = 0, temp2 == const0_rtx))
		      || ((BRANCH_COST >= 2
			   || STORE_FLAG_VALUE == -1
			   || (STORE_FLAG_VALUE == 1
			       && GET_CODE (temp2) == CONST_INT
			       && exact_log2 (INTVAL (temp2)) >= 0))
			  && temp3 == const0_rtx
			  && (reversep = can_reverse_comparison_p (temp4, insn)))
		      || (BRANCH_COST >= 2
			  && GET_CODE (temp2) == CONST_INT
			  && GET_CODE (temp3) == CONST_INT
			  && ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp2)
			      || ((INTVAL (temp2) & INTVAL (temp3)) == INTVAL (temp3)
				  && (reversep = can_reverse_comparison_p (temp4,
									   insn)))))
		      || BRANCH_COST >= 3))
		{
		  enum rtx_code code = GET_CODE (temp4);
		  rtx uval, cval, var = temp1;
		  int normalizep;
		  rtx target;

		  /* If necessary, reverse the condition.  */
		  if (reversep)
		    code = reverse_condition (code), uval = temp2, cval = temp3;
		  else
		    uval = temp3, cval = temp2;

		  /* If CVAL is non-zero, normalize to -1.  Otherwise, if UVAL
		     is the constant 1, it is best to just compute the result
		     directly.  If UVAL is constant and STORE_FLAG_VALUE
		     includes all of its bits, it is best to compute the flag
		     value unnormalized and `and' it with UVAL.  Otherwise,
		     normalize to -1 and `and' with UVAL.  */
		  normalizep = (cval != const0_rtx ? -1
				: (uval == const1_rtx ? 1
				   : (GET_CODE (uval) == CONST_INT
				      && (INTVAL (uval) & ~STORE_FLAG_VALUE) == 0)
				   ? 0 : -1));

		  /* We will be putting the store-flag insn immediately in
		     front of the comparison that was originally being done,
		     so we know all the variables in TEMP4 will be valid.
		     However, this might be in front of the assignment of
		     A to VAR.  If it is, it would clobber the store-flag
		     we will be emitting.

		     Therefore, emit into a temporary which will be copied to
		     VAR immediately after TEMP.  */

		  start_sequence ();
		  target = emit_store_flag (gen_reg_rtx (GET_MODE (var)), code,
					    XEXP (temp4, 0), XEXP (temp4, 1),
					    VOIDmode,
					    (code == LTU || code == LEU
					     || code == GEU || code == GTU),
					    normalizep);
		  if (target)
		    {
		      rtx seq;
		      rtx before = insn;

		      seq = get_insns ();
		      end_sequence ();

		      /* Put the store-flag insns in front of the first insn
			 used to compute the condition to ensure that we
			 use the same values of them as the current
			 comparison.  However, the remainder of the insns we
			 generate will be placed directly in front of the
			 jump insn, in case any of the pseudos we use
			 are modified earlier.  */

		      emit_insns_before (seq, temp5);

		      start_sequence ();

		      /* Both CVAL and UVAL are non-zero.  */
		      if (cval != const0_rtx && uval != const0_rtx)
			{
			  rtx tem1, tem2;

			  tem1 = expand_and (uval, target, NULL_RTX);
			  if (GET_CODE (cval) == CONST_INT
			      && GET_CODE (uval) == CONST_INT
			      && (INTVAL (cval) & INTVAL (uval)) == INTVAL (cval))
			    tem2 = cval;
			  else
			    {
			      tem2 = expand_unop (GET_MODE (var), one_cmpl_optab,
						  target, NULL_RTX, 0);
			      tem2 = expand_and (cval, tem2,
						 (GET_CODE (tem2) == REG
						  ? tem2 : 0));
			    }

			  /* If we usually make new pseudos, do so here.  This
			     turns out to help machines that have conditional
			     move insns.  */
			  /* ??? Conditional moves have already been handled.
			     This may be obsolete.  */

			  if (flag_expensive_optimizations)
			    target = 0;

			  target = expand_binop (GET_MODE (var), ior_optab,
						 tem1, tem2, target,
						 1, OPTAB_WIDEN);
			}
		      else if (normalizep != 1)
			{
			  /* We know that either CVAL or UVAL is zero.  If
			     UVAL is zero, negate TARGET and `and' with CVAL.
			     Otherwise, `and' with UVAL.  */
			  if (uval == const0_rtx)
			    {
			      target = expand_unop (GET_MODE (var), one_cmpl_optab,
						    target, NULL_RTX, 0);
			      uval = cval;
			    }

			  target = expand_and (uval, target,
					       (GET_CODE (target) == REG
						&& ! preserve_subexpressions_p ()
						? target : NULL_RTX));
			}

		      emit_move_insn (var, target);
		      seq = get_insns ();
		      end_sequence ();
#ifdef HAVE_cc0
		      /* If INSN uses CC0, we must not separate it from the
			 insn that sets cc0.  */
		      if (reg_mentioned_p (cc0_rtx, PATTERN (before)))
			before = prev_nonnote_insn (before);
#endif
		      emit_insns_before (seq, before);

		      delete_insn (temp);
		      next = NEXT_INSN (insn);
		      delete_jump (insn);

		      if (after_regscan)
			{
			  reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
			  old_max_reg = max_reg_num ();
			}

		      changed = 1;
		      continue;
		    }
		  else
		    end_sequence ();
		}
	    }

	  /* If branches are expensive, convert
		if (foo) bar++;    to    bar += (foo != 0);
	     and similarly for "bar--;"

	     INSN is the conditional branch around the arithmetic.  We set:

	     TEMP is the arithmetic insn.
	     TEMP1 is the SET doing the arithmetic.
	     TEMP2 is the operand being incremented or decremented.
	     TEMP3 to the condition being tested.
	     TEMP4 to the earliest insn used to find the condition.  */
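	  /* A sketch of the intent (illustrative):

		if (a == b) bar++;

	     becomes, on a store-flag target,

		bar += (a == b);

	     computed as  t = (a == b); bar = bar + t;  with the branch
	     deleted.  */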
	  if ((BRANCH_COST >= 2
#ifdef HAVE_incscc
	       || HAVE_incscc
#endif
#ifdef HAVE_decscc
	       || HAVE_decscc
#endif
	       )
	      && ! reload_completed
	      && this_is_condjump && ! this_is_simplejump
	      && (temp = next_nonnote_insn (insn)) != 0
	      && (temp1 = single_set (temp)) != 0
	      && (temp2 = SET_DEST (temp1),
		  GET_MODE_CLASS (GET_MODE (temp2)) == MODE_INT)
	      && GET_CODE (SET_SRC (temp1)) == PLUS
	      && (XEXP (SET_SRC (temp1), 1) == const1_rtx
		  || XEXP (SET_SRC (temp1), 1) == constm1_rtx)
	      && rtx_equal_p (temp2, XEXP (SET_SRC (temp1), 0))
	      && ! side_effects_p (temp2)
	      && ! may_trap_p (temp2)
	      /* INSN must either branch to the insn after TEMP or the insn
		 after TEMP must branch to the same place as INSN.  */
	      && (reallabelprev == temp
		  || ((temp3 = next_active_insn (temp)) != 0
		      && simplejump_p (temp3)
		      && JUMP_LABEL (temp3) == JUMP_LABEL (insn)))
	      && (temp3 = get_condition (insn, &temp4)) != 0
	      /* We must be comparing objects whose modes imply the size.
		 We could handle BLKmode if (1) emit_store_flag could
		 and (2) we could find the size reliably.  */
	      && GET_MODE (XEXP (temp3, 0)) != BLKmode
	      && can_reverse_comparison_p (temp3, insn))
	    {
	      rtx temp6, target = 0, seq, init_insn = 0, init = temp2;
	      enum rtx_code code = reverse_condition (GET_CODE (temp3));

	      start_sequence ();

	      /* It must be the case that TEMP2 is not modified in the range
		 [TEMP4, INSN).  The one exception we make is if the insn
		 before INSN sets TEMP2 to something which is also unchanged
		 in that range.  In that case, we can move the initialization
		 into our sequence.  */

	      if ((temp5 = prev_active_insn (insn)) != 0
		  && no_labels_between_p (temp5, insn)
		  && GET_CODE (temp5) == INSN
		  && (temp6 = single_set (temp5)) != 0
		  && rtx_equal_p (temp2, SET_DEST (temp6))
		  && (CONSTANT_P (SET_SRC (temp6))
		      || GET_CODE (SET_SRC (temp6)) == REG
		      || GET_CODE (SET_SRC (temp6)) == SUBREG))
		{
		  emit_insn (PATTERN (temp5));
		  init_insn = temp5;
		  init = SET_SRC (temp6);
		}

	      if (CONSTANT_P (init)
		  || ! reg_set_between_p (init, PREV_INSN (temp4), insn))
		target = emit_store_flag (gen_reg_rtx (GET_MODE (temp2)), code,
					  XEXP (temp3, 0), XEXP (temp3, 1),
					  VOIDmode,
					  (code == LTU || code == LEU
					   || code == GTU || code == GEU), 1);

	      /* If we can do the store-flag, do the addition or
		 subtraction.  */

	      if (target)
		target = expand_binop (GET_MODE (temp2),
				       (XEXP (SET_SRC (temp1), 1) == const1_rtx
					? add_optab : sub_optab),
				       temp2, target, temp2, 0, OPTAB_WIDEN);

	      if (target != 0)
		{
		  /* Put the result back in temp2 in case it isn't already.
		     Then replace the jump, possibly a CC0-setting insn in
		     front of the jump, and TEMP, with the sequence we have
		     made.  */

		  if (target != temp2)
		    emit_move_insn (temp2, target);

		  seq = get_insns ();
		  end_sequence ();

		  emit_insns_before (seq, temp4);
		  delete_insn (temp);

		  if (init_insn)
		    delete_insn (init_insn);

		  next = NEXT_INSN (insn);
#ifdef HAVE_cc0
		  delete_insn (prev_nonnote_insn (insn));
#endif
		  delete_insn (insn);

		  if (after_regscan)
		    {
		      reg_scan_update (seq, NEXT_INSN (next), old_max_reg);
		      old_max_reg = max_reg_num ();
		    }

		  changed = 1;
		  continue;
		}
	      else
		end_sequence ();
	    }

	  /* Simplify   if (...) x = 1; else {...}  if (x) ...
	     We recognize this case scanning backwards as well.

	     TEMP is the assignment to x;
	     TEMP1 is the label at the head of the second if.  */
	  /* ?? This should call get_condition to find the values being
	     compared, instead of looking for a COMPARE insn when HAVE_cc0
	     is not defined.  This would allow it to work on the m88k.  */
	  /* ?? This optimization is only safe before cse is run if HAVE_cc0
	     is not defined and the condition is tested by a separate compare
	     insn.  This is because the code below assumes that the result
	     of the compare dies in the following branch.

	     Not only that, but there might be other insns between the
	     compare and branch whose results are live.  Those insns need
	     to be executed.

	     A way to fix this is to move the insns at JUMP_LABEL (insn)
	     to before INSN.  If we are running before flow, they will
	     be deleted if they aren't needed.  But this doesn't work
	     well after flow.

	     This is really a special-case of jump threading, anyway.  The
	     right thing to do is to replace this and jump threading with
	     much simpler code in cse.

	     This code has been turned off in the non-cc0 case in the
	     meantime.  */
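	  /* For example (illustrative), given

		TEMP:	x = 1
		INSN:	goto L1
		...
	     L1:
		TEMP1:	if (x) goto L2

	     x is known to be 1 whenever INSN is taken, so INSN can be
	     redirected straight to L2, skipping the dead test at L1.  */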
1632 #ifdef HAVE_cc0
1633 else if (this_is_simplejump
1634 /* Safe to skip USE and CLOBBER insns here
1635 since they will not be deleted. */
1636 && (temp = prev_active_insn (insn))
1637 && no_labels_between_p (temp, insn)
1638 && GET_CODE (temp) == INSN
1639 && GET_CODE (PATTERN (temp)) == SET
1640 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1641 && CONSTANT_P (SET_SRC (PATTERN (temp)))
1642 && (temp1 = next_active_insn (JUMP_LABEL (insn)))
1643 /* If we find that the next value tested is `x'
1644 (TEMP1 is the insn where this happens), win. */
1645 && GET_CODE (temp1) == INSN
1646 && GET_CODE (PATTERN (temp1)) == SET
1647 #ifdef HAVE_cc0
1648 /* Does temp1 `tst' the value of x? */
1649 && SET_SRC (PATTERN (temp1)) == SET_DEST (PATTERN (temp))
1650 && SET_DEST (PATTERN (temp1)) == cc0_rtx
1651 && (temp1 = next_nonnote_insn (temp1))
1652 #else
1653 /* Does temp1 compare the value of x against zero? */
1654 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1655 && XEXP (SET_SRC (PATTERN (temp1)), 1) == const0_rtx
1656 && (XEXP (SET_SRC (PATTERN (temp1)), 0)
1657 == SET_DEST (PATTERN (temp)))
1658 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1659 && (temp1 = find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1660 #endif
1661 && condjump_p (temp1))
1663 /* Get the if_then_else from the condjump. */
1664 rtx choice = SET_SRC (PATTERN (temp1));
1665 if (GET_CODE (choice) == IF_THEN_ELSE)
1667 enum rtx_code code = GET_CODE (XEXP (choice, 0));
1668 rtx val = SET_SRC (PATTERN (temp));
1669 rtx cond
1670 = simplify_relational_operation (code, GET_MODE (SET_DEST (PATTERN (temp))),
1671 val, const0_rtx);
1672 rtx ultimate;
1674 if (cond == const_true_rtx)
1675 ultimate = XEXP (choice, 1);
1676 else if (cond == const0_rtx)
1677 ultimate = XEXP (choice, 2);
1678 else
1679 ultimate = 0;
1681 if (ultimate == pc_rtx)
1682 ultimate = get_label_after (temp1);
1683 else if (ultimate && GET_CODE (ultimate) != RETURN)
1684 ultimate = XEXP (ultimate, 0);
1686 if (ultimate && JUMP_LABEL(insn) != ultimate)
1687 changed |= redirect_jump (insn, ultimate);
1690 #endif
1692 #if 0
1693 /* @@ This needs a bit of work before it will be right.
1695 Any type of comparison can be accepted for the first and
1696 second compare. When rewriting the first jump, we must
1697 compute what conditions can reach label3, and use the
1698 appropriate code. We can not simply reverse/swap the code
1699 of the first jump. In some cases, the second jump must be
1700 rewritten also.
1702 For example,
1703 <  ==    converts to    >  ==
1704 <  !=    converts to    ==  >
1705 etc.
1707 If the code is written to only accept an '==' test for the second
1708 compare, then all that needs to be done is to swap the condition
1709 of the first branch.
1711 It is questionable whether we want this optimization anyway,
1712 since if the user wrote code like this knowing that the jump to
1713 label1 is taken most of the time, then rewriting it this way
1714 gives slower code. */
1715 /* @@ This should call get_condition to find the values being
1716 compared, instead of looking for a COMPARE insn when HAVE_cc0
1717 is not defined. This would allow it to work on the m88k. */
1718 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1719 is not defined and the condition is tested by a separate compare
1720 insn. This is because the code below assumes that the result
1721 of the compare dies in the following branch. */
1723 /* Simplify test a ~= b
1724 condjump label1;
1725 test a == b
1726 condjump label2;
1727 jump label3;
1728 label1:
1730 rewriting as
1731 test a ~~= b
1732 condjump label3
1733 test a == b
1734 condjump label2
1735 label1:
1737 where ~= is an inequality, e.g. >, and ~~= is the swapped
1738 inequality, e.g. <.
1740 We recognize this case scanning backwards.
1742 TEMP is the conditional jump to `label2';
1743 TEMP1 is the test for `a == b';
1744 TEMP2 is the conditional jump to `label1';
1745 TEMP3 is the test for `a ~= b'. */
1746 else if (this_is_simplejump
1747 && (temp = prev_active_insn (insn))
1748 && no_labels_between_p (temp, insn)
1749 && condjump_p (temp)
1750 && (temp1 = prev_active_insn (temp))
1751 && no_labels_between_p (temp1, temp)
1752 && GET_CODE (temp1) == INSN
1753 && GET_CODE (PATTERN (temp1)) == SET
1754 #ifdef HAVE_cc0
1755 && sets_cc0_p (PATTERN (temp1)) == 1
1756 #else
1757 && GET_CODE (SET_SRC (PATTERN (temp1))) == COMPARE
1758 && GET_CODE (SET_DEST (PATTERN (temp1))) == REG
1759 && (temp == find_next_ref (SET_DEST (PATTERN (temp1)), temp1))
1760 #endif
1761 && (temp2 = prev_active_insn (temp1))
1762 && no_labels_between_p (temp2, temp1)
1763 && condjump_p (temp2)
1764 && JUMP_LABEL (temp2) == next_nonnote_insn (NEXT_INSN (insn))
1765 && (temp3 = prev_active_insn (temp2))
1766 && no_labels_between_p (temp3, temp2)
1767 && GET_CODE (PATTERN (temp3)) == SET
1768 && rtx_equal_p (SET_DEST (PATTERN (temp3)),
1769 SET_DEST (PATTERN (temp1)))
1770 && rtx_equal_p (SET_SRC (PATTERN (temp1)),
1771 SET_SRC (PATTERN (temp3)))
1772 && ! inequality_comparisons_p (PATTERN (temp))
1773 && inequality_comparisons_p (PATTERN (temp2)))
1775 rtx fallthrough_label = JUMP_LABEL (temp2);
1777 ++LABEL_NUSES (fallthrough_label);
1778 if (swap_jump (temp2, JUMP_LABEL (insn)))
1780 delete_insn (insn);
1781 changed = 1;
1784 if (--LABEL_NUSES (fallthrough_label) == 0)
1785 delete_insn (fallthrough_label);
1787 #endif
1788 /* Simplify if (...) {... x = 1;} if (x) ...
1790 We recognize this case backwards.
1792 TEMP is the test of `x';
1793 TEMP1 is the assignment to `x' at the end of the
1794 previous statement. */
1795 /* @@ This should call get_condition to find the values being
1796 compared, instead of looking for a COMPARE insn when HAVE_cc0
1797 is not defined. This would allow it to work on the m88k. */
1798 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1799 is not defined and the condition is tested by a separate compare
1800 insn. This is because the code below assumes that the result
1801 of the compare dies in the following branch. */
1803 /* ??? This has to be turned off. The problem is that the
1804 unconditional jump might indirectly end up branching to the
1805 label between TEMP1 and TEMP. We can't detect this, in general,
1806 since it may become a jump to there after further optimizations.
1807 If that jump is made, it will be deleted, so we will retry
1808 this optimization in the next pass, producing an infinite loop.
1810 The present code prevents this by putting the jump after the
1811 label, but this is not logically correct. */
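/* Schematic of the hazard (hypothetical labels):

       TEMP1:  x = const
       L:                      <- some other jump may later target L
       TEMP:   compare x
       INSN:   condjump ...

   A jump inserted on the assumption that control always flows from
   TEMP1 into TEMP is wrong once anything branches to L.  */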
1812 #if 0
1813 else if (this_is_condjump
1814 /* Safe to skip USE and CLOBBER insns here
1815 since they will not be deleted. */
1816 && (temp = prev_active_insn (insn))
1817 && no_labels_between_p (temp, insn)
1818 && GET_CODE (temp) == INSN
1819 && GET_CODE (PATTERN (temp)) == SET
1820 #ifdef HAVE_cc0
1821 && sets_cc0_p (PATTERN (temp)) == 1
1822 && GET_CODE (SET_SRC (PATTERN (temp))) == REG
1823 #else
1824 /* Temp must be a compare insn; we can not accept a register
1825 to register move here, since it may not be simply a
1826 tst insn. */
1827 && GET_CODE (SET_SRC (PATTERN (temp))) == COMPARE
1828 && XEXP (SET_SRC (PATTERN (temp)), 1) == const0_rtx
1829 && GET_CODE (XEXP (SET_SRC (PATTERN (temp)), 0)) == REG
1830 && GET_CODE (SET_DEST (PATTERN (temp))) == REG
1831 && insn == find_next_ref (SET_DEST (PATTERN (temp)), temp)
1832 #endif
1833 /* May skip USE or CLOBBER insns here
1834 for checking for opportunity, since we
1835 take care of them later. */
1836 && (temp1 = prev_active_insn (temp))
1837 && GET_CODE (temp1) == INSN
1838 && GET_CODE (PATTERN (temp1)) == SET
1839 #ifdef HAVE_cc0
1840 && SET_SRC (PATTERN (temp)) == SET_DEST (PATTERN (temp1))
1841 #else
1842 && (XEXP (SET_SRC (PATTERN (temp)), 0)
1843 == SET_DEST (PATTERN (temp1)))
1844 #endif
1845 && CONSTANT_P (SET_SRC (PATTERN (temp1)))
1846 /* If this isn't true, cse will do the job. */
1847 && ! no_labels_between_p (temp1, temp))
1849 /* Get the if_then_else from the condjump. */
1850 rtx choice = SET_SRC (PATTERN (insn));
1851 if (GET_CODE (choice) == IF_THEN_ELSE
1852 && (GET_CODE (XEXP (choice, 0)) == EQ
1853 || GET_CODE (XEXP (choice, 0)) == NE))
1855 int want_nonzero = (GET_CODE (XEXP (choice, 0)) == NE);
1856 rtx last_insn;
1857 rtx ultimate;
1858 rtx p;
1860 /* Get the place that condjump will jump to
1861 if it is reached from here. */
1862 if ((SET_SRC (PATTERN (temp1)) != const0_rtx)
1863 == want_nonzero)
1864 ultimate = XEXP (choice, 1);
1865 else
1866 ultimate = XEXP (choice, 2);
1867 /* Get it as a CODE_LABEL. */
1868 if (ultimate == pc_rtx)
1869 ultimate = get_label_after (insn);
1870 else
1871 /* Get the label out of the LABEL_REF. */
1872 ultimate = XEXP (ultimate, 0);
1874 /* Insert the jump immediately before TEMP, specifically
1875 after the label that is between TEMP1 and TEMP. */
1876 last_insn = PREV_INSN (temp);
1878 /* If we would be branching to the next insn, the jump
1879 would immediately be deleted and then re-inserted in
1880 a subsequent pass over the code. So don't do anything
1881 in that case. */
1882 if (next_active_insn (last_insn)
1883 != next_active_insn (ultimate))
1885 emit_barrier_after (last_insn);
1886 p = emit_jump_insn_after (gen_jump (ultimate),
1887 last_insn);
1888 JUMP_LABEL (p) = ultimate;
1889 ++LABEL_NUSES (ultimate);
1890 if (INSN_UID (ultimate) < max_jump_chain
1891 && INSN_CODE (p) < max_jump_chain)
1893 jump_chain[INSN_UID (p)]
1894 = jump_chain[INSN_UID (ultimate)];
1895 jump_chain[INSN_UID (ultimate)] = p;
1897 changed = 1;
1898 continue;
1902 #endif
1903 #ifdef HAVE_trap
1904 /* Detect a conditional jump jumping over an unconditional trap. */
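/* Schematically, assuming the comparison is reversible:

       INSN:  if (cond) goto L;      =>         if (! cond) trap;
              trap;                          L: ...
       L:     ...

   The unconditional trap and the jump over it collapse into one
   conditional trap.  */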
1905 else if (HAVE_trap
1906 && this_is_condjump && ! this_is_simplejump
1907 && reallabelprev != 0
1908 && GET_CODE (reallabelprev) == INSN
1909 && GET_CODE (PATTERN (reallabelprev)) == TRAP_IF
1910 && TRAP_CONDITION (PATTERN (reallabelprev)) == const_true_rtx
1911 && prev_active_insn (reallabelprev) == insn
1912 && no_labels_between_p (insn, reallabelprev)
1913 && (temp2 = get_condition (insn, &temp4))
1914 && can_reverse_comparison_p (temp2, insn))
1916 rtx new = gen_cond_trap (reverse_condition (GET_CODE (temp2)),
1917 XEXP (temp2, 0), XEXP (temp2, 1),
1918 TRAP_CODE (PATTERN (reallabelprev)));
1920 if (new)
1922 emit_insn_before (new, temp4);
1923 delete_insn (reallabelprev);
1924 delete_jump (insn);
1925 changed = 1;
1926 continue;
1929 /* Detect a jump jumping to an unconditional trap. */
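/* Schematically:

       if (cond) goto L;  ...  L: trap;    =>    if (cond) trap;

   and an unconditional jump to a trap simply becomes a trap.  */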
1930 else if (HAVE_trap && this_is_condjump
1931 && (temp = next_active_insn (JUMP_LABEL (insn)))
1932 && GET_CODE (temp) == INSN
1933 && GET_CODE (PATTERN (temp)) == TRAP_IF
1934 && (this_is_simplejump
1935 || (temp2 = get_condition (insn, &temp4))))
1937 rtx tc = TRAP_CONDITION (PATTERN (temp));
1939 if (tc == const_true_rtx
1940 || (! this_is_simplejump && rtx_equal_p (temp2, tc)))
1942 rtx new;
1943 /* Replace an unconditional jump to a trap with a trap. */
1944 if (this_is_simplejump)
1946 emit_barrier_after (emit_insn_before (gen_trap (), insn));
1947 delete_jump (insn);
1948 changed = 1;
1949 continue;
1951 new = gen_cond_trap (GET_CODE (temp2), XEXP (temp2, 0),
1952 XEXP (temp2, 1),
1953 TRAP_CODE (PATTERN (temp)));
1954 if (new)
1956 emit_insn_before (new, temp4);
1957 delete_jump (insn);
1958 changed = 1;
1959 continue;
1962 /* If the trap condition and jump condition are mutually
1963 exclusive, redirect the jump to the following insn. */
1964 else if (GET_RTX_CLASS (GET_CODE (tc)) == '<'
1965 && ! this_is_simplejump
1966 && swap_condition (GET_CODE (temp2)) == GET_CODE (tc)
1967 && rtx_equal_p (XEXP (tc, 0), XEXP (temp2, 0))
1968 && rtx_equal_p (XEXP (tc, 1), XEXP (temp2, 1))
1969 && redirect_jump (insn, get_label_after (temp)))
1971 changed = 1;
1972 continue;
1975 #endif
1976 else
1978 /* Detect a jump to a jump. */
1980 /* Look for if (foo) bar; else break; */
1981 /* The insns look like this:
1982 insn = condjump label1;
1983 ...range1 (some insns)...
1984 jump label2;
1985 label1:
1986 ...range2 (some insns)...
1987 jump somewhere unconditionally
1988 label2: */
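/* After inverting INSN and exchanging the two ranges, the layout
   becomes (schematically):

       insn = inverted condjump label1;
       ...range2 (ends in the unconditional jump elsewhere)...
       label1:
       ...range1...
       jump label2;
       label2:

   so the break sequence sits outside the conditional, and the final
   jump now targets the immediately following label and can be removed
   by a later pass.  */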
1990 rtx label1 = next_label (insn);
1991 rtx range1end = label1 ? prev_active_insn (label1) : 0;
1992 /* Don't do this optimization on the first round, so that
1993 jump-around-a-jump gets simplified before we ask here
1994 whether a jump is unconditional.
1996 Also don't do it when we are called after reload since
1997 it will confuse reorg. */
1998 if (! first
1999 && (reload_completed ? ! flag_delayed_branch : 1)
2000 /* Make sure INSN is something we can invert. */
2001 && condjump_p (insn)
2002 && label1 != 0
2003 && JUMP_LABEL (insn) == label1
2004 && LABEL_NUSES (label1) == 1
2005 && GET_CODE (range1end) == JUMP_INSN
2006 && simplejump_p (range1end))
2008 rtx label2 = next_label (label1);
2009 rtx range2end = label2 ? prev_active_insn (label2) : 0;
2010 if (range1end != range2end
2011 && JUMP_LABEL (range1end) == label2
2012 && GET_CODE (range2end) == JUMP_INSN
2013 && GET_CODE (NEXT_INSN (range2end)) == BARRIER
2014 /* Invert the jump condition, so we
2015 still execute the same insns in each case. */
2016 && invert_jump (insn, label1))
2018 rtx range1beg = next_active_insn (insn);
2019 rtx range2beg = next_active_insn (label1);
2020 rtx range1after, range2after;
2021 rtx range1before, range2before;
2022 rtx rangenext;
2024 /* Include in each range any notes before it, to be
2025 sure that we get the line number note if any, even
2026 if there are other notes here. */
2027 while (PREV_INSN (range1beg)
2028 && GET_CODE (PREV_INSN (range1beg)) == NOTE)
2029 range1beg = PREV_INSN (range1beg);
2031 while (PREV_INSN (range2beg)
2032 && GET_CODE (PREV_INSN (range2beg)) == NOTE)
2033 range2beg = PREV_INSN (range2beg);
2035 /* Don't move NOTEs for blocks or loops; shift them
2036 outside the ranges, where they'll stay put. */
2037 range1beg = squeeze_notes (range1beg, range1end);
2038 range2beg = squeeze_notes (range2beg, range2end);
2040 /* Get current surrounds of the 2 ranges. */
2041 range1before = PREV_INSN (range1beg);
2042 range2before = PREV_INSN (range2beg);
2043 range1after = NEXT_INSN (range1end);
2044 range2after = NEXT_INSN (range2end);
2046 /* Splice range2 where range1 was. */
2047 NEXT_INSN (range1before) = range2beg;
2048 PREV_INSN (range2beg) = range1before;
2049 NEXT_INSN (range2end) = range1after;
2050 PREV_INSN (range1after) = range2end;
2051 /* Splice range1 where range2 was. */
2052 NEXT_INSN (range2before) = range1beg;
2053 PREV_INSN (range1beg) = range2before;
2054 NEXT_INSN (range1end) = range2after;
2055 PREV_INSN (range2after) = range1end;
2057 /* Check for a loop end note between the end of
2058 range2, and the next code label. If there is one,
2059 then what we have really seen is
2060 if (foo) break; end_of_loop;
2061 and moved the break sequence outside the loop.
2062 We must move the LOOP_END note to where the
2063 loop really ends now, or we will confuse loop
2064 optimization. Stop if we find a LOOP_BEG note
2065 first, since we don't want to move the LOOP_END
2066 note in that case. */
2067 for (;range2after != label2; range2after = rangenext)
2069 rangenext = NEXT_INSN (range2after);
2070 if (GET_CODE (range2after) == NOTE)
2072 if (NOTE_LINE_NUMBER (range2after)
2073 == NOTE_INSN_LOOP_END)
2075 NEXT_INSN (PREV_INSN (range2after))
2076 = rangenext;
2077 PREV_INSN (rangenext)
2078 = PREV_INSN (range2after);
2079 PREV_INSN (range2after)
2080 = PREV_INSN (range1beg);
2081 NEXT_INSN (range2after) = range1beg;
2082 NEXT_INSN (PREV_INSN (range1beg))
2083 = range2after;
2084 PREV_INSN (range1beg) = range2after;
2086 else if (NOTE_LINE_NUMBER (range2after)
2087 == NOTE_INSN_LOOP_BEG)
2088 break;
2091 changed = 1;
2092 continue;
2097 /* Now that the jump has been tensioned,
2098 try cross jumping: check for identical code
2099 before the jump and before its target label. */
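/* Schematic (hypothetical labels): given a common tail I,

       ...; I; jump L;                 ...; jump L2;
       ...                    =>       ...
       ...; I;  L: ...                 ...; L2: I;  L: ...

   one copy of I is deleted and its jump is redirected to a label put
   in front of the surviving copy.  */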
2101 /* First, cross jumping of conditional jumps: */
2103 if (cross_jump && condjump_p (insn))
2105 rtx newjpos, newlpos;
2106 rtx x = prev_real_insn (JUMP_LABEL (insn));
2108 /* A conditional jump may be crossjumped
2109 only if the place it jumps to follows
2110 an opposing jump that comes back here. */
2112 if (x != 0 && ! jump_back_p (x, insn))
2113 /* We have no opposing jump;
2114 cannot cross jump this insn. */
2115 x = 0;
2117 newjpos = 0;
2118 /* TARGET is nonzero if it is ok to cross jump
2119 to code before TARGET. If so, see if it matches. */
2120 if (x != 0)
2121 find_cross_jump (insn, x, 2,
2122 &newjpos, &newlpos);
2124 if (newjpos != 0)
2126 do_cross_jump (insn, newjpos, newlpos);
2127 /* Make the old conditional jump
2128 into an unconditional one. */
2129 SET_SRC (PATTERN (insn))
2130 = gen_rtx_LABEL_REF (VOIDmode, JUMP_LABEL (insn));
2131 INSN_CODE (insn) = -1;
2132 emit_barrier_after (insn);
2133 /* Add to jump_chain unless this is a new label
2134 whose UID is too large. */
2135 if (INSN_UID (JUMP_LABEL (insn)) < max_jump_chain)
2137 jump_chain[INSN_UID (insn)]
2138 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2139 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2141 changed = 1;
2142 next = insn;
2146 /* Cross jumping of unconditional jumps:
2147 a few differences. */
2149 if (cross_jump && simplejump_p (insn))
2151 rtx newjpos, newlpos;
2152 rtx target;
2154 newjpos = 0;
2156 /* TARGET is nonzero if it is ok to cross jump
2157 to code before TARGET. If so, see if it matches. */
2158 find_cross_jump (insn, JUMP_LABEL (insn), 1,
2159 &newjpos, &newlpos);
2161 /* If cannot cross jump to code before the label,
2162 see if we can cross jump to another jump to
2163 the same label. */
2164 /* Try each other jump to this label. */
2165 if (INSN_UID (JUMP_LABEL (insn)) < max_uid)
2166 for (target = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2167 target != 0 && newjpos == 0;
2168 target = jump_chain[INSN_UID (target)])
2169 if (target != insn
2170 && JUMP_LABEL (target) == JUMP_LABEL (insn)
2171 /* Ignore TARGET if it's deleted. */
2172 && ! INSN_DELETED_P (target))
2173 find_cross_jump (insn, target, 2,
2174 &newjpos, &newlpos);
2176 if (newjpos != 0)
2178 do_cross_jump (insn, newjpos, newlpos);
2179 changed = 1;
2180 next = insn;
2184 /* This code was dead in the previous jump.c! */
2185 if (cross_jump && GET_CODE (PATTERN (insn)) == RETURN)
2187 /* Return insns all "jump to the same place"
2188 so we can cross-jump between any two of them. */
2190 rtx newjpos, newlpos, target;
2192 newjpos = 0;
2194 /* If cannot cross jump to code before the label,
2195 see if we can cross jump to another jump to
2196 the same label. */
2197 /* Try each other jump to this label. */
2198 for (target = jump_chain[0];
2199 target != 0 && newjpos == 0;
2200 target = jump_chain[INSN_UID (target)])
2201 if (target != insn
2202 && ! INSN_DELETED_P (target)
2203 && GET_CODE (PATTERN (target)) == RETURN)
2204 find_cross_jump (insn, target, 2,
2205 &newjpos, &newlpos);
2207 if (newjpos != 0)
2209 do_cross_jump (insn, newjpos, newlpos);
2210 changed = 1;
2211 next = insn;
2217 first = 0;
2220 /* Delete extraneous line number notes.
2221 Note that two consecutive notes for different lines are not really
2222 extraneous. There should be some indication where that line belonged,
2223 even if it became empty. */
2226 rtx last_note = 0;
2228 for (insn = f; insn; insn = NEXT_INSN (insn))
2229 if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) >= 0)
2231 /* Delete this note if it is identical to previous note. */
2232 if (last_note
2233 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
2234 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
2236 delete_insn (insn);
2237 continue;
2240 last_note = insn;
2244 #ifdef HAVE_return
2245 if (HAVE_return)
2247 /* If we fall through to the epilogue, see if we can insert a RETURN insn
2248 in front of it. If the machine allows it at this point (we might be
2249 after reload for a leaf routine), it will improve optimization for it
2250 to be there. We do this both here and at the start of this pass since
2251 the RETURN might have been deleted by some of our optimizations. */
2252 insn = get_last_insn ();
2253 while (insn && GET_CODE (insn) == NOTE)
2254 insn = PREV_INSN (insn);
2256 if (insn && GET_CODE (insn) != BARRIER)
2258 emit_jump_insn (gen_return ());
2259 emit_barrier ();
2262 #endif
2264 /* CAN_REACH_END is persistent for each function. Once set it should
2265 not be cleared. This is especially true for the case where we
2266 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2267 the front-end before compiling each function. */
2268 if (calculate_can_reach_end (last_insn, 0, 1))
2269 can_reach_end = 1;
2271 /* Show JUMP_CHAIN no longer valid. */
2272 jump_chain = 0;
2275 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2276 notes whose labels don't occur in the insn any more. Returns the
2277 largest INSN_UID found. */
2278 static int
2279 init_label_info (f)
2280 rtx f;
2282 int largest_uid = 0;
2283 rtx insn;
2285 for (insn = f; insn; insn = NEXT_INSN (insn))
2287 if (GET_CODE (insn) == CODE_LABEL)
2288 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
2289 else if (GET_CODE (insn) == JUMP_INSN)
2290 JUMP_LABEL (insn) = 0;
2291 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
2293 rtx note, next;
2295 for (note = REG_NOTES (insn); note; note = next)
2297 next = XEXP (note, 1);
2298 if (REG_NOTE_KIND (note) == REG_LABEL
2299 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
2300 remove_note (insn, note);
2303 if (INSN_UID (insn) > largest_uid)
2304 largest_uid = INSN_UID (insn);
2307 return largest_uid;
2310 /* Delete insns following barriers, up to next label.
2312 Also delete no-op jumps created by gcse. */
2313 static void
2314 delete_barrier_successors (f)
2315 rtx f;
2317 rtx insn;
2319 for (insn = f; insn;)
2321 if (GET_CODE (insn) == BARRIER)
2323 insn = NEXT_INSN (insn);
2325 never_reached_warning (insn);
2327 while (insn != 0 && GET_CODE (insn) != CODE_LABEL)
2329 if (GET_CODE (insn) == NOTE
2330 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2331 insn = NEXT_INSN (insn);
2332 else
2333 insn = delete_insn (insn);
2335 /* INSN is now the code_label. */
2337 /* Also remove (set (pc) (pc)) insns which can be created by
2338 gcse. We eliminate such insns now to avoid having them
2339 cause problems later. */
2340 else if (GET_CODE (insn) == JUMP_INSN
2341 && GET_CODE (PATTERN (insn)) == SET
2342 && SET_SRC (PATTERN (insn)) == pc_rtx
2343 && SET_DEST (PATTERN (insn)) == pc_rtx)
2344 insn = delete_insn (insn);
2346 else
2347 insn = NEXT_INSN (insn);
2351 /* Mark the label each jump jumps to.
2352 Combine consecutive labels, and count uses of labels.
2354 For each label, make a chain (using `jump_chain')
2355 of all the *unconditional* jumps that jump to it;
2356 also make a chain of all returns.
2358 CROSS_JUMP indicates whether we are doing cross jumping
2359 and if we are whether we will be paying attention to
2360 death notes or not. */
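/* Sketch of the resulting chain layout: jump_chain is indexed by
   INSN_UID; the entry for a label's uid heads a list of the
   unconditional jumps to that label, linked through the entries for
   the jumps' own uids, and entry 0 heads the list of returns.  */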
2362 static void
2363 mark_all_labels (f, cross_jump)
2364 rtx f;
2365 int cross_jump;
2367 rtx insn;
2369 for (insn = f; insn; insn = NEXT_INSN (insn))
2370 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2372 mark_jump_label (PATTERN (insn), insn, cross_jump);
2373 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
2375 if (JUMP_LABEL (insn) != 0 && simplejump_p (insn))
2377 jump_chain[INSN_UID (insn)]
2378 = jump_chain[INSN_UID (JUMP_LABEL (insn))];
2379 jump_chain[INSN_UID (JUMP_LABEL (insn))] = insn;
2381 if (GET_CODE (PATTERN (insn)) == RETURN)
2383 jump_chain[INSN_UID (insn)] = jump_chain[0];
2384 jump_chain[0] = insn;
2390 /* Delete all labels already not referenced.
2391 Also find and return the last insn. */
2393 static rtx
2394 delete_unreferenced_labels (f)
2395 rtx f;
2397 rtx final = NULL_RTX;
2398 rtx insn;
2400 for (insn = f; insn; )
2402 if (GET_CODE (insn) == CODE_LABEL && LABEL_NUSES (insn) == 0)
2403 insn = delete_insn (insn);
2404 else
2406 final = insn;
2407 insn = NEXT_INSN (insn);
2411 return final;
2414 /* Delete various simple forms of moves which have no necessary
2415 side effect. */
2417 static void
2418 delete_noop_moves (f)
2419 rtx f;
2421 rtx insn, next;
2423 for (insn = f; insn; )
2425 next = NEXT_INSN (insn);
2427 if (GET_CODE (insn) == INSN)
2429 register rtx body = PATTERN (insn);
2431 /* Combine stack_adjusts with following push_insns. */
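/* Roughly: a stack-pointer adjustment followed by up to three
   post_inc pushes, e.g.

       sp := sp + N
       (set (mem (post_inc sp)) x)

   is rewritten so the pushes become ordinary stores at constant
   offsets from sp, and the explicit adjustment is deleted when the
   pushes account for all of N, or shrunk when they cover only part
   of it.  */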
2432 #ifdef PUSH_ROUNDING
2433 if (GET_CODE (body) == SET
2434 && SET_DEST (body) == stack_pointer_rtx
2435 && GET_CODE (SET_SRC (body)) == PLUS
2436 && XEXP (SET_SRC (body), 0) == stack_pointer_rtx
2437 && GET_CODE (XEXP (SET_SRC (body), 1)) == CONST_INT
2438 && INTVAL (XEXP (SET_SRC (body), 1)) > 0)
2440 rtx p;
2441 rtx stack_adjust_insn = insn;
2442 int stack_adjust_amount = INTVAL (XEXP (SET_SRC (body), 1));
2443 int total_pushed = 0;
2444 int pushes = 0;
2446 /* Find all successive push insns. */
2447 p = insn;
2448 /* Don't convert more than three pushes;
2449 that starts adding too many displaced addresses
2450 and the whole thing starts becoming a losing
2451 proposition. */
2452 while (pushes < 3)
2454 rtx pbody, dest;
2455 p = next_nonnote_insn (p);
2456 if (p == 0 || GET_CODE (p) != INSN)
2457 break;
2458 pbody = PATTERN (p);
2459 if (GET_CODE (pbody) != SET)
2460 break;
2461 dest = SET_DEST (pbody);
2462 /* Allow a no-op move between the adjust and the push. */
2463 if (GET_CODE (dest) == REG
2464 && GET_CODE (SET_SRC (pbody)) == REG
2465 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2466 continue;
2467 if (! (GET_CODE (dest) == MEM
2468 && GET_CODE (XEXP (dest, 0)) == POST_INC
2469 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2470 break;
2471 pushes++;
2472 if (total_pushed + GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)))
2473 > stack_adjust_amount)
2474 break;
2475 total_pushed += GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2478 /* Discard the amount pushed from the stack adjust;
2479 maybe eliminate it entirely. */
2480 if (total_pushed >= stack_adjust_amount)
2482 delete_computation (stack_adjust_insn);
2483 total_pushed = stack_adjust_amount;
2485 else
2486 XEXP (SET_SRC (PATTERN (stack_adjust_insn)), 1)
2487 = GEN_INT (stack_adjust_amount - total_pushed);
2489 /* Change the appropriate push insns to ordinary stores. */
2490 p = insn;
2491 while (total_pushed > 0)
2493 rtx pbody, dest;
2494 p = next_nonnote_insn (p);
2495 if (GET_CODE (p) != INSN)
2496 break;
2497 pbody = PATTERN (p);
2498 if (GET_CODE (pbody) != SET)
2499 break;
2500 dest = SET_DEST (pbody);
2501 /* Allow a no-op move between the adjust and the push. */
2502 if (GET_CODE (dest) == REG
2503 && GET_CODE (SET_SRC (pbody)) == REG
2504 && REGNO (dest) == REGNO (SET_SRC (pbody)))
2505 continue;
2506 if (! (GET_CODE (dest) == MEM
2507 && GET_CODE (XEXP (dest, 0)) == POST_INC
2508 && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx))
2509 break;
2510 total_pushed -= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody)));
2511 /* If this push doesn't fully fit in the space
2512 of the stack adjust that we deleted,
2513 make another stack adjust here for what we
2514 didn't use up. There should be peepholes
2515 to recognize the resulting sequence of insns. */
2516 if (total_pushed < 0)
2518 emit_insn_before (gen_add2_insn (stack_pointer_rtx,
2519 GEN_INT (- total_pushed)),
2520 p);
2521 break;
2523 XEXP (dest, 0)
2524 = plus_constant (stack_pointer_rtx, total_pushed);
2527 #endif
2529 /* Detect and delete no-op move instructions
2530 resulting from not allocating a parameter in a register. */
2532 if (GET_CODE (body) == SET
2533 && (SET_DEST (body) == SET_SRC (body)
2534 || (GET_CODE (SET_DEST (body)) == MEM
2535 && GET_CODE (SET_SRC (body)) == MEM
2536 && rtx_equal_p (SET_SRC (body), SET_DEST (body))))
2537 && ! (GET_CODE (SET_DEST (body)) == MEM
2538 && MEM_VOLATILE_P (SET_DEST (body)))
2539 && ! (GET_CODE (SET_SRC (body)) == MEM
2540 && MEM_VOLATILE_P (SET_SRC (body))))
2541 delete_computation (insn);
2543 /* Detect and ignore no-op move instructions
2544 resulting from smart or fortuitous register allocation. */
2546 else if (GET_CODE (body) == SET)
2548 int sreg = true_regnum (SET_SRC (body));
2549 int dreg = true_regnum (SET_DEST (body));
2551 if (sreg == dreg && sreg >= 0)
2552 delete_insn (insn);
2553 else if (sreg >= 0 && dreg >= 0)
2555 rtx trial;
2556 rtx tem = find_equiv_reg (NULL_RTX, insn, 0,
2557 sreg, NULL_PTR, dreg,
2558 GET_MODE (SET_SRC (body)));
2560 if (tem != 0
2561 && GET_MODE (tem) == GET_MODE (SET_DEST (body)))
2563 /* DREG may have been the target of a REG_DEAD note in
2564 the insn which makes INSN redundant. If so, reorg
2565 would still think it is dead. So search for such a
2566 note and delete it if we find it. */
2567 if (! find_regno_note (insn, REG_UNUSED, dreg))
2568 for (trial = prev_nonnote_insn (insn);
2569 trial && GET_CODE (trial) != CODE_LABEL;
2570 trial = prev_nonnote_insn (trial))
2571 if (find_regno_note (trial, REG_DEAD, dreg))
2573 remove_death (dreg, trial);
2574 break;
2577 /* Deleting insn could lose a death-note for SREG. */
2578 if ((trial = find_regno_note (insn, REG_DEAD, sreg)))
2580 /* Change this into a USE so that we won't emit
2581 code for it, but still can keep the note. */
2582 PATTERN (insn)
2583 = gen_rtx_USE (VOIDmode, XEXP (trial, 0));
2584 INSN_CODE (insn) = -1;
2585 /* Remove all reg notes but the REG_DEAD one. */
2586 REG_NOTES (insn) = trial;
2587 XEXP (trial, 1) = NULL_RTX;
2589 else
2590 delete_insn (insn);
2593 else if (dreg >= 0 && CONSTANT_P (SET_SRC (body))
2594 && find_equiv_reg (SET_SRC (body), insn, 0, dreg,
2595 NULL_PTR, 0,
2596 GET_MODE (SET_DEST (body))))
2598 /* This handles the case where we have two consecutive
2599 assignments of the same constant to pseudos that didn't
2600 get a hard reg. Each SET from the constant will be
2601 converted into a SET of the spill register and an
2602 output reload will be made following it. This produces
2603 two loads of the same constant into the same spill
2604 register. */
2606 rtx in_insn = insn;
2608 /* Look back for a death note for the first reg.
2609 If there is one, it is no longer accurate. */
2610 while (in_insn && GET_CODE (in_insn) != CODE_LABEL)
2612 if ((GET_CODE (in_insn) == INSN
2613 || GET_CODE (in_insn) == JUMP_INSN)
2614 && find_regno_note (in_insn, REG_DEAD, dreg))
2616 remove_death (dreg, in_insn);
2617 break;
2619 in_insn = PREV_INSN (in_insn);
2622 /* Delete the second load of the value. */
2623 delete_insn (insn);
2626 else if (GET_CODE (body) == PARALLEL)
2628 /* If each part is a set between two identical registers or
2629 a USE or CLOBBER, delete the insn. */
2630 int i, sreg, dreg;
2631 rtx tem;
2633 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
2635 tem = XVECEXP (body, 0, i);
2636 if (GET_CODE (tem) == USE || GET_CODE (tem) == CLOBBER)
2637 continue;
2639 if (GET_CODE (tem) != SET
2640 || (sreg = true_regnum (SET_SRC (tem))) < 0
2641 || (dreg = true_regnum (SET_DEST (tem))) < 0
2642 || dreg != sreg)
2643 break;
2646 if (i < 0)
2647 delete_insn (insn);
2649 /* Also delete insns to store bit fields if they are no-ops. */
2650 /* Not worth the hair to detect this in the big-endian case. */
2651 else if (! BYTES_BIG_ENDIAN
2652 && GET_CODE (body) == SET
2653 && GET_CODE (SET_DEST (body)) == ZERO_EXTRACT
2654 && XEXP (SET_DEST (body), 2) == const0_rtx
2655 && XEXP (SET_DEST (body), 0) == SET_SRC (body)
2656 && ! (GET_CODE (SET_SRC (body)) == MEM
2657 && MEM_VOLATILE_P (SET_SRC (body))))
2658 delete_insn (insn);
2660 insn = next;
2664 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2665 If so indicate that this function can drop off the end by returning
2666 1, else return 0.
2668 CHECK_DELETED indicates whether we must check if the note being
2669 searched for has the deleted flag set.
2671 DELETE_FINAL_NOTE indicates whether we should delete the note
2672 if we find it. */
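/* Sketch: walk backwards from LAST, tolerating one code label (the
   return label), ordinary insns, RETURN jumps, barriers, and other
   notes; if the walk reaches a NOTE_INSN_FUNCTION_END that has not
   been deleted, execution can drop off the end.  */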
2674 static int
2675 calculate_can_reach_end (last, check_deleted, delete_final_note)
2676 rtx last;
2677 int check_deleted;
2678 int delete_final_note;
2680 rtx insn = last;
2681 int n_labels = 1;
2683 while (insn != NULL_RTX)
2685 int ok = 0;
2687 /* One label can follow the end-note: the return label. */
2688 if (GET_CODE (insn) == CODE_LABEL && n_labels-- > 0)
2689 ok = 1;
2690 /* Ordinary insns can follow it if returning a structure. */
2691 else if (GET_CODE (insn) == INSN)
2692 ok = 1;
2693 /* If the machine uses explicit RETURN insns and has no epilogue,
2694 then one of them follows the note. */
2695 else if (GET_CODE (insn) == JUMP_INSN
2696 && GET_CODE (PATTERN (insn)) == RETURN)
2697 ok = 1;
2698 /* A barrier can follow the return insn. */
2699 else if (GET_CODE (insn) == BARRIER)
2700 ok = 1;
2701 /* Other kinds of notes can follow also. */
2702 else if (GET_CODE (insn) == NOTE
2703 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END)
2704 ok = 1;
2706 if (ok != 1)
2707 break;
2709 insn = PREV_INSN (insn);
2712 /* See if we backed up to the appropriate type of note. */
2713 if (insn != NULL_RTX
2714 && GET_CODE (insn) == NOTE
2715 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END
2716 && (check_deleted == 0
2717 || ! INSN_DELETED_P (insn)))
2719 if (delete_final_note)
2720 delete_insn (insn);
2721 return 1;
2724 return 0;
2727 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2728 jump. Assume that this unconditional jump is to the exit test code. If
2729 the code is sufficiently simple, make a copy of it before LOOP_START,
2730 followed by a jump to the exit of the loop. Then delete the unconditional
2731 jump after LOOP_START.
2733 Return 1 if we made the change, else 0.
2735 This is only safe immediately after a regscan pass because it uses the
2736 values of regno_first_uid and regno_last_uid. */
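/* Before/after sketch (hypothetical labels):

       before:                          after:
         NOTE_INSN_LOOP_BEG               copy of exit test; condjump TOP
         jump TEST                        jump past the loop
       TOP: ...body...                    NOTE_INSN_LOOP_BEG
       TEST: exit test;                 TOP: ...body...
         condjump TOP                     NOTE_INSN_LOOP_VTOP
         NOTE_INSN_LOOP_END             TEST: exit test;
                                          condjump TOP
                                          NOTE_INSN_LOOP_END

   The body is then entered without first jumping to the test.  */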
2738 static int
2739 duplicate_loop_exit_test (loop_start)
2740 rtx loop_start;
2742 rtx insn, set, reg, p, link;
2743 rtx copy = 0, first_copy = 0;
2744 int num_insns = 0;
2745 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
2746 rtx lastexit;
2747 int max_reg = max_reg_num ();
2748 rtx *reg_map = 0;
2750 /* Scan the exit code. We do not perform this optimization if any insn:
2752 is a CALL_INSN
2753 is a CODE_LABEL
2754 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2755 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2756 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2757 is not valid.
2759 We also do not do this if we find an insn with ASM_OPERANDS. While
2760 this restriction should not be necessary, copying an insn with
2761 ASM_OPERANDS can confuse asm_noperands in some cases.
2763 Also, don't do this if the exit code is more than 20 insns. */
2765 for (insn = exitcode;
2766 insn
2767 && ! (GET_CODE (insn) == NOTE
2768 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
2769 insn = NEXT_INSN (insn))
2771 switch (GET_CODE (insn))
2773 case CODE_LABEL:
2774 case CALL_INSN:
2775 return 0;
2776 case NOTE:
2777 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2778 a jump immediately after the loop start that branches outside
2779 the loop but within an outer loop, near the exit test.
2780 If we copied this exit test and created a phony
2781 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2782 before the exit test look like they could be safely moved
2783 out of the loop even though they may actually never be executed.
2784 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2786 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2787 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
2788 return 0;
2790 if (optimize < 2
2791 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2792 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2793 /* If we were to duplicate this code, we would not move
2794 the BLOCK notes, and so debugging the moved code would
2795 be difficult. Thus, we only move the code with -O2 or
2796 higher. */
2797 return 0;
2799 break;
2800 case JUMP_INSN:
2801 case INSN:
2802 /* The code below would grossly mishandle REG_WAS_0 notes,
2803 so get rid of them here. */
2804 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
2805 remove_note (insn, p);
2806 if (++num_insns > 20
2807 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
2808 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2809 return 0;
2810 break;
2811 default:
2812 break;
2816 /* Unless INSN is zero, we can do the optimization. */
2817 if (insn == 0)
2818 return 0;
2820 lastexit = insn;
2822 /* See if any insn sets a register only used in the loop exit code and
2823 not a user variable. If so, replace it with a new register. */
2824 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2825 if (GET_CODE (insn) == INSN
2826 && (set = single_set (insn)) != 0
2827 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
2828 || (GET_CODE (reg) == SUBREG
2829 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
2830 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
2831 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
2833 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
2834 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
2835 break;
2837 if (p != lastexit)
2839 /* We can do the replacement. Allocate reg_map if this is the
2840 first replacement we found. */
2841 if (reg_map == 0)
2843 reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
2844 bzero ((char *) reg_map, max_reg * sizeof (rtx));
2847 REG_LOOP_TEST_P (reg) = 1;
2849 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
2853 /* Now copy each insn. */
2854 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
2856 switch (GET_CODE (insn))
2858 case BARRIER:
2859 copy = emit_barrier_before (loop_start);
2860 break;
2861 case NOTE:
2862 /* Only copy line-number notes. */
2863 if (NOTE_LINE_NUMBER (insn) >= 0)
2865 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
2866 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
2868 break;
2870 case INSN:
2871 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
2872 if (reg_map)
2873 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2875 mark_jump_label (PATTERN (copy), copy, 0);
2877 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2878 make them. */
2879 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2880 if (REG_NOTE_KIND (link) != REG_LABEL)
2881 REG_NOTES (copy)
2882 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
2883 XEXP (link, 0),
2884 REG_NOTES (copy)));
2885 if (reg_map && REG_NOTES (copy))
2886 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2887 break;
2889 case JUMP_INSN:
2890 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)), loop_start);
2891 if (reg_map)
2892 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
2893 mark_jump_label (PATTERN (copy), copy, 0);
2894 if (REG_NOTES (insn))
2896 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
2897 if (reg_map)
2898 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
2901 /* If this is a simple jump, add it to the jump chain. */
2903 if (INSN_UID (copy) < max_jump_chain && JUMP_LABEL (copy)
2904 && simplejump_p (copy))
2906 jump_chain[INSN_UID (copy)]
2907 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2908 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2910 break;
2912 default:
2913 abort ();
2916 /* Record the first insn we copied. We need it so that we can
2917 scan the copied insns for new pseudo registers. */
2918 if (! first_copy)
2919 first_copy = copy;
2922 /* Now clean up by emitting a jump to the end label and deleting the jump
2923 at the start of the loop. */
2924 if (! copy || GET_CODE (copy) != BARRIER)
2926 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
2927 loop_start);
2929 /* Record the first insn we copied. We need it so that we can
2930 scan the copied insns for new pseudo registers. This may not
2931 be strictly necessary since we should have copied at least one
2932 insn above. But I am going to be safe. */
2933 if (! first_copy)
2934 first_copy = copy;
2936 mark_jump_label (PATTERN (copy), copy, 0);
2937 if (INSN_UID (copy) < max_jump_chain
2938 && INSN_UID (JUMP_LABEL (copy)) < max_jump_chain)
2940 jump_chain[INSN_UID (copy)]
2941 = jump_chain[INSN_UID (JUMP_LABEL (copy))];
2942 jump_chain[INSN_UID (JUMP_LABEL (copy))] = copy;
2944 emit_barrier_before (loop_start);
2947 /* Now scan from the first insn we copied to the last insn we copied
2948 (copy) for new pseudo registers. Do this after the code to jump to
2949 the end label since that might create a new pseudo too. */
2950 reg_scan_update (first_copy, copy, max_reg);
2952 /* Mark the exit code as the virtual top of the converted loop. */
2953 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
2955 delete_insn (next_nonnote_insn (loop_start));
2957 return 1;
2960 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2961 loop-end notes between START and END out before START. Assume that
2962 END is not such a note. START may be such a note. Returns the value
2963 of the new starting insn, which may be different if the original start
2964 was such a note. */
2967 squeeze_notes (start, end)
2968 rtx start, end;
2970 rtx insn;
2971 rtx next;
2973 for (insn = start; insn != end; insn = next)
2975 next = NEXT_INSN (insn);
2976 if (GET_CODE (insn) == NOTE
2977 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
2978 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2979 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
2980 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
2981 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
2982 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
2984 if (insn == start)
2985 start = next;
2986 else
2988 rtx prev = PREV_INSN (insn);
2989 PREV_INSN (insn) = PREV_INSN (start);
2990 NEXT_INSN (insn) = start;
2991 NEXT_INSN (PREV_INSN (insn)) = insn;
2992 PREV_INSN (NEXT_INSN (insn)) = insn;
2993 NEXT_INSN (prev) = next;
2994 PREV_INSN (next) = prev;
2999 return start;
3002 /* Compare the instructions before insn E1 with those before E2
3003 to find an opportunity for cross jumping.
3004 (This means detecting identical sequences of insns followed by
3005 jumps to the same place, or followed by a label and a jump
3006 to that label, and replacing one with a jump to the other.)
3008 Assume E1 is a jump that jumps to label E2
3009 (that is not always true but it might as well be).
3010 Find the longest possible equivalent sequences
3011 and store the first insns of those sequences into *F1 and *F2.
3012 Store zero there if no equivalent preceding instructions are found.
3014 We give up if we find a label in stream 1.
3015 Actually we could transfer that label into stream 2. */
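/* Sketch: I1 and I2 walk backwards in lock step from E1 and E2,
   comparing patterns with rtx_renumbered_equal_p; *F1 and *F2 end up
   at the earliest insns of a matching tail at least MINIMUM insns
   long, MINIMUM being relaxed where the cross jump would feed another
   jump optimization anyway.  */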
3017 static void
3018 find_cross_jump (e1, e2, minimum, f1, f2)
3019 rtx e1, e2;
3020 int minimum;
3021 rtx *f1, *f2;
3023 register rtx i1 = e1, i2 = e2;
3024 register rtx p1, p2;
3025 int lose = 0;
3027 rtx last1 = 0, last2 = 0;
3028 rtx afterlast1 = 0, afterlast2 = 0;
3030 *f1 = 0;
3031 *f2 = 0;
3033 while (1)
3035 i1 = prev_nonnote_insn (i1);
3037 i2 = PREV_INSN (i2);
3038 while (i2 && (GET_CODE (i2) == NOTE || GET_CODE (i2) == CODE_LABEL))
3039 i2 = PREV_INSN (i2);
3041 if (i1 == 0)
3042 break;
3044 /* Don't allow the range of insns preceding E1 or E2
3045 to include the other (E2 or E1). */
3046 if (i2 == e1 || i1 == e2)
3047 break;
3049 /* If we will get to this code by jumping, those jumps will be
3050 tensioned to go directly to the new label (before I2),
3051 so this cross-jumping won't cost extra. So reduce the minimum. */
3052 if (GET_CODE (i1) == CODE_LABEL)
3054 --minimum;
3055 break;
3058 if (i2 == 0 || GET_CODE (i1) != GET_CODE (i2))
3059 break;
3061 /* Avoid moving insns across EH regions if either of the insns
3062 can throw. */
3063 if (flag_exceptions
3064 && (asynchronous_exceptions || GET_CODE (i1) == CALL_INSN)
3065 && !in_same_eh_region (i1, i2))
3066 break;
3068 p1 = PATTERN (i1);
3069 p2 = PATTERN (i2);
3071 /* If this is a CALL_INSN, compare register usage information.
3072 If we don't check this on stack register machines, the two
3073 CALL_INSNs might be merged leaving reg-stack.c with mismatching
3074 numbers of stack registers in the same basic block.
3075 If we don't check this on machines with delay slots, a delay slot may
3076 be filled that clobbers a parameter expected by the subroutine.
3078 ??? We take the simple route for now and assume that if they're
3079 equal, they were constructed identically. */
3081 if (GET_CODE (i1) == CALL_INSN
3082 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
3083 CALL_INSN_FUNCTION_USAGE (i2)))
3084 lose = 1;
3086 #ifdef STACK_REGS
3087 /* If cross_jump_death_matters is not 0, the insn's mode
3088 indicates whether or not the insn contains any stack-like
3089 regs. */
3091 if (!lose && cross_jump_death_matters && stack_regs_mentioned (i1))
3093 /* If register stack conversion has already been done, then
3094 death notes must also be compared before it is certain that
3095 the two instruction streams match. */
3097 rtx note;
3098 HARD_REG_SET i1_regset, i2_regset;
3100 CLEAR_HARD_REG_SET (i1_regset);
3101 CLEAR_HARD_REG_SET (i2_regset);
3103 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
3104 if (REG_NOTE_KIND (note) == REG_DEAD
3105 && STACK_REG_P (XEXP (note, 0)))
3106 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
3108 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
3109 if (REG_NOTE_KIND (note) == REG_DEAD
3110 && STACK_REG_P (XEXP (note, 0)))
3111 SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));
3113 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
3115 lose = 1;
3117 done:
3118 ;
3120 #endif
3122 /* Don't allow old-style asm or volatile extended asms to be accepted
3123 for cross jumping purposes. It is conceptually correct to allow
3124 them, since cross-jumping preserves the dynamic instruction order
3125 even though it is changing the static instruction order. However,
3126 if an asm is being used to emit an assembler pseudo-op, such as
3127 the MIPS `.set reorder' pseudo-op, then the static instruction order
3128 matters and it must be preserved. */
3129 if (GET_CODE (p1) == ASM_INPUT || GET_CODE (p2) == ASM_INPUT
3130 || (GET_CODE (p1) == ASM_OPERANDS && MEM_VOLATILE_P (p1))
3131 || (GET_CODE (p2) == ASM_OPERANDS && MEM_VOLATILE_P (p2)))
3132 lose = 1;
3134 if (lose || GET_CODE (p1) != GET_CODE (p2)
3135 || ! rtx_renumbered_equal_p (p1, p2))
3137 /* The following code helps take care of G++ cleanups. */
3138 rtx equiv1;
3139 rtx equiv2;
3141 if (!lose && GET_CODE (p1) == GET_CODE (p2)
3142 && ((equiv1 = find_reg_note (i1, REG_EQUAL, NULL_RTX)) != 0
3143 || (equiv1 = find_reg_note (i1, REG_EQUIV, NULL_RTX)) != 0)
3144 && ((equiv2 = find_reg_note (i2, REG_EQUAL, NULL_RTX)) != 0
3145 || (equiv2 = find_reg_note (i2, REG_EQUIV, NULL_RTX)) != 0)
3146 /* If the equivalences are not to a constant, they may
3147 reference pseudos that no longer exist, so we can't
3148 use them. */
3149 && CONSTANT_P (XEXP (equiv1, 0))
3150 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
3152 rtx s1 = single_set (i1);
3153 rtx s2 = single_set (i2);
3154 if (s1 != 0 && s2 != 0
3155 && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
3157 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
3158 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
3159 if (! rtx_renumbered_equal_p (p1, p2))
3160 cancel_changes (0);
3161 else if (apply_change_group ())
3162 goto win;
3166 /* Insns fail to match; cross jumping is limited to the following
3167 insns. */
3169 #ifdef HAVE_cc0
3170 /* Don't allow the insn after a compare to be shared by
3171 cross-jumping unless the compare is also shared.
3172 Here, if either of these non-matching insns is a compare,
3173 exclude the following insn from possible cross-jumping. */
3174 if (sets_cc0_p (p1) || sets_cc0_p (p2))
3175 last1 = afterlast1, last2 = afterlast2, ++minimum;
3176 #endif
3178 /* If cross-jumping here will feed a jump-around-jump
3179 optimization, this jump won't cost extra, so reduce
3180 the minimum. */
3181 if (GET_CODE (i1) == JUMP_INSN
3182 && JUMP_LABEL (i1)
3183 && prev_real_insn (JUMP_LABEL (i1)) == e1)
3184 --minimum;
3185 break;
3188 win:
3189 if (GET_CODE (p1) != USE && GET_CODE (p1) != CLOBBER)
3191 /* Ok, this insn is potentially includable in a cross-jump here. */
3192 afterlast1 = last1, afterlast2 = last2;
3193 last1 = i1, last2 = i2, --minimum;
3197 if (minimum <= 0 && last1 != 0 && last1 != e1)
3198 *f1 = last1, *f2 = last2;
3201 static void
3202 do_cross_jump (insn, newjpos, newlpos)
3203 rtx insn, newjpos, newlpos;
3205 /* Find an existing label at this point
3206 or make a new one if there is none. */
3207 register rtx label = get_label_before (newlpos);
3209 /* Make the same jump insn jump to the new point. */
3210 if (GET_CODE (PATTERN (insn)) == RETURN)
3212 /* Remove from jump chain of returns. */
3213 delete_from_jump_chain (insn);
3214 /* Change the insn. */
3215 PATTERN (insn) = gen_jump (label);
3216 INSN_CODE (insn) = -1;
3217 JUMP_LABEL (insn) = label;
3218 LABEL_NUSES (label)++;
3219 /* Add it to the jump chain of the new label. */
3220 if (INSN_UID (label) < max_jump_chain
3221 && INSN_UID (insn) < max_jump_chain)
3223 jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (label)];
3224 jump_chain[INSN_UID (label)] = insn;
3227 else
3228 redirect_jump (insn, label);
3230 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3231 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3232 the NEWJPOS stream. */
3234 while (newjpos != insn)
3236 rtx lnote;
3238 for (lnote = REG_NOTES (newlpos); lnote; lnote = XEXP (lnote, 1))
3239 if ((REG_NOTE_KIND (lnote) == REG_EQUAL
3240 || REG_NOTE_KIND (lnote) == REG_EQUIV)
3241 && ! find_reg_note (newjpos, REG_EQUAL, XEXP (lnote, 0))
3242 && ! find_reg_note (newjpos, REG_EQUIV, XEXP (lnote, 0)))
3243 remove_note (newlpos, lnote);
3245 delete_insn (newjpos);
3246 newjpos = next_real_insn (newjpos);
3247 newlpos = next_real_insn (newlpos);
3251 /* Return the label before INSN, or put a new label there. */
3254 get_label_before (insn)
3255 rtx insn;
3257 rtx label;
3259 /* Find an existing label at this point
3260 or make a new one if there is none. */
3261 label = prev_nonnote_insn (insn);
3263 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3265 rtx prev = PREV_INSN (insn);
3267 label = gen_label_rtx ();
3268 emit_label_after (label, prev);
3269 LABEL_NUSES (label) = 0;
3271 return label;
3274 /* Return the label after INSN, or put a new label there. */
3277 get_label_after (insn)
3278 rtx insn;
3280 rtx label;
3282 /* Find an existing label at this point
3283 or make a new one if there is none. */
3284 label = next_nonnote_insn (insn);
3286 if (label == 0 || GET_CODE (label) != CODE_LABEL)
3288 label = gen_label_rtx ();
3289 emit_label_after (label, insn);
3290 LABEL_NUSES (label) = 0;
3292 return label;
3295 /* Return 1 if INSN is a jump that jumps to right after TARGET
3296 only on the condition that TARGET itself would drop through.
3297 Assumes that TARGET is a conditional jump. */
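/* Schematically (hypothetical labels):

       TARGET:  if (cond) goto ELSEWHERE;
       L:       ...                        <- right after TARGET
       ...
       INSN:    if (! cond) goto L;

   INSN reaches the insn after TARGET exactly when TARGET would have
   dropped through to it.  */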
3299 static int
3300 jump_back_p (insn, target)
3301 rtx insn, target;
3303 rtx cinsn, ctarget;
3304 enum rtx_code codei, codet;
3306 if (simplejump_p (insn) || ! condjump_p (insn)
3307 || simplejump_p (target)
3308 || target != prev_real_insn (JUMP_LABEL (insn)))
3309 return 0;
3311 cinsn = XEXP (SET_SRC (PATTERN (insn)), 0);
3312 ctarget = XEXP (SET_SRC (PATTERN (target)), 0);
3314 codei = GET_CODE (cinsn);
3315 codet = GET_CODE (ctarget);
3317 if (XEXP (SET_SRC (PATTERN (insn)), 1) == pc_rtx)
3319 if (! can_reverse_comparison_p (cinsn, insn))
3320 return 0;
3321 codei = reverse_condition (codei);
3324 if (XEXP (SET_SRC (PATTERN (target)), 2) == pc_rtx)
3326 if (! can_reverse_comparison_p (ctarget, target))
3327 return 0;
3328 codet = reverse_condition (codet);
3331 return (codei == codet
3332 && rtx_renumbered_equal_p (XEXP (cinsn, 0), XEXP (ctarget, 0))
3333 && rtx_renumbered_equal_p (XEXP (cinsn, 1), XEXP (ctarget, 1)));
3336 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3337 return non-zero if it is safe to reverse this comparison. It is if our
3338 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3339 this is known to be an integer comparison. */
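/* Worked example: with IEEE floating point, if either operand is a
   NaN then both (a < b) and (a >= b) are false, so LT cannot be
   reversed to GE; EQ and NE stay reversible because a NaN makes EQ
   always false and NE always true.  */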
3342 can_reverse_comparison_p (comparison, insn)
3343 rtx comparison;
3344 rtx insn;
3346 rtx arg0;
3348 /* If this is not actually a comparison, we can't reverse it. */
3349 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
3350 return 0;
3352 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3353 /* If this is an NE comparison, it is safe to reverse it to an EQ
3354 comparison and vice versa, even for floating point. If no operands
3355 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3356 always false and NE is always true, so the reversal is also valid. */
3357 || flag_fast_math
3358 || GET_CODE (comparison) == NE
3359 || GET_CODE (comparison) == EQ)
3360 return 1;
3362 arg0 = XEXP (comparison, 0);
3364 /* Make sure ARG0 is one of the actual objects being compared. If we
3365 can't do this, we can't be sure the comparison can be reversed.
3367 Handle cc0 and a MODE_CC register. */
3368 if ((GET_CODE (arg0) == REG && GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC)
3369 #ifdef HAVE_cc0
3370 || arg0 == cc0_rtx
3371 #endif
3374 rtx prev = prev_nonnote_insn (insn);
3375 rtx set;
3377 /* First see if the condition code mode alone is enough to say we can
3378 reverse the condition. If not, then search backwards for a set of
3379 ARG0. We do not need to check for an insn clobbering it since valid
3380 code will contain a set with no intervening clobber. But
3381 stop when we reach a label. */
3382 #ifdef REVERSIBLE_CC_MODE
3383 if (GET_MODE_CLASS (GET_MODE (arg0)) == MODE_CC
3384 && REVERSIBLE_CC_MODE (GET_MODE (arg0)))
3385 return 1;
3386 #endif
3388 for (prev = prev_nonnote_insn (insn);
3389 prev != 0 && GET_CODE (prev) != CODE_LABEL;
3390 prev = prev_nonnote_insn (prev))
3391 if ((set = single_set (prev)) != 0
3392 && rtx_equal_p (SET_DEST (set), arg0))
3394 arg0 = SET_SRC (set);
3396 if (GET_CODE (arg0) == COMPARE)
3397 arg0 = XEXP (arg0, 0);
3398 break;
3402 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3403 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3404 return (GET_CODE (arg0) == CONST_INT
3405 || (GET_MODE (arg0) != VOIDmode
3406 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_CC
3407 && GET_MODE_CLASS (GET_MODE (arg0)) != MODE_FLOAT));
3410 /* Given an rtx-code for a comparison, return the code
3411 for the negated comparison.
3412 WATCH OUT! reverse_condition is not safe to use on a jump
3413 that might be acting on the results of an IEEE floating point comparison,
3414 because of the special treatment of non-signaling NaNs in comparisons.
3415 Use can_reverse_comparison_p to be sure. */
3417 enum rtx_code
3418 reverse_condition (code)
3419 enum rtx_code code;
3421 switch (code)
3423 case EQ:
3424 return NE;
3426 case NE:
3427 return EQ;
3429 case GT:
3430 return LE;
3432 case GE:
3433 return LT;
3435 case LT:
3436 return GE;
3438 case LE:
3439 return GT;
3441 case GTU:
3442 return LEU;
3444 case GEU:
3445 return LTU;
3447 case LTU:
3448 return GEU;
3450 case LEU:
3451 return GTU;
3453 default:
3454 abort ();
3455 return UNKNOWN;
3459 /* Similar, but return the code when two operands of a comparison are swapped.
3460 This IS safe for IEEE floating-point. */
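/* Example: swapping operands turns LT into GT, since (a < b) and
   (b > a) are the same predicate; this holds even for IEEE NaNs,
   where both are simply false.  */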
3462 enum rtx_code
3463 swap_condition (code)
3464 enum rtx_code code;
3466 switch (code)
3468 case EQ:
3469 case NE:
3470 return code;
3472 case GT:
3473 return LT;
3475 case GE:
3476 return LE;
3478 case LT:
3479 return GT;
3481 case LE:
3482 return GE;
3484 case GTU:
3485 return LTU;
3487 case GEU:
3488 return LEU;
3490 case LTU:
3491 return GTU;
3493 case LEU:
3494 return GEU;
3496 default:
3497 abort ();
3498 return UNKNOWN;
3502 /* Given a comparison CODE, return the corresponding unsigned comparison.
3503 If CODE is an equality comparison or already an unsigned comparison,
3504 CODE is returned. */
3506 enum rtx_code
3507 unsigned_condition (code)
3508 enum rtx_code code;
3510 switch (code)
3512 case EQ:
3513 case NE:
3514 case GTU:
3515 case GEU:
3516 case LTU:
3517 case LEU:
3518 return code;
3520 case GT:
3521 return GTU;
3523 case GE:
3524 return GEU;
3526 case LT:
3527 return LTU;
3529 case LE:
3530 return LEU;
3532 default:
3533 abort ();
3537 /* Similarly, return the signed version of a comparison. */
3539 enum rtx_code
3540 signed_condition (code)
3541 enum rtx_code code;
3543 switch (code)
3545 case EQ:
3546 case NE:
3547 case GT:
3548 case GE:
3549 case LT:
3550 case LE:
3551 return code;
3553 case GTU:
3554 return GT;
3556 case GEU:
3557 return GE;
3559 case LTU:
3560 return LT;
3562 case LEU:
3563 return LE;
3565 default:
3566 abort ();
3570 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3571 truth of CODE1 implies the truth of CODE2. */
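/* Examples: EQ dominates GE and LE (a == b implies both a >= b and
   a <= b), and LT dominates LE and NE.  */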
3574 comparison_dominates_p (code1, code2)
3575 enum rtx_code code1, code2;
3577 if (code1 == code2)
3578 return 1;
3580 switch (code1)
3582 case EQ:
3583 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU)
3584 return 1;
3585 break;
3587 case LT:
3588 if (code2 == LE || code2 == NE)
3589 return 1;
3590 break;
3592 case GT:
3593 if (code2 == GE || code2 == NE)
3594 return 1;
3595 break;
3597 case LTU:
3598 if (code2 == LEU || code2 == NE)
3599 return 1;
3600 break;
3602 case GTU:
3603 if (code2 == GEU || code2 == NE)
3604 return 1;
3605 break;
3607 default:
3608 break;
3611 return 0;
3614 /* Return 1 if INSN is an unconditional jump and nothing else. */
3617 simplejump_p (insn)
3618 rtx insn;
3620 return (GET_CODE (insn) == JUMP_INSN
3621 && GET_CODE (PATTERN (insn)) == SET
3622 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
3623 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.  */

int
condjump_p (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;

  return (GET_CODE (x) == IF_THEN_ELSE
	  && ((GET_CODE (XEXP (x, 2)) == PC
	       && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		   || GET_CODE (XEXP (x, 1)) == RETURN))
	      || (GET_CODE (XEXP (x, 1)) == PC
		  && (GET_CODE (XEXP (x, 2)) == LABEL_REF
		      || GET_CODE (XEXP (x, 2)) == RETURN))));
}
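/* For reference, an illustrative construction of the two rtx shapes
   accepted above.  This sketch is not part of the pass; it follows the
   gen_rtx_SET (VOIDmode, ...) convention used later in this file, and
   the helper name is hypothetical.  */
#if 0
static void
example_condjump_shapes (label, cond)
     rtx label, cond;
{
  rtx uncond, condjump;

  /* Unconditional:  (set (pc) (label_ref LABEL))  */
  uncond = gen_rtx_SET (VOIDmode, pc_rtx,
			gen_rtx_LABEL_REF (VOIDmode, label));

  /* Conditional:  (set (pc) (if_then_else COND (label_ref LABEL) (pc)))  */
  condjump = gen_rtx_SET (VOIDmode, pc_rtx,
			  gen_rtx_IF_THEN_ELSE (VOIDmode, cond,
						gen_rtx_LABEL_REF (VOIDmode,
								   label),
						pc_rtx));

  /* Wrapped in a JUMP_INSN, the first satisfies simplejump_p and the
     second satisfies condjump_p but not simplejump_p.  */
}
#endif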
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.  */

int
condjump_in_parallel_p (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}
/* Return the label of a conditional jump.  */

rtx
condjump_label (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);

  if (GET_CODE (x) == PARALLEL)
    x = XVECEXP (x, 0, 0);
  if (GET_CODE (x) != SET)
    return NULL_RTX;
  if (GET_CODE (SET_DEST (x)) != PC)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}
/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (loc, data)
     rtx *loc;
     void *data ATTRIBUTE_UNUSED;
{
  rtx x = *loc;

  /* Guard against null subexpressions handed to us by for_each_rtx.  */
  return x != 0 && GET_CODE (x) == RETURN;
}

int
returnjump_p (insn)
     rtx insn;
{
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}
/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (insn)
     rtx insn;
{
  rtx set;

  if (GET_CODE (insn) != JUMP_INSN)
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}
#ifdef HAVE_cc0

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (x)
     rtx x ATTRIBUTE_UNUSED;
{
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (label)
     rtx label;
{
  register rtx insn;
  register rtx next;
  register rtx value = label;
  register int depth;

  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& ((JUMP_LABEL (insn) != 0 && simplejump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  if (depth == 10)
    return label;
  return value;
}
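/* An illustrative sketch of what follow_jumps computes: given

	L1:  jump L2
	L2:  jump L3

   follow_jumps (L1) returns L3, while a self-loop or a chain more than
   ten jumps deep simply returns the label it started from.  The helper
   and its assumptions (each jump has JUMP_LABEL filled in and is
   followed by a BARRIER) are hypothetical.  */
#if 0
static rtx
example_follow_chain (l1)
     rtx l1;
{
  return follow_jumps (l1);	/* Yields L3 in the chain above.  */
}
#endif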
/* Assuming that field IDX of X is a vector of label_refs,
   replace each of them by the ultimate label reached by it.
   Return nonzero if a change is made.  */

static int
tension_vector_labels (x, idx)
     register rtx x;
     register int idx;
{
  int changed = 0;
  register int i;
  for (i = XVECLEN (x, idx) - 1; i >= 0; i--)
    {
      register rtx olabel = XEXP (XVECEXP (x, idx, i), 0);
      register rtx nlabel = follow_jumps (olabel);
      if (nlabel && nlabel != olabel)
	{
	  XEXP (XVECEXP (x, idx, i), 0) = nlabel;
	  ++LABEL_NUSES (nlabel);
	  if (--LABEL_NUSES (olabel) == 0)
	    delete_insn (olabel);
	  changed = 1;
	}
    }
  return changed;
}
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is finished.

   Once reload has completed (CROSS_JUMP non-zero), we need not consider
   two labels distinct if they are separated by only USE or CLOBBER insns.  */

static void
mark_jump_label (x, insn, cross_jump)
     register rtx x;
     rtx insn;
     int cross_jump;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      /* If this is a constant-pool reference, see if it is a label.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	mark_jump_label (get_pool_constant (XEXP (x, 0)), insn, cross_jump);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);
	rtx olabel = label;
	rtx note;
	rtx next;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	/* If there are other labels following this one,
	   replace it with the last of the consecutive labels.  */
	for (next = NEXT_INSN (label); next; next = NEXT_INSN (next))
	  {
	    if (GET_CODE (next) == CODE_LABEL)
	      label = next;
	    else if (cross_jump && GET_CODE (next) == INSN
		     && (GET_CODE (PATTERN (next)) == USE
			 || GET_CODE (PATTERN (next)) == CLOBBER))
	      continue;
	    else if (GET_CODE (next) != NOTE)
	      break;
	    else if (! cross_jump
		     && (NOTE_LINE_NUMBER (next) == NOTE_INSN_LOOP_BEG
			 || NOTE_LINE_NUMBER (next) == NOTE_INSN_FUNCTION_END
			 /* ??? Optional.  Disables some optimizations, but
			    makes gcov output more accurate with -O.  */
			 || (flag_test_coverage
			     && NOTE_LINE_NUMBER (next) > 0)))
	      break;
	  }

	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;

	    /* If we've changed OLABEL and we had a REG_LABEL note
	       for it, update it as well.  */
	    else if (label != olabel
		     && (note = find_reg_note (insn, REG_LABEL, olabel)) != 0)
	      XEXP (note, 0) = label;

	    /* Otherwise, add a REG_LABEL note for LABEL unless there already
	       is one.  */
	    else if (! find_reg_note (insn, REG_LABEL, label))
	      {
		/* This code used to ignore labels that referred to dispatch
		   tables to avoid flow.c generating worse code.

		   However, in the presence of global optimizations like
		   gcse which call find_basic_blocks without calling
		   life_analysis, not recording such labels will lead
		   to compiler aborts because of inconsistencies in the
		   flow graph.  So we go ahead and record the label.

		   It may also be the case that the optimization argument
		   is no longer valid because of the more accurate cfg
		   we build in find_basic_blocks -- it no longer pessimizes
		   code when it finds a REG_LABEL note.  */
		REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, label,
						      REG_NOTES (insn));
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, cross_jump);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, cross_jump);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, cross_jump);
	}
    }
}
/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

void
delete_jump (insn)
     rtx insn;
{
  register rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == PC)
    delete_computation (insn);
}
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (note, insn)
     rtx note;
     rtx insn;
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (GET_CODE (our_prev) == INSN
		    || GET_CODE (our_prev) == CALL_INSN);
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (GET_CODE (our_prev) == CALL_INSN
	  && (! CONST_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && GET_CODE (XEXP (pat, 0)) == INSN)
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	break;

      if (reg_set_p (reg, pat))
	{
	  if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && GET_CODE (SET_DEST (pat)) == REG)
	    {
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
				? HARD_REGNO_NREGS (dest_regno,
						    GET_MODE (SET_DEST (pat)))
				: 1);
	      int regno = REGNO (reg);
	      int endregno
		= regno + (regno < FIRST_PSEUDO_REGISTER
			   ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);

	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));

		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;
  rtx set;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

#ifdef INSN_SCHEDULING
  /* ?!? The schedulers do not keep REG_DEAD notes accurate after
     reload has completed.  The schedulers need to be fixed.  Until
     they are, we must not rely on the death notes here.  */
  if (reload_completed && flag_schedule_insns_after_reload)
    {
      delete_insn (insn);
      return;
    }
#endif

  /* The REG_DEAD note may have been omitted for a register
     which is both set and used by the insn.  */
  set = single_set (insn);
  if (set && GET_CODE (SET_DEST (set)) == REG)
    {
      int dest_regno = REGNO (SET_DEST (set));
      int dest_endregno
	= dest_regno + (dest_regno < FIRST_PSEUDO_REGISTER
			? HARD_REGNO_NREGS (dest_regno,
					    GET_MODE (SET_DEST (set))) : 1);
      int i;

      for (i = dest_regno; i < dest_endregno; i++)
	{
	  if (! refers_to_regno_p (i, i + 1, SET_SRC (set), NULL_PTR)
	      || find_regno_note (insn, REG_DEAD, i))
	    continue;

	  note = gen_rtx_EXPR_LIST (REG_DEAD,
				    (i < FIRST_PSEUDO_REGISTER
				     ? gen_rtx_REG (reg_raw_mode[i], i)
				     : SET_DEST (set)), NULL_RTX);
	  delete_prior_computation (note, insn);
	}
    }

  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      delete_prior_computation (note, insn);
    }

  delete_insn (insn);
}
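/* A worked example of the recursion above (illustrative only): given
   "r1 = r2 + r3" followed by a conditional jump testing r1, with a
   REG_DEAD note for r1 on the jump, deleting the jump this way lets
   delete_prior_computation remove the addition too, since nothing
   else reads r1.  The helper name is hypothetical.  */
#if 0
static void
example_delete_dead_test (jump)
     rtx jump;		/* A jump whose only input dies at the jump.  */
{
  /* delete_jump only acts when the whole insn just sets the pc;
     the setter of the dead register then goes with it.  */
  delete_jump (jump);
}
#endif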
/* Delete insn INSN from the chain of insns and update label ref counts.
   May delete some following insns as a consequence; may even delete
   a label elsewhere and insns that follow it.

   Returns the first insn after INSN that was not deleted.  */

rtx
delete_insn (insn)
     register rtx insn;
{
  register rtx next = NEXT_INSN (insn);
  register rtx prev = PREV_INSN (insn);
  register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  register int dont_really_delete = 0;

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  if (was_code_label)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* Don't delete user-declared labels.  Convert them to special NOTEs
     instead.  */
  if (was_code_label && LABEL_NAME (insn) != 0
      && optimize && ! dont_really_delete)
    {
      PUT_CODE (insn, NOTE);
      NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
      NOTE_SOURCE_FILE (insn) = 0;
      dont_really_delete = 1;
    }
  else
    /* Mark this insn as deleted.  */
    INSN_DELETED_P (insn) = 1;

  /* If this is an unconditional jump, delete it from the jump chain.  */
  if (simplejump_p (insn))
    delete_from_jump_chain (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    {
      INSN_DELETED_P (next) = 1;
      next = NEXT_INSN (next);
    }

  /* Patch out INSN (and the barrier if any).  */

  if (optimize && ! dont_really_delete)
    {
      if (prev)
	{
	  NEXT_INSN (prev) = next;
	  if (GET_CODE (prev) == INSN
	      && GET_CODE (PATTERN (prev)) == SEQUENCE)
	    NEXT_INSN (XVECEXP (PATTERN (prev), 0,
				XVECLEN (PATTERN (prev), 0) - 1)) = next;
	}

      if (next)
	{
	  PREV_INSN (next) = prev;
	  if (GET_CODE (next) == INSN
	      && GET_CODE (PATTERN (next)) == SEQUENCE)
	    PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
	}

      if (prev && NEXT_INSN (prev) == 0)
	set_last_insn (prev);
    }

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (--LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_insn (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if ((lab_next = next_nonnote_insn (lab)) != NULL
	       && GET_CODE (lab_next) == JUMP_INSN
	       && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
		   || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_insn (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
	if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_insn (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      register RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_insn (next);
	}
    }

  return next;
}
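/* Illustrative usage: because delete_insn may delete following insns
   as well, and returns the first survivor, callers must resume from
   its return value rather than from NEXT_INSN.  A hedged sketch,
   assuming STOP itself is never deleted along the way:  */
#if 0
static void
example_delete_up_to (insn, stop)
     rtx insn, stop;
{
  while (insn != stop)
    insn = delete_insn (insn);	/* Skips whatever else got deleted.  */
}
#endif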
/* Advance from INSN till reaching something not deleted
   then return that.  May return INSN itself.  */

rtx
next_nondeleted_insn (insn)
     rtx insn;
{
  while (INSN_DELETED_P (insn))
    insn = NEXT_INSN (insn);
  return insn;
}
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (from, to)
     register rtx from, to;
{
  register rtx insn = from;

  while (1)
    {
      register rtx next = NEXT_INSN (insn);
      register rtx prev = PREV_INSN (insn);

      if (GET_CODE (insn) != NOTE)
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
/* We have determined that INSN is never reached, and are about to
   delete it.  Print a warning if the user asked for one.

   To try to make this warning more useful, this should only be called
   once per basic block not reached, and it only warns when the basic
   block contains more than one line from the current function, and
   contains at least one operation.  CSE and inlining can duplicate insns,
   so it's possible to get spurious warnings from this.  */

void
never_reached_warning (avoided_insn)
     rtx avoided_insn;
{
  rtx insn;
  rtx a_line_note = NULL;
  int two_avoided_lines = 0;
  int contains_insn = 0;

  if (! warn_notreached)
    return;

  /* Scan forwards, looking at LINE_NUMBER notes, until
     we hit a LABEL or we run out of insns.  */

  for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	break;
      else if (GET_CODE (insn) == NOTE	/* A line number note?  */
	       && NOTE_LINE_NUMBER (insn) >= 0)
	{
	  if (a_line_note == NULL)
	    a_line_note = insn;
	  else
	    two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
				  != NOTE_LINE_NUMBER (insn));
	}
      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	contains_insn = 1;
    }
  if (two_avoided_lines && contains_insn)
    warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
				NOTE_LINE_NUMBER (a_line_note),
				"will never be executed");
}
/* Invert the condition of the jump JUMP, and make it jump
   to label NLABEL instead of where it jumps now.  */

int
invert_jump (jump, nlabel)
     rtx jump, nlabel;
{
  /* We have to either invert the condition and change the label or
     do neither.  Either operation could fail.  We first try to invert
     the jump.  If that succeeds, we try changing the label.  If that fails,
     we invert the jump back to what it was.  */

  if (! invert_exp (PATTERN (jump), jump))
    return 0;

  if (redirect_jump (jump, nlabel))
    {
      if (flag_branch_probabilities)
	{
	  rtx note = find_reg_note (jump, REG_BR_PROB, 0);

	  /* An inverted jump means that a probability taken becomes a
	     probability not taken.  Subtract the branch probability from the
	     probability base to convert it back to a taken probability.
	     (We don't flip the probability on a branch that's never
	     taken.)  */
	  if (note && XINT (XEXP (note, 0), 0) >= 0)
	    XINT (XEXP (note, 0), 0)
	      = REG_BR_PROB_BASE - XINT (XEXP (note, 0), 0);
	}

      return 1;
    }

  if (! invert_exp (PATTERN (jump), jump))
    /* This should just be putting it back the way it was.  */
    abort ();

  return 0;
}
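/* A worked example of the REG_BR_PROB update above: REG_BR_PROB_BASE
   is 10000, so a branch previously annotated as taken with probability
   3000 (30%) carries 10000 - 3000 = 7000 after a successful inversion.
   A hedged usage sketch (the helper name is hypothetical):  */
#if 0
static void
example_invert (jump, nlabel)
     rtx jump, nlabel;
{
  if (! invert_jump (jump, nlabel))
    /* Failure is a normal outcome: both the pattern and the label
       are left exactly as they were.  */
    ;
}
#endif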
/* Invert the jump condition of rtx X contained in jump insn INSN.

   Return 1 if we can do so, 0 if we cannot find a way to do so that
   matches a pattern.  */

int
invert_exp (x, insn)
     rtx x;
     rtx insn;
{
  register RTX_CODE code;
  register int i;
  register const char *fmt;

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      register rtx comp = XEXP (x, 0);
      register rtx tem;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      if (can_reverse_comparison_p (comp, insn)
	  && validate_change (insn, &XEXP (x, 0),
			      gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp)),
					      GET_MODE (comp), XEXP (comp, 0),
					      XEXP (comp, 1)), 0))
	return 1;

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return apply_change_group ();
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (! invert_exp (XEXP (x, i), insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (!invert_exp (XVECEXP (x, i, j), insn))
	      return 0;
	}
    }

  return 1;
}
/* Make jump JUMP jump to label NLABEL instead of where it jumps now.
   If the old jump target label is unused as a result,
   it and the code following it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't (this
   can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (&PATTERN (jump), olabel, nlabel, jump))
    return 0;

  /* If this is an unconditional branch, delete it from the jump_chain of
     OLABEL and add it to the jump_chain of NLABEL (assuming both labels
     have UID's in range and JUMP_CHAIN is valid).  */
  if (jump_chain && (simplejump_p (jump)
		     || GET_CODE (PATTERN (jump)) == RETURN))
    {
      int label_index = nlabel ? INSN_UID (nlabel) : 0;

      delete_from_jump_chain (jump);
      if (label_index < max_jump_chain
	  && INSN_UID (jump) < max_jump_chain)
	{
	  jump_chain[INSN_UID (jump)] = jump_chain[label_index];
	  jump_chain[label_index] = jump;
	}
    }

  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0)
    delete_insn (olabel);

  return 1;
}
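/* A usage sketch for redirect_jump (illustrative; NEW_TARGET is a
   hypothetical label already present in the insn stream): on success
   the use counts and the jump_chain are maintained for us, and passing
   a zero NLABEL would instead try to rewrite JUMP as a RETURN.  */
#if 0
static void
example_redirect (jump, new_target)
     rtx jump, new_target;
{
  if (redirect_jump (jump, new_target))
    /* JUMP now targets NEW_TARGET; its old target may already have
       been deleted if this was its last reference.  */
    ;
}
#endif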
/* Delete the instruction JUMP from any jump chain it might be on.  */

static void
delete_from_jump_chain (jump)
     rtx jump;
{
  int index;
  rtx olabel = JUMP_LABEL (jump);

  /* Handle unconditional jumps.  */
  if (jump_chain && olabel != 0
      && INSN_UID (olabel) < max_jump_chain
      && simplejump_p (jump))
    index = INSN_UID (olabel);
  /* Handle return insns.  */
  else if (jump_chain && GET_CODE (PATTERN (jump)) == RETURN)
    index = 0;
  else
    return;

  if (jump_chain[index] == jump)
    jump_chain[index] = jump_chain[INSN_UID (jump)];
  else
    {
      rtx insn;

      for (insn = jump_chain[index];
	   insn != 0;
	   insn = jump_chain[INSN_UID (insn)])
	if (jump_chain[INSN_UID (insn)] == jump)
	  {
	    jump_chain[INSN_UID (insn)] = jump_chain[INSN_UID (jump)];
	    break;
	  }
    }
}
/* If NLABEL is nonzero, throughout the rtx at LOC,
   alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL).  If OLABEL is
   zero, alter (RETURN) to (LABEL_REF NLABEL).

   If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
   validity with validate_change.  Convert (set (pc) (label_ref olabel))
   to (return).

   Return 0 if we found a change we would like to make but it is invalid.
   Otherwise, return 1.  */

static int
redirect_exp (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  register rtx x = *loc;
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  if (nlabel)
	    XEXP (x, 0) = nlabel;
	  else
	    return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);
	  return 1;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      return validate_change (insn, loc, x, 0);
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    return validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 0);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (! redirect_exp (&XEXP (x, i), olabel, nlabel, insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! redirect_exp (&XVECEXP (x, i, j), olabel, nlabel, insn))
	      return 0;
	}
    }

  return 1;
}
/* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.

   If the old jump target label (before the dispatch table) becomes unused,
   it and the dispatch table may be deleted.  In that case, find the insn
   before the jump references that label and delete it and logical successors
   too.  */

static void
redirect_tablejump (jump, nlabel)
     rtx jump, nlabel;
{
  register rtx olabel = JUMP_LABEL (jump);

  /* Add this jump to the jump_chain of NLABEL.  */
  if (jump_chain && INSN_UID (nlabel) < max_jump_chain
      && INSN_UID (jump) < max_jump_chain)
    {
      jump_chain[INSN_UID (jump)] = jump_chain[INSN_UID (nlabel)];
      jump_chain[INSN_UID (nlabel)] = jump;
    }

  PATTERN (jump) = gen_jump (nlabel);
  JUMP_LABEL (jump) = nlabel;
  ++LABEL_NUSES (nlabel);
  INSN_CODE (jump) = -1;

  if (--LABEL_NUSES (olabel) == 0)
    {
      delete_labelref_insn (jump, olabel, 0);
      delete_insn (olabel);
    }
}
/* Find the insn referencing LABEL that is a logical predecessor of INSN.
   If we found one, delete it and then delete this insn if DELETE_THIS is
   non-zero.  Return non-zero if INSN or a predecessor references LABEL.  */

static int
delete_labelref_insn (insn, label, delete_this)
     rtx insn, label;
     int delete_this;
{
  int deleted = 0;
  rtx link;

  if (GET_CODE (insn) != NOTE
      && reg_mentioned_p (label, PATTERN (insn)))
    {
      if (delete_this)
	{
	  delete_insn (insn);
	  deleted = 1;
	}
      else
	return 1;
    }

  for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
    if (delete_labelref_insn (XEXP (link, 0), label, 1))
      {
	if (delete_this)
	  {
	    delete_insn (insn);
	    deleted = 1;
	  }
	else
	  return 1;
      }

  return deleted;
}
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.

   ??? Addition is not commutative on the PA due to the weird implicit
   space register selection rules for memory addresses.  Therefore, we
   don't consider a + b == b + a.

   We could/should make this test a little tighter.  Possibly only
   disabling it on the PA via some backend macro or only disabling this
   case when the PLUS is inside a MEM.  */

int
rtx_renumbered_equal_p (x, y)
     rtx x, y;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
      && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
				  && GET_CODE (SUBREG_REG (y)) == REG)))
    {
      int reg_x = -1, reg_y = -1;
      int word_x = 0, word_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  word_x = SUBREG_WORD (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      reg_x = reg_renumber[reg_x] + word_x;
	      word_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  word_y = SUBREG_WORD (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      reg_y = reg_renumber[reg_y] + word_y;
	      word_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && word_x == word_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTX's match if the operands match
     in either order.  Also handle the simple binary and unary cases
     without a loop.

     ??? Don't consider PLUS a commutative operator; see comments above.  */
  if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
      && code != PLUS)
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      register int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j),
					 XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  abort ();
	}
    }
  return 1;
}
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (x)
     rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
	return SUBREG_WORD (x) + base;
    }
  return -1;
}
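/* Illustrative: cross-jumping compares post-reload operands with
   rtx_renumbered_equal_p above; true_regnum is the scalar analogue.
   A hedged sketch of the common "same hard register" test (the helper
   name is hypothetical):  */
#if 0
static int
example_same_hard_reg_p (x, y)
     rtx x, y;
{
  int rx = true_regnum (x), ry = true_regnum (y);

  /* Both resolved to the same hard register.  */
  return rx >= 0 && rx < FIRST_PSEUDO_REGISTER && rx == ry;
}
#endif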
/* Optimize code of the form:

	for (x = a[i]; x; ...)
	  ...
	for (x = a[i]; x; ...)
	  ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	    { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	    { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

static int *same_regs;

static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;

/* Called via note_stores on each insn between the target of the first
   branch and the second branch.  It marks any changed registers.  */

static void
mark_modified_reg (dest, x)
     rtx dest;
     rtx x ATTRIBUTE_UNUSED;
{
  int regno, i;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == MEM)
    modified_mem = 1;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);
  if (regno >= FIRST_PSEUDO_REGISTER)
    modified_regs[regno] = 1;
  else
    for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
      modified_regs[regno + i] = 1;
}
/* F is the first insn in the chain of insns.  */

void
thread_jumps (f, max_reg, flag_before_loop)
     rtx f;
     int max_reg;
     int flag_before_loop;
{
  /* Basic algorithm is to find a conditional branch,
     the label it may branch to, and the branch after
     that label.  If the two branches test the same condition,
     walk back from both branch paths until the insn patterns
     differ, or code labels are hit.  If we make it back to
     the target of the first branch, then we know that the first branch
     will either always succeed or always fail depending on the relative
     senses of the two branches.  So adjust the first branch accordingly
     in this case.  */

  rtx label, b1, b2, t1, t2;
  enum rtx_code code1, code2;
  rtx b1op0, b1op1, b2op0, b2op1;
  int changed = 1;
  int i;
  int *all_reset;

  /* Allocate register tables and quick-reset table.  */
  modified_regs = (char *) alloca (max_reg * sizeof (char));
  same_regs = (int *) alloca (max_reg * sizeof (int));
  all_reset = (int *) alloca (max_reg * sizeof (int));
  for (i = 0; i < max_reg; i++)
    all_reset[i] = -1;

  while (changed)
    {
      changed = 0;

      for (b1 = f; b1; b1 = NEXT_INSN (b1))
	{
	  /* Get to a candidate branch insn.  */
	  if (GET_CODE (b1) != JUMP_INSN
	      || ! condjump_p (b1) || simplejump_p (b1)
	      || JUMP_LABEL (b1) == 0)
	    continue;

	  bzero (modified_regs, max_reg * sizeof (char));
	  modified_mem = 0;

	  bcopy ((char *) all_reset, (char *) same_regs,
		 max_reg * sizeof (int));
	  num_same_regs = 0;

	  label = JUMP_LABEL (b1);

	  /* Look for a branch after the target.  Record any registers and
	     memory modified between the target and the branch.  Stop when we
	     get to a label since we can't know what was changed there.  */
	  for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
	    {
	      if (GET_CODE (b2) == CODE_LABEL)
		break;

	      else if (GET_CODE (b2) == JUMP_INSN)
		{
		  /* If this is an unconditional jump and is the only use of
		     its target label, we can follow it.  */
		  if (simplejump_p (b2)
		      && JUMP_LABEL (b2) != 0
		      && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
		    {
		      b2 = JUMP_LABEL (b2);
		      continue;
		    }
		  else
		    break;
		}

	      if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
		continue;

	      if (GET_CODE (b2) == CALL_INSN)
		{
		  modified_mem = 1;
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    if (call_used_regs[i] && ! fixed_regs[i]
			&& i != STACK_POINTER_REGNUM
			&& i != FRAME_POINTER_REGNUM
			&& i != HARD_FRAME_POINTER_REGNUM
			&& i != ARG_POINTER_REGNUM)
		      modified_regs[i] = 1;
		}

	      note_stores (PATTERN (b2), mark_modified_reg);
	    }

	  /* Check the next candidate branch insn from the label
	     of the first.  */
	  if (b2 == 0
	      || GET_CODE (b2) != JUMP_INSN
	      || b2 == b1
	      || ! condjump_p (b2)
	      || simplejump_p (b2))
	    continue;

	  /* Get the comparison codes and operands, reversing the
	     codes if appropriate.  If we don't have comparison codes,
	     we can't do anything.  */
	  b1op0 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 0);
	  b1op1 = XEXP (XEXP (SET_SRC (PATTERN (b1)), 0), 1);
	  code1 = GET_CODE (XEXP (SET_SRC (PATTERN (b1)), 0));
	  if (XEXP (SET_SRC (PATTERN (b1)), 1) == pc_rtx)
	    code1 = reverse_condition (code1);

	  b2op0 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 0);
	  b2op1 = XEXP (XEXP (SET_SRC (PATTERN (b2)), 0), 1);
	  code2 = GET_CODE (XEXP (SET_SRC (PATTERN (b2)), 0));
	  if (XEXP (SET_SRC (PATTERN (b2)), 1) == pc_rtx)
	    code2 = reverse_condition (code2);

	  /* If they test the same things and knowing that B1 branches
	     tells us whether or not B2 branches, check if we
	     can thread the branch.  */
	  if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
	      && rtx_equal_for_thread_p (b1op1, b2op1, b2)
	      && (comparison_dominates_p (code1, code2)
		  || (comparison_dominates_p (code1,
					      reverse_condition (code2))
		      && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1)),
							 0),
						   b1))))
	    {
	      t1 = prev_nonnote_insn (b1);
	      t2 = prev_nonnote_insn (b2);

	      while (t1 != 0 && t2 != 0)
		{
		  if (t2 == label)
		    {
		      /* We have reached the target of the first branch.
			 If there are no pending register equivalents,
			 we know that this branch will either always
			 succeed (if the senses of the two branches are
			 the same) or always fail (if not).  */
		      rtx new_label;

		      if (num_same_regs != 0)
			break;

		      if (comparison_dominates_p (code1, code2))
			new_label = JUMP_LABEL (b2);
		      else
			new_label = get_label_after (b2);

		      if (JUMP_LABEL (b1) != new_label)
			{
			  rtx prev = PREV_INSN (new_label);

			  if (flag_before_loop
			      && GET_CODE (prev) == NOTE
			      && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
			    {
			      /* Don't thread to the loop label.  If a loop
				 label is reused, loop optimization will
				 be disabled for that loop.  */
			      new_label = gen_label_rtx ();
			      emit_label_after (new_label, PREV_INSN (prev));
			    }
			  changed |= redirect_jump (b1, new_label);
			}
		      break;
		    }

		  /* If either of these is not a normal insn (it might be
		     a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail.  (NOTEs
		     have already been skipped above.)  Similarly, fail
		     if the insns are different.  */
		  if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
		      || recog_memoized (t1) != recog_memoized (t2)
		      || ! rtx_equal_for_thread_p (PATTERN (t1),
						   PATTERN (t2), t2))
		    break;

		  t1 = prev_nonnote_insn (t1);
		  t2 = prev_nonnote_insn (t2);
		}
	    }
	}
    }
}
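/* A worked example of the transformation above (illustrative):

	if (a == b) goto L1;  ...  L1: if (a == b) goto L2;

   Since EQ dominates EQ and nothing between the two tests changes
   A or B, the first branch is redirected straight to L2.  If the
   second test were (a != b) instead, the first branch would be
   redirected past it, via get_label_after.  The helper below is a
   hypothetical restatement of the core test.  */
#if 0
static int
example_threads_p (b1cond, b2cond, b2)
     rtx b1cond, b2cond;	/* The two comparison rtxes.  */
     rtx b2;			/* The second branch insn.  */
{
  return (rtx_equal_for_thread_p (XEXP (b1cond, 0), XEXP (b2cond, 0), b2)
	  && rtx_equal_for_thread_p (XEXP (b1cond, 1), XEXP (b2cond, 1), b2)
	  && comparison_dominates_p (GET_CODE (b1cond),
				     GET_CODE (b2cond)));
}
#endif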
/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.  */

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register const char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For floating-point, consider everything unequal.  This is a bit
     pessimistic, but this pass would only rarely do anything for FP
     anyway.  */
  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && FLOAT_MODE_P (GET_MODE (x)) && ! flag_fast_math)
    return 0;

  /* For commutative operations, the RTX's match if the operands match
     in either order.  Also handle the simple binary and unary cases
     without a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	     && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
	    || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
		&& rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	    && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

  /* Handle special-cases first.  */
  switch (code)
    {
    case REG:
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
	return 1;

      /* If neither is a user variable or a hard register, check for
	 a possible equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  || REGNO (y) < FIRST_PSEUDO_REGISTER)
	return 0;

      if (same_regs[REGNO (x)] == -1)
	{
	  same_regs[REGNO (x)] = REGNO (y);
	  num_same_regs++;

	  /* If this is the first time we are seeing a register on the `Y'
	     side, see if it is the last use.  If not, we can't thread the
	     jump, so mark it as not equivalent.  */
	  if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
	    return 0;

	  return 1;
	}
      else
	return (same_regs[REGNO (x)] == REGNO (y));

      break;

    case MEM:
      /* If memory modified or either volatile, not equivalent.
	 Else, check address.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
	 Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
	  && GET_CODE (SET_DEST (y)) == REG)
	{
	  if (same_regs[REGNO (SET_DEST (x))] == REGNO (SET_DEST (y)))
	    {
	      same_regs[REGNO (SET_DEST (x))] = -1;
	      num_same_regs--;
	    }
	  else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
	    return 0;
	}
      else
	{
	  if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	    return 0;
	}

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  if (x == y)
    return 1;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j), yinsn) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}
#if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
/* Return the insn that NEW can be safely inserted in front of starting at
   the jump insn INSN.  Return 0 if it is not safe to do this jump
   optimization.  Note that NEW must contain a single set.  */

static rtx
find_insert_position (insn, new)
     rtx insn;
     rtx new;
{
  int i;
  rtx prev;

  /* If NEW does not clobber, it is safe to insert NEW before INSN.  */
  if (GET_CODE (PATTERN (new)) != PARALLEL)
    return insn;

  for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
    if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
	&& reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
				    insn))
      break;

  if (i < 0)
    return insn;

  /* There is a good chance that the previous insn PREV sets the thing
     being clobbered (often the CC in a hard reg).  If PREV does not
     use what NEW sets, we can insert NEW before PREV.  */

  prev = prev_active_insn (insn);
  for (i = XVECLEN (PATTERN (new), 0) - 1; i >= 0; i--)
    if (GET_CODE (XVECEXP (PATTERN (new), 0, i)) == CLOBBER
	&& reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
				    insn)
	&& ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i), 0),
			    prev))
      return 0;

  return reg_mentioned_p (SET_DEST (single_set (new)), prev) ? 0 : prev;
}
#endif /* !HAVE_cc0 && !HAVE_conditional_arithmetic */