1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 88, 89, 91-99, 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
22 /* This is the jump-optimization pass of the compiler.
23 It is run two or three times: once before cse, sometimes once after cse,
24 and once after reload (before final).
26 jump_optimize deletes unreachable code and labels that are not used.
27 It also deletes jumps that jump to the following insn,
28 and simplifies jumps around unconditional jumps and jumps
29 to unconditional jumps.
31 Each CODE_LABEL has a count of the times it is used
32 stored in the LABEL_NUSES internal field, and each JUMP_INSN
33 has one label that it refers to stored in the
34 JUMP_LABEL internal field. With this we can detect labels that
35 become unused because of the deletion of all the jumps that
36 formerly used them. The JUMP_LABEL info is sometimes looked
39 Optionally, cross-jumping can be done. Currently it is done
40 only the last time (when after reload and before final).
41 In fact, the code for cross-jumping now assumes that register
42 allocation has been done, since it uses `rtx_renumbered_equal_p'.
44 Jump optimization is done after cse when cse's constant-propagation
45 causes jumps to become unconditional or to be deleted.
47 Unreachable loops are not detected here, because the labels
48 have references and the insns appear reachable from the labels.
49 find_basic_blocks in flow.c finds and deletes such loops.
51 The subroutines delete_insn, redirect_jump, and invert_jump are used
52 from other passes as well. */
59 #include "hard-reg-set.h"
61 #include "insn-config.h"
62 #include "insn-flags.h"
63 #include "insn-attr.h"
71 /* ??? Eventually must record somehow the labels used by jumps
72 from nested functions. */
73 /* Pre-record the next or previous real insn for each label?
74 No, this pass is very fast anyway. */
75 /* Condense consecutive labels?
76 This would make life analysis faster, maybe. */
77 /* Optimize jump y; x: ... y: jumpif... x?
78 Don't know if it is worth bothering with. */
79 /* Optimize two cases of conditional jump to conditional jump?
80 This can never delete any instruction or make anything dead,
81 or even change what is live at any point.
82 So perhaps let combiner do it. */
84 /* Vector indexed by uid.
85 For each CODE_LABEL, index by its uid to get first unconditional jump
86 that jumps to the label.
87 For each JUMP_INSN, index by its uid to get the next unconditional jump
88 that jumps to the same label.
89 Element 0 is the start of a chain of all return insns.
90 (It is safe to use element 0 because insn uid 0 is not used. */
92 static rtx
*jump_chain
;
94 /* Maximum index in jump_chain. */
96 static int max_jump_chain
;
/* Set nonzero by jump_optimize if control can fall through
   to the end of the function.  */
/* NOTE(review): the declaration this comment documents (presumably
   `static int can_reach_end;` -- cf. calculate_can_reach_end below) is
   missing from this excerpt; confirm against the full file.  */

/* Indicates whether death notes are significant in cross jump analysis.
   Normally they are not significant, because if A and B jump to C,
   and R dies in A, it must die in B.  But this might not be true after
   stack register conversion, and we must compare death notes in that
   case.  */

static int cross_jump_death_matters = 0;
110 static int init_label_info
PARAMS ((rtx
));
111 static void delete_barrier_successors
PARAMS ((rtx
));
112 static void mark_all_labels
PARAMS ((rtx
, int));
113 static rtx delete_unreferenced_labels
PARAMS ((rtx
));
114 static void delete_noop_moves
PARAMS ((rtx
));
115 static int calculate_can_reach_end
PARAMS ((rtx
, int));
116 static int duplicate_loop_exit_test
PARAMS ((rtx
));
117 static void find_cross_jump
PARAMS ((rtx
, rtx
, int, rtx
*, rtx
*));
118 static void do_cross_jump
PARAMS ((rtx
, rtx
, rtx
));
119 static int jump_back_p
PARAMS ((rtx
, rtx
));
120 static int tension_vector_labels
PARAMS ((rtx
, int));
121 static void mark_jump_label
PARAMS ((rtx
, rtx
, int, int));
122 static void delete_computation
PARAMS ((rtx
));
123 static void delete_from_jump_chain
PARAMS ((rtx
));
124 static int delete_labelref_insn
PARAMS ((rtx
, rtx
, int));
125 static void mark_modified_reg
PARAMS ((rtx
, rtx
, void *));
126 static void redirect_tablejump
PARAMS ((rtx
, rtx
));
127 static void jump_optimize_1
PARAMS ((rtx
, int, int, int, int));
128 #if ! defined(HAVE_cc0) && ! defined(HAVE_conditional_arithmetic)
129 static rtx find_insert_position
PARAMS ((rtx
, rtx
));
131 static int returnjump_p_1
PARAMS ((rtx
*, void *));
132 static void delete_prior_computation
PARAMS ((rtx
, rtx
));
134 /* Main external entry point into the jump optimizer. See comments before
135 jump_optimize_1 for descriptions of the arguments. */
137 jump_optimize (f
, cross_jump
, noop_moves
, after_regscan
)
143 jump_optimize_1 (f
, cross_jump
, noop_moves
, after_regscan
, 0);
146 /* Alternate entry into the jump optimizer. This entry point only rebuilds
147 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
150 rebuild_jump_labels (f
)
153 jump_optimize_1 (f
, 0, 0, 0, 1);
157 /* Delete no-op jumps and optimize jumps to jumps
158 and jumps around jumps.
159 Delete unused labels and unreachable code.
161 If CROSS_JUMP is 1, detect matching code
162 before a jump and its destination and unify them.
163 If CROSS_JUMP is 2, do cross-jumping, but pay attention to death notes.
165 If NOOP_MOVES is nonzero, delete no-op move insns.
167 If AFTER_REGSCAN is nonzero, then this jump pass is being run immediately
168 after regscan, and it is safe to use regno_first_uid and regno_last_uid.
170 If MARK_LABELS_ONLY is nonzero, then we only rebuild the jump chain
171 and JUMP_LABEL field for jumping insns.
173 If `optimize' is zero, don't change any code,
174 just determine whether control drops off the end of the function.
175 This case occurs when we have -W and not -O.
176 It works because `delete_insn' checks the value of `optimize'
177 and refrains from actually deleting when that is 0. */
180 jump_optimize_1 (f
, cross_jump
, noop_moves
, after_regscan
, mark_labels_only
)
185 int mark_labels_only
;
187 register rtx insn
, next
;
194 cross_jump_death_matters
= (cross_jump
== 2);
195 max_uid
= init_label_info (f
) + 1;
197 /* If we are performing cross jump optimizations, then initialize
198 tables mapping UIDs to EH regions to avoid incorrect movement
199 of insns from one EH region to another. */
200 if (flag_exceptions
&& cross_jump
)
201 init_insn_eh_region (f
, max_uid
);
203 delete_barrier_successors (f
);
205 /* Leave some extra room for labels and duplicate exit test insns
207 max_jump_chain
= max_uid
* 14 / 10;
208 jump_chain
= (rtx
*) xcalloc (max_jump_chain
, sizeof (rtx
));
210 mark_all_labels (f
, cross_jump
);
212 /* Keep track of labels used from static data;
213 they cannot ever be deleted. */
215 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
216 LABEL_NUSES (XEXP (insn
, 0))++;
218 check_exception_handler_labels ();
220 /* Keep track of labels used for marking handlers for exception
221 regions; they cannot usually be deleted. */
223 for (insn
= exception_handler_labels
; insn
; insn
= XEXP (insn
, 1))
224 LABEL_NUSES (XEXP (insn
, 0))++;
226 /* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
227 notes and recompute LABEL_NUSES. */
228 if (mark_labels_only
)
231 exception_optimize ();
233 last_insn
= delete_unreferenced_labels (f
);
236 delete_noop_moves (f
);
238 /* If we haven't yet gotten to reload and we have just run regscan,
239 delete any insn that sets a register that isn't used elsewhere.
240 This helps some of the optimizations below by having less insns
241 being jumped around. */
243 if (optimize
&& ! reload_completed
&& after_regscan
)
244 for (insn
= f
; insn
; insn
= next
)
246 rtx set
= single_set (insn
);
248 next
= NEXT_INSN (insn
);
250 if (set
&& GET_CODE (SET_DEST (set
)) == REG
251 && REGNO (SET_DEST (set
)) >= FIRST_PSEUDO_REGISTER
252 && REGNO_FIRST_UID (REGNO (SET_DEST (set
))) == INSN_UID (insn
)
253 /* We use regno_last_note_uid so as not to delete the setting
254 of a reg that's used in notes. A subsequent optimization
255 might arrange to use that reg for real. */
256 && REGNO_LAST_NOTE_UID (REGNO (SET_DEST (set
))) == INSN_UID (insn
)
257 && ! side_effects_p (SET_SRC (set
))
258 && ! find_reg_note (insn
, REG_RETVAL
, 0)
259 /* An ADDRESSOF expression can turn into a use of the internal arg
260 pointer, so do not delete the initialization of the internal
261 arg pointer yet. If it is truly dead, flow will delete the
262 initializing insn. */
263 && SET_DEST (set
) != current_function_internal_arg_pointer
)
267 /* Now iterate optimizing jumps until nothing changes over one pass. */
269 old_max_reg
= max_reg_num ();
274 for (insn
= f
; insn
; insn
= next
)
277 rtx temp
, temp1
, temp2
= NULL_RTX
, temp3
, temp4
, temp5
, temp6
;
279 int this_is_simplejump
, this_is_condjump
, reversep
= 0;
280 int this_is_condjump_in_parallel
;
282 next
= NEXT_INSN (insn
);
284 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
285 jump. Try to optimize by duplicating the loop exit test if so.
286 This is only safe immediately after regscan, because it uses
287 the values of regno_first_uid and regno_last_uid. */
288 if (after_regscan
&& GET_CODE (insn
) == NOTE
289 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
290 && (temp1
= next_nonnote_insn (insn
)) != 0
291 && simplejump_p (temp1
))
293 temp
= PREV_INSN (insn
);
294 if (duplicate_loop_exit_test (insn
))
297 next
= NEXT_INSN (temp
);
302 if (GET_CODE (insn
) != JUMP_INSN
)
305 this_is_simplejump
= simplejump_p (insn
);
306 this_is_condjump
= condjump_p (insn
);
307 this_is_condjump_in_parallel
= condjump_in_parallel_p (insn
);
309 /* Tension the labels in dispatch tables. */
311 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
)
312 changed
|= tension_vector_labels (PATTERN (insn
), 0);
313 if (GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
314 changed
|= tension_vector_labels (PATTERN (insn
), 1);
316 /* See if this jump goes to another jump and redirect if so. */
317 nlabel
= follow_jumps (JUMP_LABEL (insn
));
318 if (nlabel
!= JUMP_LABEL (insn
))
319 changed
|= redirect_jump (insn
, nlabel
);
324 /* If a dispatch table always goes to the same place,
325 get rid of it and replace the insn that uses it. */
327 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
328 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
331 rtx pat
= PATTERN (insn
);
332 int diff_vec_p
= GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
;
333 int len
= XVECLEN (pat
, diff_vec_p
);
334 rtx dispatch
= prev_real_insn (insn
);
337 for (i
= 0; i
< len
; i
++)
338 if (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)
339 != XEXP (XVECEXP (pat
, diff_vec_p
, 0), 0))
344 && GET_CODE (dispatch
) == JUMP_INSN
345 && JUMP_LABEL (dispatch
) != 0
346 /* Don't mess with a casesi insn.
347 XXX according to the comment before computed_jump_p(),
348 all casesi insns should be a parallel of the jump
349 and a USE of a LABEL_REF. */
350 && ! ((set
= single_set (dispatch
)) != NULL
351 && (GET_CODE (SET_SRC (set
)) == IF_THEN_ELSE
))
352 && next_real_insn (JUMP_LABEL (dispatch
)) == insn
)
354 redirect_tablejump (dispatch
,
355 XEXP (XVECEXP (pat
, diff_vec_p
, 0), 0));
360 /* If a jump references the end of the function, try to turn
361 it into a RETURN insn, possibly a conditional one. */
362 if (JUMP_LABEL (insn
) != 0
363 && (next_active_insn (JUMP_LABEL (insn
)) == 0
364 || GET_CODE (PATTERN (next_active_insn (JUMP_LABEL (insn
))))
366 changed
|= redirect_jump (insn
, NULL_RTX
);
368 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
370 /* Detect jump to following insn. */
371 if (reallabelprev
== insn
&& this_is_condjump
)
373 next
= next_real_insn (JUMP_LABEL (insn
));
379 /* Detect a conditional jump going to the same place
380 as an immediately following unconditional jump. */
381 else if (this_is_condjump
382 && (temp
= next_active_insn (insn
)) != 0
383 && simplejump_p (temp
)
384 && (next_active_insn (JUMP_LABEL (insn
))
385 == next_active_insn (JUMP_LABEL (temp
))))
387 /* Don't mess up test coverage analysis. */
389 if (flag_test_coverage
&& !reload_completed
)
390 for (temp2
= insn
; temp2
!= temp
; temp2
= NEXT_INSN (temp2
))
391 if (GET_CODE (temp2
) == NOTE
&& NOTE_LINE_NUMBER (temp2
) > 0)
402 /* Detect a conditional jump jumping over an unconditional jump. */
404 else if ((this_is_condjump
|| this_is_condjump_in_parallel
)
405 && ! this_is_simplejump
406 && reallabelprev
!= 0
407 && GET_CODE (reallabelprev
) == JUMP_INSN
408 && prev_active_insn (reallabelprev
) == insn
409 && no_labels_between_p (insn
, reallabelprev
)
410 && simplejump_p (reallabelprev
))
412 /* When we invert the unconditional jump, we will be
413 decrementing the usage count of its old label.
414 Make sure that we don't delete it now because that
415 might cause the following code to be deleted. */
416 rtx prev_uses
= prev_nonnote_insn (reallabelprev
);
417 rtx prev_label
= JUMP_LABEL (insn
);
420 ++LABEL_NUSES (prev_label
);
422 if (invert_jump (insn
, JUMP_LABEL (reallabelprev
)))
424 /* It is very likely that if there are USE insns before
425 this jump, they hold REG_DEAD notes. These REG_DEAD
426 notes are no longer valid due to this optimization,
427 and will cause the life-analysis that following passes
428 (notably delayed-branch scheduling) to think that
429 these registers are dead when they are not.
431 To prevent this trouble, we just remove the USE insns
432 from the insn chain. */
434 while (prev_uses
&& GET_CODE (prev_uses
) == INSN
435 && GET_CODE (PATTERN (prev_uses
)) == USE
)
437 rtx useless
= prev_uses
;
438 prev_uses
= prev_nonnote_insn (prev_uses
);
439 delete_insn (useless
);
442 delete_insn (reallabelprev
);
446 /* We can now safely delete the label if it is unreferenced
447 since the delete_insn above has deleted the BARRIER. */
448 if (prev_label
&& --LABEL_NUSES (prev_label
) == 0)
449 delete_insn (prev_label
);
451 next
= NEXT_INSN (insn
);
454 /* If we have an unconditional jump preceded by a USE, try to put
455 the USE before the target and jump there. This simplifies many
456 of the optimizations below since we don't have to worry about
457 dealing with these USE insns. We only do this if the label
458 being branch to already has the identical USE or if code
459 never falls through to that label. */
461 else if (this_is_simplejump
462 && (temp
= prev_nonnote_insn (insn
)) != 0
463 && GET_CODE (temp
) == INSN
464 && GET_CODE (PATTERN (temp
)) == USE
465 && (temp1
= prev_nonnote_insn (JUMP_LABEL (insn
))) != 0
466 && (GET_CODE (temp1
) == BARRIER
467 || (GET_CODE (temp1
) == INSN
468 && rtx_equal_p (PATTERN (temp
), PATTERN (temp1
))))
469 /* Don't do this optimization if we have a loop containing
470 only the USE instruction, and the loop start label has
471 a usage count of 1. This is because we will redo this
472 optimization every time through the outer loop, and jump
473 opt will never exit. */
474 && ! ((temp2
= prev_nonnote_insn (temp
)) != 0
475 && temp2
== JUMP_LABEL (insn
)
476 && LABEL_NUSES (temp2
) == 1))
478 if (GET_CODE (temp1
) == BARRIER
)
480 emit_insn_after (PATTERN (temp
), temp1
);
481 temp1
= NEXT_INSN (temp1
);
485 redirect_jump (insn
, get_label_before (temp1
));
486 reallabelprev
= prev_real_insn (temp1
);
488 next
= NEXT_INSN (insn
);
491 /* Simplify if (...) x = a; else x = b; by converting it
492 to x = b; if (...) x = a;
493 if B is sufficiently simple, the test doesn't involve X,
494 and nothing in the test modifies B or X.
496 If we have small register classes, we also can't do this if X
499 If the "x = b;" insn has any REG_NOTES, we don't do this because
500 of the possibility that we are running after CSE and there is a
501 REG_EQUAL note that is only valid if the branch has already been
502 taken. If we move the insn with the REG_EQUAL note, we may
503 fold the comparison to always be false in a later CSE pass.
504 (We could also delete the REG_NOTES when moving the insn, but it
505 seems simpler to not move it.) An exception is that we can move
506 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
507 value is the same as "b".
509 INSN is the branch over the `else' part.
513 TEMP to the jump insn preceding "x = a;"
515 TEMP2 to the insn that sets "x = b;"
516 TEMP3 to the insn that sets "x = a;"
517 TEMP4 to the set of "x = b"; */
519 if (this_is_simplejump
520 && (temp3
= prev_active_insn (insn
)) != 0
521 && GET_CODE (temp3
) == INSN
522 && (temp4
= single_set (temp3
)) != 0
523 && GET_CODE (temp1
= SET_DEST (temp4
)) == REG
524 && (! SMALL_REGISTER_CLASSES
525 || REGNO (temp1
) >= FIRST_PSEUDO_REGISTER
)
526 && (temp2
= next_active_insn (insn
)) != 0
527 && GET_CODE (temp2
) == INSN
528 && (temp4
= single_set (temp2
)) != 0
529 && rtx_equal_p (SET_DEST (temp4
), temp1
)
530 && ! side_effects_p (SET_SRC (temp4
))
531 && ! may_trap_p (SET_SRC (temp4
))
532 && (REG_NOTES (temp2
) == 0
533 || ((REG_NOTE_KIND (REG_NOTES (temp2
)) == REG_EQUAL
534 || REG_NOTE_KIND (REG_NOTES (temp2
)) == REG_EQUIV
)
535 && XEXP (REG_NOTES (temp2
), 1) == 0
536 && rtx_equal_p (XEXP (REG_NOTES (temp2
), 0),
538 && (temp
= prev_active_insn (temp3
)) != 0
539 && condjump_p (temp
) && ! simplejump_p (temp
)
540 /* TEMP must skip over the "x = a;" insn */
541 && prev_real_insn (JUMP_LABEL (temp
)) == insn
542 && no_labels_between_p (insn
, JUMP_LABEL (temp
))
543 /* There must be no other entries to the "x = b;" insn. */
544 && no_labels_between_p (JUMP_LABEL (temp
), temp2
)
545 /* INSN must either branch to the insn after TEMP2 or the insn
546 after TEMP2 must branch to the same place as INSN. */
547 && (reallabelprev
== temp2
548 || ((temp5
= next_active_insn (temp2
)) != 0
549 && simplejump_p (temp5
)
550 && JUMP_LABEL (temp5
) == JUMP_LABEL (insn
))))
552 /* The test expression, X, may be a complicated test with
553 multiple branches. See if we can find all the uses of
554 the label that TEMP branches to without hitting a CALL_INSN
555 or a jump to somewhere else. */
556 rtx target
= JUMP_LABEL (temp
);
557 int nuses
= LABEL_NUSES (target
);
563 /* Set P to the first jump insn that goes around "x = a;". */
564 for (p
= temp
; nuses
&& p
; p
= prev_nonnote_insn (p
))
566 if (GET_CODE (p
) == JUMP_INSN
)
568 if (condjump_p (p
) && ! simplejump_p (p
)
569 && JUMP_LABEL (p
) == target
)
578 else if (GET_CODE (p
) == CALL_INSN
)
583 /* We cannot insert anything between a set of cc and its use
584 so if P uses cc0, we must back up to the previous insn. */
585 q
= prev_nonnote_insn (p
);
586 if (q
&& GET_RTX_CLASS (GET_CODE (q
)) == 'i'
587 && sets_cc0_p (PATTERN (q
)))
594 /* If we found all the uses and there was no data conflict, we
595 can move the assignment unless we can branch into the middle
598 && no_labels_between_p (p
, insn
)
599 && ! reg_referenced_between_p (temp1
, p
, NEXT_INSN (temp3
))
600 && ! reg_set_between_p (temp1
, p
, temp3
)
601 && (GET_CODE (SET_SRC (temp4
)) == CONST_INT
602 || ! modified_between_p (SET_SRC (temp4
), p
, temp2
))
603 /* Verify that registers used by the jump are not clobbered
604 by the instruction being moved. */
605 && ! regs_set_between_p (PATTERN (temp
),
609 emit_insn_after_with_line_notes (PATTERN (temp2
), p
, temp2
);
612 /* Set NEXT to an insn that we know won't go away. */
613 next
= next_active_insn (insn
);
615 /* Delete the jump around the set. Note that we must do
616 this before we redirect the test jumps so that it won't
617 delete the code immediately following the assignment
618 we moved (which might be a jump). */
622 /* We either have two consecutive labels or a jump to
623 a jump, so adjust all the JUMP_INSNs to branch to where
625 for (p
= NEXT_INSN (p
); p
!= next
; p
= NEXT_INSN (p
))
626 if (GET_CODE (p
) == JUMP_INSN
)
627 redirect_jump (p
, target
);
630 next
= NEXT_INSN (insn
);
635 /* Simplify if (...) { x = a; goto l; } x = b; by converting it
636 to x = a; if (...) goto l; x = b;
637 if A is sufficiently simple, the test doesn't involve X,
638 and nothing in the test modifies A or X.
640 If we have small register classes, we also can't do this if X
643 If the "x = a;" insn has any REG_NOTES, we don't do this because
644 of the possibility that we are running after CSE and there is a
645 REG_EQUAL note that is only valid if the branch has already been
646 taken. If we move the insn with the REG_EQUAL note, we may
647 fold the comparison to always be false in a later CSE pass.
648 (We could also delete the REG_NOTES when moving the insn, but it
649 seems simpler to not move it.) An exception is that we can move
650 the insn if the only note is a REG_EQUAL or REG_EQUIV whose
651 value is the same as "a".
657 TEMP to the jump insn preceding "x = a;"
659 TEMP2 to the insn that sets "x = b;"
660 TEMP3 to the insn that sets "x = a;"
661 TEMP4 to the set of "x = a"; */
663 if (this_is_simplejump
664 && (temp2
= next_active_insn (insn
)) != 0
665 && GET_CODE (temp2
) == INSN
666 && (temp4
= single_set (temp2
)) != 0
667 && GET_CODE (temp1
= SET_DEST (temp4
)) == REG
668 && (! SMALL_REGISTER_CLASSES
669 || REGNO (temp1
) >= FIRST_PSEUDO_REGISTER
)
670 && (temp3
= prev_active_insn (insn
)) != 0
671 && GET_CODE (temp3
) == INSN
672 && (temp4
= single_set (temp3
)) != 0
673 && rtx_equal_p (SET_DEST (temp4
), temp1
)
674 && ! side_effects_p (SET_SRC (temp4
))
675 && ! may_trap_p (SET_SRC (temp4
))
676 && (REG_NOTES (temp3
) == 0
677 || ((REG_NOTE_KIND (REG_NOTES (temp3
)) == REG_EQUAL
678 || REG_NOTE_KIND (REG_NOTES (temp3
)) == REG_EQUIV
)
679 && XEXP (REG_NOTES (temp3
), 1) == 0
680 && rtx_equal_p (XEXP (REG_NOTES (temp3
), 0),
682 && (temp
= prev_active_insn (temp3
)) != 0
683 && condjump_p (temp
) && ! simplejump_p (temp
)
684 /* TEMP must skip over the "x = a;" insn */
685 && prev_real_insn (JUMP_LABEL (temp
)) == insn
686 && no_labels_between_p (temp
, insn
))
688 rtx prev_label
= JUMP_LABEL (temp
);
689 rtx insert_after
= prev_nonnote_insn (temp
);
692 /* We cannot insert anything between a set of cc and its use. */
693 if (insert_after
&& GET_RTX_CLASS (GET_CODE (insert_after
)) == 'i'
694 && sets_cc0_p (PATTERN (insert_after
)))
695 insert_after
= prev_nonnote_insn (insert_after
);
697 ++LABEL_NUSES (prev_label
);
700 && no_labels_between_p (insert_after
, temp
)
701 && ! reg_referenced_between_p (temp1
, insert_after
, temp3
)
702 && ! reg_referenced_between_p (temp1
, temp3
,
704 && ! reg_set_between_p (temp1
, insert_after
, temp
)
705 && ! modified_between_p (SET_SRC (temp4
), insert_after
, temp
)
706 /* Verify that registers used by the jump are not clobbered
707 by the instruction being moved. */
708 && ! regs_set_between_p (PATTERN (temp
),
711 && invert_jump (temp
, JUMP_LABEL (insn
)))
713 emit_insn_after_with_line_notes (PATTERN (temp3
),
714 insert_after
, temp3
);
717 /* Set NEXT to an insn that we know won't go away. */
721 if (prev_label
&& --LABEL_NUSES (prev_label
) == 0)
722 delete_insn (prev_label
);
727 #if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
729 /* If we have if (...) x = exp; and branches are expensive,
730 EXP is a single insn, does not have any side effects, cannot
731 trap, and is not too costly, convert this to
732 t = exp; if (...) x = t;
734 Don't do this when we have CC0 because it is unlikely to help
735 and we'd need to worry about where to place the new insn and
736 the potential for conflicts. We also can't do this when we have
737 notes on the insn for the same reason as above.
739 If we have conditional arithmetic, this will make this
740 harder to optimize later and isn't needed, so don't do it
745 TEMP to the "x = exp;" insn.
746 TEMP1 to the single set in the "x = exp;" insn.
749 if (! reload_completed
750 && this_is_condjump
&& ! this_is_simplejump
752 && (temp
= next_nonnote_insn (insn
)) != 0
753 && GET_CODE (temp
) == INSN
754 && REG_NOTES (temp
) == 0
755 && (reallabelprev
== temp
756 || ((temp2
= next_active_insn (temp
)) != 0
757 && simplejump_p (temp2
)
758 && JUMP_LABEL (temp2
) == JUMP_LABEL (insn
)))
759 && (temp1
= single_set (temp
)) != 0
760 && (temp2
= SET_DEST (temp1
), GET_CODE (temp2
) == REG
)
761 && (! SMALL_REGISTER_CLASSES
762 || REGNO (temp2
) >= FIRST_PSEUDO_REGISTER
)
763 && GET_CODE (SET_SRC (temp1
)) != REG
764 && GET_CODE (SET_SRC (temp1
)) != SUBREG
765 && GET_CODE (SET_SRC (temp1
)) != CONST_INT
766 && ! side_effects_p (SET_SRC (temp1
))
767 && ! may_trap_p (SET_SRC (temp1
))
768 && rtx_cost (SET_SRC (temp1
), SET
) < 10)
770 rtx
new = gen_reg_rtx (GET_MODE (temp2
));
772 if ((temp3
= find_insert_position (insn
, temp
))
773 && validate_change (temp
, &SET_DEST (temp1
), new, 0))
775 next
= emit_insn_after (gen_move_insn (temp2
, new), insn
);
776 emit_insn_after_with_line_notes (PATTERN (temp
),
777 PREV_INSN (temp3
), temp
);
779 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
783 reg_scan_update (temp3
, NEXT_INSN (next
), old_max_reg
);
784 old_max_reg
= max_reg_num ();
789 /* Similarly, if it takes two insns to compute EXP but they
790 have the same destination. Here TEMP3 will be the second
791 insn and TEMP4 the SET from that insn. */
793 if (! reload_completed
794 && this_is_condjump
&& ! this_is_simplejump
796 && (temp
= next_nonnote_insn (insn
)) != 0
797 && GET_CODE (temp
) == INSN
798 && REG_NOTES (temp
) == 0
799 && (temp3
= next_nonnote_insn (temp
)) != 0
800 && GET_CODE (temp3
) == INSN
801 && REG_NOTES (temp3
) == 0
802 && (reallabelprev
== temp3
803 || ((temp2
= next_active_insn (temp3
)) != 0
804 && simplejump_p (temp2
)
805 && JUMP_LABEL (temp2
) == JUMP_LABEL (insn
)))
806 && (temp1
= single_set (temp
)) != 0
807 && (temp2
= SET_DEST (temp1
), GET_CODE (temp2
) == REG
)
808 && GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
809 && (! SMALL_REGISTER_CLASSES
810 || REGNO (temp2
) >= FIRST_PSEUDO_REGISTER
)
811 && ! side_effects_p (SET_SRC (temp1
))
812 && ! may_trap_p (SET_SRC (temp1
))
813 && rtx_cost (SET_SRC (temp1
), SET
) < 10
814 && (temp4
= single_set (temp3
)) != 0
815 && rtx_equal_p (SET_DEST (temp4
), temp2
)
816 && ! side_effects_p (SET_SRC (temp4
))
817 && ! may_trap_p (SET_SRC (temp4
))
818 && rtx_cost (SET_SRC (temp4
), SET
) < 10)
820 rtx
new = gen_reg_rtx (GET_MODE (temp2
));
822 if ((temp5
= find_insert_position (insn
, temp
))
823 && (temp6
= find_insert_position (insn
, temp3
))
824 && validate_change (temp
, &SET_DEST (temp1
), new, 0))
826 /* Use the earliest of temp5 and temp6. */
829 next
= emit_insn_after (gen_move_insn (temp2
, new), insn
);
830 emit_insn_after_with_line_notes (PATTERN (temp
),
831 PREV_INSN (temp6
), temp
);
832 emit_insn_after_with_line_notes
833 (replace_rtx (PATTERN (temp3
), temp2
, new),
834 PREV_INSN (temp6
), temp3
);
837 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
841 reg_scan_update (temp6
, NEXT_INSN (next
), old_max_reg
);
842 old_max_reg
= max_reg_num ();
847 /* Finally, handle the case where two insns are used to
848 compute EXP but a temporary register is used. Here we must
849 ensure that the temporary register is not used anywhere else. */
851 if (! reload_completed
853 && this_is_condjump
&& ! this_is_simplejump
855 && (temp
= next_nonnote_insn (insn
)) != 0
856 && GET_CODE (temp
) == INSN
857 && REG_NOTES (temp
) == 0
858 && (temp3
= next_nonnote_insn (temp
)) != 0
859 && GET_CODE (temp3
) == INSN
860 && REG_NOTES (temp3
) == 0
861 && (reallabelprev
== temp3
862 || ((temp2
= next_active_insn (temp3
)) != 0
863 && simplejump_p (temp2
)
864 && JUMP_LABEL (temp2
) == JUMP_LABEL (insn
)))
865 && (temp1
= single_set (temp
)) != 0
866 && (temp5
= SET_DEST (temp1
),
867 (GET_CODE (temp5
) == REG
868 || (GET_CODE (temp5
) == SUBREG
869 && (temp5
= SUBREG_REG (temp5
),
870 GET_CODE (temp5
) == REG
))))
871 && REGNO (temp5
) >= FIRST_PSEUDO_REGISTER
872 && REGNO_FIRST_UID (REGNO (temp5
)) == INSN_UID (temp
)
873 && REGNO_LAST_UID (REGNO (temp5
)) == INSN_UID (temp3
)
874 && ! side_effects_p (SET_SRC (temp1
))
875 && ! may_trap_p (SET_SRC (temp1
))
876 && rtx_cost (SET_SRC (temp1
), SET
) < 10
877 && (temp4
= single_set (temp3
)) != 0
878 && (temp2
= SET_DEST (temp4
), GET_CODE (temp2
) == REG
)
879 && GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
880 && (! SMALL_REGISTER_CLASSES
881 || REGNO (temp2
) >= FIRST_PSEUDO_REGISTER
)
882 && rtx_equal_p (SET_DEST (temp4
), temp2
)
883 && ! side_effects_p (SET_SRC (temp4
))
884 && ! may_trap_p (SET_SRC (temp4
))
885 && rtx_cost (SET_SRC (temp4
), SET
) < 10)
887 rtx
new = gen_reg_rtx (GET_MODE (temp2
));
889 if ((temp5
= find_insert_position (insn
, temp
))
890 && (temp6
= find_insert_position (insn
, temp3
))
891 && validate_change (temp3
, &SET_DEST (temp4
), new, 0))
893 /* Use the earliest of temp5 and temp6. */
896 next
= emit_insn_after (gen_move_insn (temp2
, new), insn
);
897 emit_insn_after_with_line_notes (PATTERN (temp
),
898 PREV_INSN (temp6
), temp
);
899 emit_insn_after_with_line_notes (PATTERN (temp3
),
900 PREV_INSN (temp6
), temp3
);
903 reallabelprev
= prev_active_insn (JUMP_LABEL (insn
));
907 reg_scan_update (temp6
, NEXT_INSN (next
), old_max_reg
);
908 old_max_reg
= max_reg_num ();
912 #endif /* HAVE_cc0 */
914 #ifdef HAVE_conditional_arithmetic
915 /* ??? This is disabled in genconfig, as this simple-minded
916 transformation can incredibly lengthen register lifetimes.
918 Consider this example from cexp.c's yyparse:
921 (if_then_else (ne (reg:DI 149) (const_int 0 [0x0]))
922 (label_ref 248) (pc)))
923 237 (set (reg/i:DI 0 $0) (const_int 1 [0x1]))
924 239 (set (pc) (label_ref 2382))
925 248 (code_label ("yybackup"))
927 This will be transformed to:
929 237 (set (reg/i:DI 0 $0)
930 (if_then_else:DI (eq (reg:DI 149) (const_int 0 [0x0]))
931 (const_int 1 [0x1]) (reg/i:DI 0 $0)))
933 (if_then_else (eq (reg:DI 149) (const_int 0 [0x0]))
934 (label_ref 2382) (pc)))
936 which, from this narrow viewpoint looks fine. Except that
937 between this and 3 other occurrences of the same pattern, $0
938 is now live for basically the entire function, and we'll
939 get an abort in caller_save.
941 Any replacement for this code should recall that a set of
942 a register that is not live need not, and indeed should not,
943 be conditionalized. Either that, or delay the transformation
944 until after register allocation. */
946 /* See if this is a conditional jump around a small number of
947 instructions that we can conditionalize. Don't do this before
948 the initial CSE pass or after reload.
950 We reject any insns that have side effects or may trap.
951 Strictly speaking, this is not needed since the machine may
952 support conditionalizing these too, but we won't deal with that
953 now. Specifically, this means that we can't conditionalize a
954 CALL_INSN, which some machines, such as the ARC, can do, but
955 this is a very minor optimization. */
956 if (this_is_condjump
&& ! this_is_simplejump
957 && cse_not_expected
&& ! reload_completed
959 && can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (insn
)), 0),
962 rtx ourcond
= XEXP (SET_SRC (PATTERN (insn
)), 0);
964 char *storage
= (char *) oballoc (0);
965 int last_insn
= 0, failed
= 0;
966 rtx changed_jump
= 0;
968 ourcond
= gen_rtx (reverse_condition (GET_CODE (ourcond
)),
969 VOIDmode
, XEXP (ourcond
, 0),
972 /* Scan forward BRANCH_COST real insns looking for the JUMP_LABEL
973 of this insn. We see if we think we can conditionalize the
974 insns we pass. For now, we only deal with insns that have
975 one SET. We stop after an insn that modifies anything in
976 OURCOND, if we have too many insns, or if we have an insn
977 with a side effect or that may trap. Note that we will
978 be modifying any unconditional jumps we encounter to be
979 conditional; this will have the effect of also doing this
980 optimization on the "else" the next time around. */
981 for (temp1
= NEXT_INSN (insn
);
982 num_insns
<= BRANCH_COST
&& ! failed
&& temp1
!= 0
983 && GET_CODE (temp1
) != CODE_LABEL
;
984 temp1
= NEXT_INSN (temp1
))
986 /* Ignore everything but an active insn. */
987 if (GET_RTX_CLASS (GET_CODE (temp1
)) != 'i'
988 || GET_CODE (PATTERN (temp1
)) == USE
989 || GET_CODE (PATTERN (temp1
)) == CLOBBER
)
992 /* If this was an unconditional jump, record it since we'll
993 need to remove the BARRIER if we succeed. We can only
994 have one such jump since there must be a label after
995 the BARRIER and it's either ours, in which case it's the
996 only one or some other, in which case we'd fail.
997 Likewise if it's a CALL_INSN followed by a BARRIER. */
999 if (simplejump_p (temp1
)
1000 || (GET_CODE (temp1
) == CALL_INSN
1001 && NEXT_INSN (temp1
) != 0
1002 && GET_CODE (NEXT_INSN (temp1
)) == BARRIER
))
1004 if (changed_jump
== 0)
1005 changed_jump
= temp1
;
1008 = gen_rtx_INSN_LIST (VOIDmode
, temp1
, changed_jump
);
1011 /* See if we are allowed another insn and if this insn
1012 is one we think we may be able to handle. */
1013 if (++num_insns
> BRANCH_COST
1015 || (((temp2
= single_set (temp1
)) == 0
1016 || side_effects_p (SET_SRC (temp2
))
1017 || may_trap_p (SET_SRC (temp2
)))
1018 && GET_CODE (temp1
) != CALL_INSN
))
1020 else if (temp2
!= 0)
1021 validate_change (temp1
, &SET_SRC (temp2
),
1022 gen_rtx_IF_THEN_ELSE
1023 (GET_MODE (SET_DEST (temp2
)),
1025 SET_SRC (temp2
), SET_DEST (temp2
)),
1029 /* This is a CALL_INSN that doesn't have a SET. */
1030 rtx
*call_loc
= &PATTERN (temp1
);
1032 if (GET_CODE (*call_loc
) == PARALLEL
)
1033 call_loc
= &XVECEXP (*call_loc
, 0, 0);
1035 validate_change (temp1
, call_loc
,
1036 gen_rtx_IF_THEN_ELSE
1037 (VOIDmode
, copy_rtx (ourcond
),
1038 *call_loc
, const0_rtx
),
1043 if (modified_in_p (ourcond
, temp1
))
1047 /* If we've reached our jump label, haven't failed, and all
1048 the changes above are valid, we can delete this jump
1049 insn. Also remove a BARRIER after any jump that used
1050 to be unconditional and remove any REG_EQUAL or REG_EQUIV
1051 that might have previously been present on insns we
1052 made conditional. */
1053 if (temp1
== JUMP_LABEL (insn
) && ! failed
1054 && apply_change_group ())
1056 for (temp1
= NEXT_INSN (insn
); temp1
!= JUMP_LABEL (insn
);
1057 temp1
= NEXT_INSN (temp1
))
1058 if (GET_RTX_CLASS (GET_CODE (temp1
)) == 'i')
1059 for (temp2
= REG_NOTES (temp1
); temp2
!= 0;
1060 temp2
= XEXP (temp2
, 1))
1061 if (REG_NOTE_KIND (temp2
) == REG_EQUAL
1062 || REG_NOTE_KIND (temp2
) == REG_EQUIV
)
1063 remove_note (temp1
, temp2
);
1065 if (changed_jump
!= 0)
1067 while (GET_CODE (changed_jump
) == INSN_LIST
)
1069 delete_barrier (NEXT_INSN (XEXP (changed_jump
, 0)));
1070 changed_jump
= XEXP (changed_jump
, 1);
1073 delete_barrier (NEXT_INSN (changed_jump
));
1087 /* If branches are expensive, convert
1088 if (foo) bar++; to bar += (foo != 0);
1089 and similarly for "bar--;"
1091 INSN is the conditional branch around the arithmetic. We set:
1093 TEMP is the arithmetic insn.
1094 TEMP1 is the SET doing the arithmetic.
1095 TEMP2 is the operand being incremented or decremented.
1096 TEMP3 to the condition being tested.
1097 TEMP4 to the earliest insn used to find the condition. */
1099 if ((BRANCH_COST
>= 2
1107 && ! reload_completed
1108 && this_is_condjump
&& ! this_is_simplejump
1109 && (temp
= next_nonnote_insn (insn
)) != 0
1110 && (temp1
= single_set (temp
)) != 0
1111 && (temp2
= SET_DEST (temp1
),
1112 GET_MODE_CLASS (GET_MODE (temp2
)) == MODE_INT
)
1113 && GET_CODE (SET_SRC (temp1
)) == PLUS
1114 && (XEXP (SET_SRC (temp1
), 1) == const1_rtx
1115 || XEXP (SET_SRC (temp1
), 1) == constm1_rtx
)
1116 && rtx_equal_p (temp2
, XEXP (SET_SRC (temp1
), 0))
1117 && ! side_effects_p (temp2
)
1118 && ! may_trap_p (temp2
)
1119 /* INSN must either branch to the insn after TEMP or the insn
1120 after TEMP must branch to the same place as INSN. */
1121 && (reallabelprev
== temp
1122 || ((temp3
= next_active_insn (temp
)) != 0
1123 && simplejump_p (temp3
)
1124 && JUMP_LABEL (temp3
) == JUMP_LABEL (insn
)))
1125 && (temp3
= get_condition (insn
, &temp4
)) != 0
1126 /* We must be comparing objects whose modes imply the size.
1127 We could handle BLKmode if (1) emit_store_flag could
1128 and (2) we could find the size reliably. */
1129 && GET_MODE (XEXP (temp3
, 0)) != BLKmode
1130 && can_reverse_comparison_p (temp3
, insn
))
1132 rtx temp6
, target
= 0, seq
, init_insn
= 0, init
= temp2
;
1133 enum rtx_code code
= reverse_condition (GET_CODE (temp3
));
1137 /* It must be the case that TEMP2 is not modified in the range
1138 [TEMP4, INSN). The one exception we make is if the insn
1139 before INSN sets TEMP2 to something which is also unchanged
1140 in that range. In that case, we can move the initialization
1141 into our sequence. */
1143 if ((temp5
= prev_active_insn (insn
)) != 0
1144 && no_labels_between_p (temp5
, insn
)
1145 && GET_CODE (temp5
) == INSN
1146 && (temp6
= single_set (temp5
)) != 0
1147 && rtx_equal_p (temp2
, SET_DEST (temp6
))
1148 && (CONSTANT_P (SET_SRC (temp6
))
1149 || GET_CODE (SET_SRC (temp6
)) == REG
1150 || GET_CODE (SET_SRC (temp6
)) == SUBREG
))
1152 emit_insn (PATTERN (temp5
));
1154 init
= SET_SRC (temp6
);
1157 if (CONSTANT_P (init
)
1158 || ! reg_set_between_p (init
, PREV_INSN (temp4
), insn
))
1159 target
= emit_store_flag (gen_reg_rtx (GET_MODE (temp2
)), code
,
1160 XEXP (temp3
, 0), XEXP (temp3
, 1),
1162 (code
== LTU
|| code
== LEU
1163 || code
== GTU
|| code
== GEU
), 1);
1165 /* If we can do the store-flag, do the addition or
1169 target
= expand_binop (GET_MODE (temp2
),
1170 (XEXP (SET_SRC (temp1
), 1) == const1_rtx
1171 ? add_optab
: sub_optab
),
1172 temp2
, target
, temp2
, 0, OPTAB_WIDEN
);
1176 /* Put the result back in temp2 in case it isn't already.
1177 Then replace the jump, possibly a CC0-setting insn in
1178 front of the jump, and TEMP, with the sequence we have
1181 if (target
!= temp2
)
1182 emit_move_insn (temp2
, target
);
1187 emit_insns_before (seq
, temp4
);
1191 delete_insn (init_insn
);
1193 next
= NEXT_INSN (insn
);
1195 delete_insn (prev_nonnote_insn (insn
));
1201 reg_scan_update (seq
, NEXT_INSN (next
), old_max_reg
);
1202 old_max_reg
= max_reg_num ();
1212 /* Try to use a conditional move (if the target has them), or a
1213 store-flag insn. If the target has conditional arithmetic as
1214 well as conditional move, the above code will have done something.
1215 Note that we prefer the above code since it is more general: the
1216 code below can make changes that require work to undo.
1218 The general case here is:
1220 1) x = a; if (...) x = b; and
1223 If the jump would be faster, the machine should not have defined
1224 the movcc or scc insns! These cases are often made by the
1225 previous optimization.
1227 The second case is treated as x = x; if (...) x = b;.
1229 INSN here is the jump around the store. We set:
1231 TEMP to the "x op= b;" insn.
1234 TEMP3 to A (X in the second case).
1235 TEMP4 to the condition being tested.
1236 TEMP5 to the earliest insn used to find the condition.
1237 TEMP6 to the SET of TEMP. */
1239 if (/* We can't do this after reload has completed. */
1241 #ifdef HAVE_conditional_arithmetic
1242 /* Defer this until after CSE so the above code gets the
1243 first crack at it. */
1246 && this_is_condjump
&& ! this_is_simplejump
1247 /* Set TEMP to the "x = b;" insn. */
1248 && (temp
= next_nonnote_insn (insn
)) != 0
1249 && GET_CODE (temp
) == INSN
1250 && (temp6
= single_set (temp
)) != NULL_RTX
1251 && GET_CODE (temp1
= SET_DEST (temp6
)) == REG
1252 && (! SMALL_REGISTER_CLASSES
1253 || REGNO (temp1
) >= FIRST_PSEUDO_REGISTER
)
1254 && ! side_effects_p (temp2
= SET_SRC (temp6
))
1255 && ! may_trap_p (temp2
)
1256 /* Allow either form, but prefer the former if both apply.
1257 There is no point in using the old value of TEMP1 if
1258 it is a register, since cse will alias them. It can
1259 lose if the old value were a hard register since CSE
1260 won't replace hard registers. Avoid using TEMP3 if
1261 small register classes and it is a hard register. */
1262 && (((temp3
= reg_set_last (temp1
, insn
)) != 0
1263 && ! (SMALL_REGISTER_CLASSES
&& GET_CODE (temp3
) == REG
1264 && REGNO (temp3
) < FIRST_PSEUDO_REGISTER
))
1265 /* Make the latter case look like x = x; if (...) x = b; */
1266 || (temp3
= temp1
, 1))
1267 /* INSN must either branch to the insn after TEMP or the insn
1268 after TEMP must branch to the same place as INSN. */
1269 && (reallabelprev
== temp
1270 || ((temp4
= next_active_insn (temp
)) != 0
1271 && simplejump_p (temp4
)
1272 && JUMP_LABEL (temp4
) == JUMP_LABEL (insn
)))
1273 && (temp4
= get_condition (insn
, &temp5
)) != 0
1274 /* We must be comparing objects whose modes imply the size.
1275 We could handle BLKmode if (1) emit_store_flag could
1276 and (2) we could find the size reliably. */
1277 && GET_MODE (XEXP (temp4
, 0)) != BLKmode
1278 /* Even if branches are cheap, the store_flag optimization
1279 can win when the operation to be performed can be
1280 expressed directly. */
1282 /* If the previous insn sets CC0 and something else, we can't
1283 do this since we are going to delete that insn. */
1285 && ! ((temp6
= prev_nonnote_insn (insn
)) != 0
1286 && GET_CODE (temp6
) == INSN
1287 && (sets_cc0_p (PATTERN (temp6
)) == -1
1288 || (sets_cc0_p (PATTERN (temp6
)) == 1
1289 && FIND_REG_INC_NOTE (temp6
, NULL_RTX
))))
1293 #ifdef HAVE_conditional_move
1294 /* First try a conditional move. */
1296 enum rtx_code code
= GET_CODE (temp4
);
1298 rtx cond0
, cond1
, aval
, bval
;
1299 rtx target
, new_insn
;
1301 /* Copy the compared variables into cond0 and cond1, so that
1302 any side effects performed in or after the old comparison,
1303 will not affect our compare which will come later. */
1304 /* ??? Is it possible to just use the comparison in the jump
1305 insn? After all, we're going to delete it. We'd have
1306 to modify emit_conditional_move to take a comparison rtx
1307 instead or write a new function. */
1309 /* We want the target to be able to simplify comparisons with
1310 zero (and maybe other constants as well), so don't create
1311 pseudos for them. There's no need to either. */
1312 if (GET_CODE (XEXP (temp4
, 0)) == CONST_INT
1313 || GET_CODE (XEXP (temp4
, 0)) == CONST_DOUBLE
)
1314 cond0
= XEXP (temp4
, 0);
1316 cond0
= gen_reg_rtx (GET_MODE (XEXP (temp4
, 0)));
1318 if (GET_CODE (XEXP (temp4
, 1)) == CONST_INT
1319 || GET_CODE (XEXP (temp4
, 1)) == CONST_DOUBLE
)
1320 cond1
= XEXP (temp4
, 1);
1322 cond1
= gen_reg_rtx (GET_MODE (XEXP (temp4
, 1)));
1324 /* Careful about copying these values -- an IOR or what may
1325 need to do other things, like clobber flags. */
1326 /* ??? Assume for the moment that AVAL is ok. */
1331 /* We're dealing with a single_set insn with no side effects
1332 on SET_SRC. We do need to be reasonably certain that if
1333 we need to force BVAL into a register that we won't
1334 clobber the flags -- general_operand should suffice. */
1335 if (general_operand (temp2
, GET_MODE (var
)))
1339 bval
= gen_reg_rtx (GET_MODE (var
));
1340 new_insn
= copy_rtx (temp
);
1341 temp6
= single_set (new_insn
);
1342 SET_DEST (temp6
) = bval
;
1343 emit_insn (PATTERN (new_insn
));
1346 target
= emit_conditional_move (var
, code
,
1347 cond0
, cond1
, VOIDmode
,
1348 aval
, bval
, GET_MODE (var
),
1349 (code
== LTU
|| code
== GEU
1350 || code
== LEU
|| code
== GTU
));
1354 rtx seq1
, seq2
, last
;
1357 /* Save the conditional move sequence but don't emit it
1358 yet. On some machines, like the alpha, it is possible
1359 that temp5 == insn, so next generate the sequence that
1360 saves the compared values and then emit both
1361 sequences ensuring seq1 occurs before seq2. */
1362 seq2
= get_insns ();
1365 /* "Now that we can't fail..." Famous last words.
1366 Generate the copy insns that preserve the compared
1369 emit_move_insn (cond0
, XEXP (temp4
, 0));
1370 if (cond1
!= XEXP (temp4
, 1))
1371 emit_move_insn (cond1
, XEXP (temp4
, 1));
1372 seq1
= get_insns ();
1375 /* Validate the sequence -- this may be some weird
1376 bit-extract-and-test instruction for which there
1377 exists no complementary bit-extract insn. */
1379 for (last
= seq1
; last
; last
= NEXT_INSN (last
))
1380 if (recog_memoized (last
) < 0)
1388 emit_insns_before (seq1
, temp5
);
1390 /* Insert conditional move after insn, to be sure
1391 that the jump and a possible compare won't be
1393 last
= emit_insns_after (seq2
, insn
);
1395 /* ??? We can also delete the insn that sets X to A.
1396 Flow will do it too though. */
1398 next
= NEXT_INSN (insn
);
1403 reg_scan_update (seq1
, NEXT_INSN (last
),
1405 old_max_reg
= max_reg_num ();
1417 /* That didn't work, try a store-flag insn.
1419 We further divide the cases into:
1421 1) x = a; if (...) x = b; and either A or B is zero,
1422 2) if (...) x = 0; and jumps are expensive,
1423 3) x = a; if (...) x = b; and A and B are constants where all
1424 the set bits in A are also set in B and jumps are expensive,
1425 4) x = a; if (...) x = b; and A and B non-zero, and jumps are
1427 5) if (...) x = b; if jumps are even more expensive. */
1429 if (GET_MODE_CLASS (GET_MODE (temp1
)) == MODE_INT
1430 /* We will be passing this as operand into expand_and. No
1431 good if it's not valid as an operand. */
1432 && general_operand (temp2
, GET_MODE (temp2
))
1433 && ((GET_CODE (temp3
) == CONST_INT
)
1434 /* Make the latter case look like
1435 x = x; if (...) x = 0; */
1438 && temp2
== const0_rtx
)
1439 || BRANCH_COST
>= 3)))
1440 /* If B is zero, OK; if A is zero, can only do (1) if we
1441 can reverse the condition. See if (3) applies possibly
1442 by reversing the condition. Prefer reversing to (4) when
1443 branches are very expensive. */
1444 && (((BRANCH_COST
>= 2
1445 || STORE_FLAG_VALUE
== -1
1446 || (STORE_FLAG_VALUE
== 1
1447 /* Check that the mask is a power of two,
1448 so that it can probably be generated
1450 && GET_CODE (temp3
) == CONST_INT
1451 && exact_log2 (INTVAL (temp3
)) >= 0))
1452 && (reversep
= 0, temp2
== const0_rtx
))
1453 || ((BRANCH_COST
>= 2
1454 || STORE_FLAG_VALUE
== -1
1455 || (STORE_FLAG_VALUE
== 1
1456 && GET_CODE (temp2
) == CONST_INT
1457 && exact_log2 (INTVAL (temp2
)) >= 0))
1458 && temp3
== const0_rtx
1459 && (reversep
= can_reverse_comparison_p (temp4
, insn
)))
1460 || (BRANCH_COST
>= 2
1461 && GET_CODE (temp2
) == CONST_INT
1462 && GET_CODE (temp3
) == CONST_INT
1463 && ((INTVAL (temp2
) & INTVAL (temp3
)) == INTVAL (temp2
)
1464 || ((INTVAL (temp2
) & INTVAL (temp3
)) == INTVAL (temp3
)
1465 && (reversep
= can_reverse_comparison_p (temp4
,
1467 || BRANCH_COST
>= 3)
1470 enum rtx_code code
= GET_CODE (temp4
);
1471 rtx uval
, cval
, var
= temp1
;
1475 /* If necessary, reverse the condition. */
1477 code
= reverse_condition (code
), uval
= temp2
, cval
= temp3
;
1479 uval
= temp3
, cval
= temp2
;
1481 /* If CVAL is non-zero, normalize to -1. Otherwise, if UVAL
1482 is the constant 1, it is best to just compute the result
1483 directly. If UVAL is constant and STORE_FLAG_VALUE
1484 includes all of its bits, it is best to compute the flag
1485 value unnormalized and `and' it with UVAL. Otherwise,
1486 normalize to -1 and `and' with UVAL. */
1487 normalizep
= (cval
!= const0_rtx
? -1
1488 : (uval
== const1_rtx
? 1
1489 : (GET_CODE (uval
) == CONST_INT
1490 && (INTVAL (uval
) & ~STORE_FLAG_VALUE
) == 0)
1493 /* We will be putting the store-flag insn immediately in
1494 front of the comparison that was originally being done,
1495 so we know all the variables in TEMP4 will be valid.
1496 However, this might be in front of the assignment of
1497 A to VAR. If it is, it would clobber the store-flag
1498 we will be emitting.
1500 Therefore, emit into a temporary which will be copied to
1501 VAR immediately after TEMP. */
1504 target
= emit_store_flag (gen_reg_rtx (GET_MODE (var
)), code
,
1505 XEXP (temp4
, 0), XEXP (temp4
, 1),
1507 (code
== LTU
|| code
== LEU
1508 || code
== GEU
|| code
== GTU
),
1518 /* Put the store-flag insns in front of the first insn
1519 used to compute the condition to ensure that we
1520 use the same values of them as the current
1521 comparison. However, the remainder of the insns we
1522 generate will be placed directly in front of the
1523 jump insn, in case any of the pseudos we use
1524 are modified earlier. */
1526 emit_insns_before (seq
, temp5
);
1530 /* Both CVAL and UVAL are non-zero. */
1531 if (cval
!= const0_rtx
&& uval
!= const0_rtx
)
1535 tem1
= expand_and (uval
, target
, NULL_RTX
);
1536 if (GET_CODE (cval
) == CONST_INT
1537 && GET_CODE (uval
) == CONST_INT
1538 && (INTVAL (cval
) & INTVAL (uval
)) == INTVAL (cval
))
1542 tem2
= expand_unop (GET_MODE (var
), one_cmpl_optab
,
1543 target
, NULL_RTX
, 0);
1544 tem2
= expand_and (cval
, tem2
,
1545 (GET_CODE (tem2
) == REG
1549 /* If we usually make new pseudos, do so here. This
1550 turns out to help machines that have conditional
1552 /* ??? Conditional moves have already been handled.
1553 This may be obsolete. */
1555 if (flag_expensive_optimizations
)
1558 target
= expand_binop (GET_MODE (var
), ior_optab
,
1562 else if (normalizep
!= 1)
1564 /* We know that either CVAL or UVAL is zero. If
1565 UVAL is zero, negate TARGET and `and' with CVAL.
1566 Otherwise, `and' with UVAL. */
1567 if (uval
== const0_rtx
)
1569 target
= expand_unop (GET_MODE (var
), one_cmpl_optab
,
1570 target
, NULL_RTX
, 0);
1574 target
= expand_and (uval
, target
,
1575 (GET_CODE (target
) == REG
1576 && ! preserve_subexpressions_p ()
1577 ? target
: NULL_RTX
));
1580 emit_move_insn (var
, target
);
1584 /* If INSN uses CC0, we must not separate it from the
1585 insn that sets cc0. */
1586 if (reg_mentioned_p (cc0_rtx
, PATTERN (before
)))
1587 before
= prev_nonnote_insn (before
);
1589 emit_insns_before (seq
, before
);
1592 next
= NEXT_INSN (insn
);
1597 reg_scan_update (seq
, NEXT_INSN (next
), old_max_reg
);
1598 old_max_reg
= max_reg_num ();
1610 /* Simplify if (...) x = 1; else {...} if (x) ...
1611 We recognize this case scanning backwards as well.
1613 TEMP is the assignment to x;
1614 TEMP1 is the label at the head of the second if. */
1615 /* ?? This should call get_condition to find the values being
1616 compared, instead of looking for a COMPARE insn when HAVE_cc0
1617 is not defined. This would allow it to work on the m88k. */
1618 /* ?? This optimization is only safe before cse is run if HAVE_cc0
1619 is not defined and the condition is tested by a separate compare
1620 insn. This is because the code below assumes that the result
1621 of the compare dies in the following branch.
1623 Not only that, but there might be other insns between the
1624 compare and branch whose results are live. Those insns need
1627 A way to fix this is to move the insns at JUMP_LABEL (insn)
1628 to before INSN. If we are running before flow, they will
1629 be deleted if they aren't needed. But this doesn't work
1632 This is really a special-case of jump threading, anyway. The
1633 right thing to do is to replace this and jump threading with
1634 much simpler code in cse.
1636 This code has been turned off in the non-cc0 case in the
1640 else if (this_is_simplejump
1641 /* Safe to skip USE and CLOBBER insns here
1642 since they will not be deleted. */
1643 && (temp
= prev_active_insn (insn
))
1644 && no_labels_between_p (temp
, insn
)
1645 && GET_CODE (temp
) == INSN
1646 && GET_CODE (PATTERN (temp
)) == SET
1647 && GET_CODE (SET_DEST (PATTERN (temp
))) == REG
1648 && CONSTANT_P (SET_SRC (PATTERN (temp
)))
1649 && (temp1
= next_active_insn (JUMP_LABEL (insn
)))
1650 /* If we find that the next value tested is `x'
1651 (TEMP1 is the insn where this happens), win. */
1652 && GET_CODE (temp1
) == INSN
1653 && GET_CODE (PATTERN (temp1
)) == SET
1655 /* Does temp1 `tst' the value of x? */
1656 && SET_SRC (PATTERN (temp1
)) == SET_DEST (PATTERN (temp
))
1657 && SET_DEST (PATTERN (temp1
)) == cc0_rtx
1658 && (temp1
= next_nonnote_insn (temp1
))
1660 /* Does temp1 compare the value of x against zero? */
1661 && GET_CODE (SET_SRC (PATTERN (temp1
))) == COMPARE
1662 && XEXP (SET_SRC (PATTERN (temp1
)), 1) == const0_rtx
1663 && (XEXP (SET_SRC (PATTERN (temp1
)), 0)
1664 == SET_DEST (PATTERN (temp
)))
1665 && GET_CODE (SET_DEST (PATTERN (temp1
))) == REG
1666 && (temp1
= find_next_ref (SET_DEST (PATTERN (temp1
)), temp1
))
1668 && condjump_p (temp1
))
1670 /* Get the if_then_else from the condjump. */
1671 rtx choice
= SET_SRC (PATTERN (temp1
));
1672 if (GET_CODE (choice
) == IF_THEN_ELSE
)
1674 enum rtx_code code
= GET_CODE (XEXP (choice
, 0));
1675 rtx val
= SET_SRC (PATTERN (temp
));
1677 = simplify_relational_operation (code
, GET_MODE (SET_DEST (PATTERN (temp
))),
1681 if (cond
== const_true_rtx
)
1682 ultimate
= XEXP (choice
, 1);
1683 else if (cond
== const0_rtx
)
1684 ultimate
= XEXP (choice
, 2);
1688 if (ultimate
== pc_rtx
)
1689 ultimate
= get_label_after (temp1
);
1690 else if (ultimate
&& GET_CODE (ultimate
) != RETURN
)
1691 ultimate
= XEXP (ultimate
, 0);
1693 if (ultimate
&& JUMP_LABEL(insn
) != ultimate
)
1694 changed
|= redirect_jump (insn
, ultimate
);
1700 /* @@ This needs a bit of work before it will be right.
1702 Any type of comparison can be accepted for the first and
1703 second compare. When rewriting the first jump, we must
1704 compute what conditions can reach label3, and use the
1705 appropriate code. We can not simply reverse/swap the code
1706 of the first jump. In some cases, the second jump must be
1710 < == converts to > ==
1711 < != converts to == >
1714 If the code is written to only accept an '==' test for the second
1715 compare, then all that needs to be done is to swap the condition
1716 of the first branch.
1718 It is questionable whether we want this optimization anyways,
1719 since if the user wrote code like this because he/she knew that
1720 the jump to label1 is taken most of the time, then rewriting
1721 this gives slower code. */
1722 /* @@ This should call get_condition to find the values being
1723 compared, instead of looking for a COMPARE insn when HAVE_cc0
1724 is not defined. This would allow it to work on the m88k. */
1725 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1726 is not defined and the condition is tested by a separate compare
1727 insn. This is because the code below assumes that the result
1728 of the compare dies in the following branch. */
1730 /* Simplify test a ~= b
1744 where ~= is an inequality, e.g. >, and ~~= is the swapped
1747 We recognize this case scanning backwards.
1749 TEMP is the conditional jump to `label2';
1750 TEMP1 is the test for `a == b';
1751 TEMP2 is the conditional jump to `label1';
1752 TEMP3 is the test for `a ~= b'. */
1753 else if (this_is_simplejump
1754 && (temp
= prev_active_insn (insn
))
1755 && no_labels_between_p (temp
, insn
)
1756 && condjump_p (temp
)
1757 && (temp1
= prev_active_insn (temp
))
1758 && no_labels_between_p (temp1
, temp
)
1759 && GET_CODE (temp1
) == INSN
1760 && GET_CODE (PATTERN (temp1
)) == SET
1762 && sets_cc0_p (PATTERN (temp1
)) == 1
1764 && GET_CODE (SET_SRC (PATTERN (temp1
))) == COMPARE
1765 && GET_CODE (SET_DEST (PATTERN (temp1
))) == REG
1766 && (temp
== find_next_ref (SET_DEST (PATTERN (temp1
)), temp1
))
1768 && (temp2
= prev_active_insn (temp1
))
1769 && no_labels_between_p (temp2
, temp1
)
1770 && condjump_p (temp2
)
1771 && JUMP_LABEL (temp2
) == next_nonnote_insn (NEXT_INSN (insn
))
1772 && (temp3
= prev_active_insn (temp2
))
1773 && no_labels_between_p (temp3
, temp2
)
1774 && GET_CODE (PATTERN (temp3
)) == SET
1775 && rtx_equal_p (SET_DEST (PATTERN (temp3
)),
1776 SET_DEST (PATTERN (temp1
)))
1777 && rtx_equal_p (SET_SRC (PATTERN (temp1
)),
1778 SET_SRC (PATTERN (temp3
)))
1779 && ! inequality_comparisons_p (PATTERN (temp
))
1780 && inequality_comparisons_p (PATTERN (temp2
)))
1782 rtx fallthrough_label
= JUMP_LABEL (temp2
);
1784 ++LABEL_NUSES (fallthrough_label
);
1785 if (swap_jump (temp2
, JUMP_LABEL (insn
)))
1791 if (--LABEL_NUSES (fallthrough_label
) == 0)
1792 delete_insn (fallthrough_label
);
1795 /* Simplify if (...) {... x = 1;} if (x) ...
1797 We recognize this case backwards.
1799 TEMP is the test of `x';
1800 TEMP1 is the assignment to `x' at the end of the
1801 previous statement. */
1802 /* @@ This should call get_condition to find the values being
1803 compared, instead of looking for a COMPARE insn when HAVE_cc0
1804 is not defined. This would allow it to work on the m88k. */
1805 /* @@ This optimization is only safe before cse is run if HAVE_cc0
1806 is not defined and the condition is tested by a separate compare
1807 insn. This is because the code below assumes that the result
1808 of the compare dies in the following branch. */
1810 /* ??? This has to be turned off. The problem is that the
1811 unconditional jump might indirectly end up branching to the
1812 label between TEMP1 and TEMP. We can't detect this, in general,
1813 since it may become a jump to there after further optimizations.
1814 If that jump is done, it will be deleted, so we will retry
1815 this optimization in the next pass, thus an infinite loop.
1817 The present code prevents this by putting the jump after the
1818 label, but this is not logically correct. */
1820 else if (this_is_condjump
1821 /* Safe to skip USE and CLOBBER insns here
1822 since they will not be deleted. */
1823 && (temp
= prev_active_insn (insn
))
1824 && no_labels_between_p (temp
, insn
)
1825 && GET_CODE (temp
) == INSN
1826 && GET_CODE (PATTERN (temp
)) == SET
1828 && sets_cc0_p (PATTERN (temp
)) == 1
1829 && GET_CODE (SET_SRC (PATTERN (temp
))) == REG
1831 /* Temp must be a compare insn, we can not accept a register
1832 to register move here, since it may not be simply a
1834 && GET_CODE (SET_SRC (PATTERN (temp
))) == COMPARE
1835 && XEXP (SET_SRC (PATTERN (temp
)), 1) == const0_rtx
1836 && GET_CODE (XEXP (SET_SRC (PATTERN (temp
)), 0)) == REG
1837 && GET_CODE (SET_DEST (PATTERN (temp
))) == REG
1838 && insn
== find_next_ref (SET_DEST (PATTERN (temp
)), temp
)
1840 /* May skip USE or CLOBBER insns here
1841 for checking for opportunity, since we
1842 take care of them later. */
1843 && (temp1
= prev_active_insn (temp
))
1844 && GET_CODE (temp1
) == INSN
1845 && GET_CODE (PATTERN (temp1
)) == SET
1847 && SET_SRC (PATTERN (temp
)) == SET_DEST (PATTERN (temp1
))
1849 && (XEXP (SET_SRC (PATTERN (temp
)), 0)
1850 == SET_DEST (PATTERN (temp1
)))
1852 && CONSTANT_P (SET_SRC (PATTERN (temp1
)))
1853 /* If this isn't true, cse will do the job. */
1854 && ! no_labels_between_p (temp1
, temp
))
1856 /* Get the if_then_else from the condjump. */
1857 rtx choice
= SET_SRC (PATTERN (insn
));
1858 if (GET_CODE (choice
) == IF_THEN_ELSE
1859 && (GET_CODE (XEXP (choice
, 0)) == EQ
1860 || GET_CODE (XEXP (choice
, 0)) == NE
))
1862 int want_nonzero
= (GET_CODE (XEXP (choice
, 0)) == NE
);
1867 /* Get the place that condjump will jump to
1868 if it is reached from here. */
1869 if ((SET_SRC (PATTERN (temp1
)) != const0_rtx
)
1871 ultimate
= XEXP (choice
, 1);
1873 ultimate
= XEXP (choice
, 2);
1874 /* Get it as a CODE_LABEL. */
1875 if (ultimate
== pc_rtx
)
1876 ultimate
= get_label_after (insn
);
1878 /* Get the label out of the LABEL_REF. */
1879 ultimate
= XEXP (ultimate
, 0);
1881 /* Insert the jump immediately before TEMP, specifically
1882 after the label that is between TEMP1 and TEMP. */
1883 last_insn
= PREV_INSN (temp
);
1885 /* If we would be branching to the next insn, the jump
1886 would immediately be deleted and then re-inserted in
1887 a subsequent pass over the code. So don't do anything
1889 if (next_active_insn (last_insn
)
1890 != next_active_insn (ultimate
))
1892 emit_barrier_after (last_insn
);
1893 p
= emit_jump_insn_after (gen_jump (ultimate
),
1895 JUMP_LABEL (p
) = ultimate
;
1896 ++LABEL_NUSES (ultimate
);
1897 if (INSN_UID (ultimate
) < max_jump_chain
1898 && INSN_CODE (p
) < max_jump_chain
)
1900 jump_chain
[INSN_UID (p
)]
1901 = jump_chain
[INSN_UID (ultimate
)];
1902 jump_chain
[INSN_UID (ultimate
)] = p
;
1911 /* Detect a conditional jump jumping over an unconditional trap. */
1913 && this_is_condjump
&& ! this_is_simplejump
1914 && reallabelprev
!= 0
1915 && GET_CODE (reallabelprev
) == INSN
1916 && GET_CODE (PATTERN (reallabelprev
)) == TRAP_IF
1917 && TRAP_CONDITION (PATTERN (reallabelprev
)) == const_true_rtx
1918 && prev_active_insn (reallabelprev
) == insn
1919 && no_labels_between_p (insn
, reallabelprev
)
1920 && (temp2
= get_condition (insn
, &temp4
))
1921 && can_reverse_comparison_p (temp2
, insn
))
1923 rtx
new = gen_cond_trap (reverse_condition (GET_CODE (temp2
)),
1924 XEXP (temp2
, 0), XEXP (temp2
, 1),
1925 TRAP_CODE (PATTERN (reallabelprev
)));
1929 emit_insn_before (new, temp4
);
1930 delete_insn (reallabelprev
);
1936 /* Detect a jump jumping to an unconditional trap. */
1937 else if (HAVE_trap
&& this_is_condjump
1938 && (temp
= next_active_insn (JUMP_LABEL (insn
)))
1939 && GET_CODE (temp
) == INSN
1940 && GET_CODE (PATTERN (temp
)) == TRAP_IF
1941 && (this_is_simplejump
1942 || (temp2
= get_condition (insn
, &temp4
))))
1944 rtx tc
= TRAP_CONDITION (PATTERN (temp
));
1946 if (tc
== const_true_rtx
1947 || (! this_is_simplejump
&& rtx_equal_p (temp2
, tc
)))
1950 /* Replace an unconditional jump to a trap with a trap. */
1951 if (this_is_simplejump
)
1953 emit_barrier_after (emit_insn_before (gen_trap (), insn
));
1958 new = gen_cond_trap (GET_CODE (temp2
), XEXP (temp2
, 0),
1960 TRAP_CODE (PATTERN (temp
)));
1963 emit_insn_before (new, temp4
);
1969 /* If the trap condition and jump condition are mutually
1970 exclusive, redirect the jump to the following insn. */
1971 else if (GET_RTX_CLASS (GET_CODE (tc
)) == '<'
1972 && ! this_is_simplejump
1973 && swap_condition (GET_CODE (temp2
)) == GET_CODE (tc
)
1974 && rtx_equal_p (XEXP (tc
, 0), XEXP (temp2
, 0))
1975 && rtx_equal_p (XEXP (tc
, 1), XEXP (temp2
, 1))
1976 && redirect_jump (insn
, get_label_after (temp
)))
1985 /* Now that the jump has been tensioned,
1986 try cross jumping: check for identical code
1987 before the jump and before its target label. */
1989 /* First, cross jumping of conditional jumps: */
1991 if (cross_jump
&& condjump_p (insn
))
1993 rtx newjpos
, newlpos
;
1994 rtx x
= prev_real_insn (JUMP_LABEL (insn
));
1996 /* A conditional jump may be crossjumped
1997 only if the place it jumps to follows
1998 an opposing jump that comes back here. */
2000 if (x
!= 0 && ! jump_back_p (x
, insn
))
2001 /* We have no opposing jump;
2002 cannot cross jump this insn. */
2006 /* TARGET is nonzero if it is ok to cross jump
2007 to code before TARGET. If so, see if matches. */
2009 find_cross_jump (insn
, x
, 2,
2010 &newjpos
, &newlpos
);
2014 do_cross_jump (insn
, newjpos
, newlpos
);
2015 /* Make the old conditional jump
2016 into an unconditional one. */
2017 SET_SRC (PATTERN (insn
))
2018 = gen_rtx_LABEL_REF (VOIDmode
, JUMP_LABEL (insn
));
2019 INSN_CODE (insn
) = -1;
2020 emit_barrier_after (insn
);
2021 /* Add to jump_chain unless this is a new label
2022 whose UID is too large. */
2023 if (INSN_UID (JUMP_LABEL (insn
)) < max_jump_chain
)
2025 jump_chain
[INSN_UID (insn
)]
2026 = jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
2027 jump_chain
[INSN_UID (JUMP_LABEL (insn
))] = insn
;
2034 /* Cross jumping of unconditional jumps:
2035 a few differences. */
2037 if (cross_jump
&& simplejump_p (insn
))
2039 rtx newjpos
, newlpos
;
2044 /* TARGET is nonzero if it is ok to cross jump
2045 to code before TARGET. If so, see if matches. */
2046 find_cross_jump (insn
, JUMP_LABEL (insn
), 1,
2047 &newjpos
, &newlpos
);
2049 /* If cannot cross jump to code before the label,
2050 see if we can cross jump to another jump to
2052 /* Try each other jump to this label. */
2053 if (INSN_UID (JUMP_LABEL (insn
)) < max_uid
)
2054 for (target
= jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
2055 target
!= 0 && newjpos
== 0;
2056 target
= jump_chain
[INSN_UID (target
)])
2058 && JUMP_LABEL (target
) == JUMP_LABEL (insn
)
2059 /* Ignore TARGET if it's deleted. */
2060 && ! INSN_DELETED_P (target
))
2061 find_cross_jump (insn
, target
, 2,
2062 &newjpos
, &newlpos
);
2066 do_cross_jump (insn
, newjpos
, newlpos
);
2072 /* This code was dead in the previous jump.c! */
2073 if (cross_jump
&& GET_CODE (PATTERN (insn
)) == RETURN
)
2075 /* Return insns all "jump to the same place"
2076 so we can cross-jump between any two of them. */
2078 rtx newjpos
, newlpos
, target
;
2082 /* If cannot cross jump to code before the label,
2083 see if we can cross jump to another jump to
2085 /* Try each other jump to this label. */
2086 for (target
= jump_chain
[0];
2087 target
!= 0 && newjpos
== 0;
2088 target
= jump_chain
[INSN_UID (target
)])
2090 && ! INSN_DELETED_P (target
)
2091 && GET_CODE (PATTERN (target
)) == RETURN
)
2092 find_cross_jump (insn
, target
, 2,
2093 &newjpos
, &newlpos
);
2097 do_cross_jump (insn
, newjpos
, newlpos
);
2108 /* Delete extraneous line number notes.
2109 Note that two consecutive notes for different lines are not really
2110 extraneous. There should be some indication where that line belonged,
2111 even if it became empty. */
2116 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
2117 if (GET_CODE (insn
) == NOTE
&& NOTE_LINE_NUMBER (insn
) >= 0)
2119 /* Delete this note if it is identical to previous note. */
2121 && NOTE_SOURCE_FILE (insn
) == NOTE_SOURCE_FILE (last_note
)
2122 && NOTE_LINE_NUMBER (insn
) == NOTE_LINE_NUMBER (last_note
))
2132 /* CAN_REACH_END is persistent for each function. Once set it should
2133 not be cleared. This is especially true for the case where we
2134 delete the NOTE_FUNCTION_END note. CAN_REACH_END is cleared by
2135 the front-end before compiling each function. */
2136 if (calculate_can_reach_end (last_insn
, optimize
!= 0))
2145 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
2146 notes whose labels don't occur in the insn any more. Returns the
2147 largest INSN_UID found. */
2152 int largest_uid
= 0;
2155 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
2157 if (GET_CODE (insn
) == CODE_LABEL
)
2158 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
2159 else if (GET_CODE (insn
) == JUMP_INSN
)
2160 JUMP_LABEL (insn
) = 0;
2161 else if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
2165 for (note
= REG_NOTES (insn
); note
; note
= next
)
2167 next
= XEXP (note
, 1);
2168 if (REG_NOTE_KIND (note
) == REG_LABEL
2169 && ! reg_mentioned_p (XEXP (note
, 0), PATTERN (insn
)))
2170 remove_note (insn
, note
);
2173 if (INSN_UID (insn
) > largest_uid
)
2174 largest_uid
= INSN_UID (insn
);
2180 /* Delete insns following barriers, up to next label.
2182 Also delete no-op jumps created by gcse. */
2184 delete_barrier_successors (f
)
2189 for (insn
= f
; insn
;)
2191 if (GET_CODE (insn
) == BARRIER
)
2193 insn
= NEXT_INSN (insn
);
2195 never_reached_warning (insn
);
2197 while (insn
!= 0 && GET_CODE (insn
) != CODE_LABEL
)
2199 if (GET_CODE (insn
) == NOTE
2200 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)
2201 insn
= NEXT_INSN (insn
);
2203 insn
= delete_insn (insn
);
2205 /* INSN is now the code_label. */
2207 /* Also remove (set (pc) (pc)) insns which can be created by
2208 gcse. We eliminate such insns now to avoid having them
2209 cause problems later. */
2210 else if (GET_CODE (insn
) == JUMP_INSN
2211 && GET_CODE (PATTERN (insn
)) == SET
2212 && SET_SRC (PATTERN (insn
)) == pc_rtx
2213 && SET_DEST (PATTERN (insn
)) == pc_rtx
)
2214 insn
= delete_insn (insn
);
2217 insn
= NEXT_INSN (insn
);
2221 /* Mark the label each jump jumps to.
2222 Combine consecutive labels, and count uses of labels.
2224 For each label, make a chain (using `jump_chain')
2225 of all the *unconditional* jumps that jump to it;
2226 also make a chain of all returns.
2228 CROSS_JUMP indicates whether we are doing cross jumping
2229 and if we are whether we will be paying attention to
2230 death notes or not. */
2233 mark_all_labels (f
, cross_jump
)
2239 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
2240 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
2242 mark_jump_label (PATTERN (insn
), insn
, cross_jump
, 0);
2243 if (! INSN_DELETED_P (insn
) && GET_CODE (insn
) == JUMP_INSN
)
2245 if (JUMP_LABEL (insn
) != 0 && simplejump_p (insn
))
2247 jump_chain
[INSN_UID (insn
)]
2248 = jump_chain
[INSN_UID (JUMP_LABEL (insn
))];
2249 jump_chain
[INSN_UID (JUMP_LABEL (insn
))] = insn
;
2251 if (GET_CODE (PATTERN (insn
)) == RETURN
)
2253 jump_chain
[INSN_UID (insn
)] = jump_chain
[0];
2254 jump_chain
[0] = insn
;
2260 /* Delete all labels already not referenced.
2261 Also find and return the last insn. */
2264 delete_unreferenced_labels (f
)
2267 rtx final
= NULL_RTX
;
2270 for (insn
= f
; insn
; )
2272 if (GET_CODE (insn
) == CODE_LABEL
2273 && LABEL_NUSES (insn
) == 0
2274 && LABEL_ALTERNATE_NAME (insn
) == NULL
)
2275 insn
= delete_insn (insn
);
2279 insn
= NEXT_INSN (insn
);
2286 /* Delete various simple forms of moves which have no necessary
2290 delete_noop_moves (f
)
2295 for (insn
= f
; insn
; )
2297 next
= NEXT_INSN (insn
);
2299 if (GET_CODE (insn
) == INSN
)
2301 register rtx body
= PATTERN (insn
);
2303 /* Combine stack_adjusts with following push_insns. */
2304 #ifdef PUSH_ROUNDING
2305 if (GET_CODE (body
) == SET
2306 && SET_DEST (body
) == stack_pointer_rtx
2307 && GET_CODE (SET_SRC (body
)) == PLUS
2308 && XEXP (SET_SRC (body
), 0) == stack_pointer_rtx
2309 && GET_CODE (XEXP (SET_SRC (body
), 1)) == CONST_INT
2310 && INTVAL (XEXP (SET_SRC (body
), 1)) > 0)
2313 rtx stack_adjust_insn
= insn
;
2314 int stack_adjust_amount
= INTVAL (XEXP (SET_SRC (body
), 1));
2315 int total_pushed
= 0;
2318 /* Find all successive push insns. */
2320 /* Don't convert more than three pushes;
2321 that starts adding too many displaced addresses
2322 and the whole thing starts becoming a losing
2327 p
= next_nonnote_insn (p
);
2328 if (p
== 0 || GET_CODE (p
) != INSN
)
2330 pbody
= PATTERN (p
);
2331 if (GET_CODE (pbody
) != SET
)
2333 dest
= SET_DEST (pbody
);
2334 /* Allow a no-op move between the adjust and the push. */
2335 if (GET_CODE (dest
) == REG
2336 && GET_CODE (SET_SRC (pbody
)) == REG
2337 && REGNO (dest
) == REGNO (SET_SRC (pbody
)))
2339 if (! (GET_CODE (dest
) == MEM
2340 && GET_CODE (XEXP (dest
, 0)) == POST_INC
2341 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
))
2344 if (total_pushed
+ GET_MODE_SIZE (GET_MODE (SET_DEST (pbody
)))
2345 > stack_adjust_amount
)
2347 total_pushed
+= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody
)));
2350 /* Discard the amount pushed from the stack adjust;
2351 maybe eliminate it entirely. */
2352 if (total_pushed
>= stack_adjust_amount
)
2354 delete_computation (stack_adjust_insn
);
2355 total_pushed
= stack_adjust_amount
;
2358 XEXP (SET_SRC (PATTERN (stack_adjust_insn
)), 1)
2359 = GEN_INT (stack_adjust_amount
- total_pushed
);
2361 /* Change the appropriate push insns to ordinary stores. */
2363 while (total_pushed
> 0)
2366 p
= next_nonnote_insn (p
);
2367 if (GET_CODE (p
) != INSN
)
2369 pbody
= PATTERN (p
);
2370 if (GET_CODE (pbody
) != SET
)
2372 dest
= SET_DEST (pbody
);
2373 /* Allow a no-op move between the adjust and the push. */
2374 if (GET_CODE (dest
) == REG
2375 && GET_CODE (SET_SRC (pbody
)) == REG
2376 && REGNO (dest
) == REGNO (SET_SRC (pbody
)))
2378 if (! (GET_CODE (dest
) == MEM
2379 && GET_CODE (XEXP (dest
, 0)) == POST_INC
2380 && XEXP (XEXP (dest
, 0), 0) == stack_pointer_rtx
))
2382 total_pushed
-= GET_MODE_SIZE (GET_MODE (SET_DEST (pbody
)));
2383 /* If this push doesn't fully fit in the space
2384 of the stack adjust that we deleted,
2385 make another stack adjust here for what we
2386 didn't use up. There should be peepholes
2387 to recognize the resulting sequence of insns. */
2388 if (total_pushed
< 0)
2390 emit_insn_before (gen_add2_insn (stack_pointer_rtx
,
2391 GEN_INT (- total_pushed
)),
2396 = plus_constant (stack_pointer_rtx
, total_pushed
);
2401 /* Detect and delete no-op move instructions
2402 resulting from not allocating a parameter in a register. */
2404 if (GET_CODE (body
) == SET
2405 && (SET_DEST (body
) == SET_SRC (body
)
2406 || (GET_CODE (SET_DEST (body
)) == MEM
2407 && GET_CODE (SET_SRC (body
)) == MEM
2408 && rtx_equal_p (SET_SRC (body
), SET_DEST (body
))))
2409 && ! (GET_CODE (SET_DEST (body
)) == MEM
2410 && MEM_VOLATILE_P (SET_DEST (body
)))
2411 && ! (GET_CODE (SET_SRC (body
)) == MEM
2412 && MEM_VOLATILE_P (SET_SRC (body
))))
2413 delete_computation (insn
);
2415 /* Detect and ignore no-op move instructions
2416 resulting from smart or fortuitous register allocation. */
2418 else if (GET_CODE (body
) == SET
)
2420 int sreg
= true_regnum (SET_SRC (body
));
2421 int dreg
= true_regnum (SET_DEST (body
));
2423 if (sreg
== dreg
&& sreg
>= 0)
2425 else if (sreg
>= 0 && dreg
>= 0)
2428 rtx tem
= find_equiv_reg (NULL_RTX
, insn
, 0,
2429 sreg
, NULL_PTR
, dreg
,
2430 GET_MODE (SET_SRC (body
)));
2433 && GET_MODE (tem
) == GET_MODE (SET_DEST (body
)))
2435 /* DREG may have been the target of a REG_DEAD note in
2436 the insn which makes INSN redundant. If so, reorg
2437 would still think it is dead. So search for such a
2438 note and delete it if we find it. */
2439 if (! find_regno_note (insn
, REG_UNUSED
, dreg
))
2440 for (trial
= prev_nonnote_insn (insn
);
2441 trial
&& GET_CODE (trial
) != CODE_LABEL
;
2442 trial
= prev_nonnote_insn (trial
))
2443 if (find_regno_note (trial
, REG_DEAD
, dreg
))
2445 remove_death (dreg
, trial
);
2449 /* Deleting insn could lose a death-note for SREG. */
2450 if ((trial
= find_regno_note (insn
, REG_DEAD
, sreg
)))
2452 /* Change this into a USE so that we won't emit
2453 code for it, but still can keep the note. */
2455 = gen_rtx_USE (VOIDmode
, XEXP (trial
, 0));
2456 INSN_CODE (insn
) = -1;
2457 /* Remove all reg notes but the REG_DEAD one. */
2458 REG_NOTES (insn
) = trial
;
2459 XEXP (trial
, 1) = NULL_RTX
;
2465 else if (dreg
>= 0 && CONSTANT_P (SET_SRC (body
))
2466 && find_equiv_reg (SET_SRC (body
), insn
, 0, dreg
,
2468 GET_MODE (SET_DEST (body
))))
2470 /* This handles the case where we have two consecutive
2471 assignments of the same constant to pseudos that didn't
2472 get a hard reg. Each SET from the constant will be
2473 converted into a SET of the spill register and an
2474 output reload will be made following it. This produces
2475 two loads of the same constant into the same spill
2480 /* Look back for a death note for the first reg.
2481 If there is one, it is no longer accurate. */
2482 while (in_insn
&& GET_CODE (in_insn
) != CODE_LABEL
)
2484 if ((GET_CODE (in_insn
) == INSN
2485 || GET_CODE (in_insn
) == JUMP_INSN
)
2486 && find_regno_note (in_insn
, REG_DEAD
, dreg
))
2488 remove_death (dreg
, in_insn
);
2491 in_insn
= PREV_INSN (in_insn
);
2494 /* Delete the second load of the value. */
2498 else if (GET_CODE (body
) == PARALLEL
)
2500 /* If each part is a set between two identical registers or
2501 a USE or CLOBBER, delete the insn. */
2505 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
2507 tem
= XVECEXP (body
, 0, i
);
2508 if (GET_CODE (tem
) == USE
|| GET_CODE (tem
) == CLOBBER
)
2511 if (GET_CODE (tem
) != SET
2512 || (sreg
= true_regnum (SET_SRC (tem
))) < 0
2513 || (dreg
= true_regnum (SET_DEST (tem
))) < 0
2521 /* Also delete insns to store bit fields if they are no-ops. */
2522 /* Not worth the hair to detect this in the big-endian case. */
2523 else if (! BYTES_BIG_ENDIAN
2524 && GET_CODE (body
) == SET
2525 && GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
2526 && XEXP (SET_DEST (body
), 2) == const0_rtx
2527 && XEXP (SET_DEST (body
), 0) == SET_SRC (body
)
2528 && ! (GET_CODE (SET_SRC (body
)) == MEM
2529 && MEM_VOLATILE_P (SET_SRC (body
))))
2536 /* See if there is still a NOTE_INSN_FUNCTION_END in this function.
2537 If so indicate that this function can drop off the end by returning
2540 CHECK_DELETED indicates whether we must check if the note being
2541 searched for has the deleted flag set.
2543 DELETE_FINAL_NOTE indicates whether we should delete the note
2547 calculate_can_reach_end (last
, delete_final_note
)
2549 int delete_final_note
;
2554 while (insn
!= NULL_RTX
)
2558 /* One label can follow the end-note: the return label. */
2559 if (GET_CODE (insn
) == CODE_LABEL
&& n_labels
-- > 0)
2561 /* Ordinary insns can follow it if returning a structure. */
2562 else if (GET_CODE (insn
) == INSN
)
2564 /* If machine uses explicit RETURN insns, no epilogue,
2565 then one of them follows the note. */
2566 else if (GET_CODE (insn
) == JUMP_INSN
2567 && GET_CODE (PATTERN (insn
)) == RETURN
)
2569 /* A barrier can follow the return insn. */
2570 else if (GET_CODE (insn
) == BARRIER
)
2572 /* Other kinds of notes can follow also. */
2573 else if (GET_CODE (insn
) == NOTE
2574 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_FUNCTION_END
)
2580 insn
= PREV_INSN (insn
);
2583 /* See if we backed up to the appropriate type of note. */
2584 if (insn
!= NULL_RTX
2585 && GET_CODE (insn
) == NOTE
2586 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_END
)
2588 if (delete_final_note
)
2596 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
2597 jump. Assume that this unconditional jump is to the exit test code. If
2598 the code is sufficiently simple, make a copy of it before INSN,
2599 followed by a jump to the exit of the loop. Then delete the unconditional
2602 Return 1 if we made the change, else 0.
2604 This is only safe immediately after a regscan pass because it uses the
2605 values of regno_first_uid and regno_last_uid. */
2608 duplicate_loop_exit_test (loop_start
)
2611 rtx insn
, set
, reg
, p
, link
;
2612 rtx copy
= 0, first_copy
= 0;
2614 rtx exitcode
= NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start
)));
2616 int max_reg
= max_reg_num ();
2619 /* Scan the exit code. We do not perform this optimization if any insn:
2623 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
2624 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
2625 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
2628 We also do not do this if we find an insn with ASM_OPERANDS. While
2629 this restriction should not be necessary, copying an insn with
2630 ASM_OPERANDS can confuse asm_noperands in some cases.
2632 Also, don't do this if the exit code is more than 20 insns. */
2634 for (insn
= exitcode
;
2636 && ! (GET_CODE (insn
) == NOTE
2637 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
);
2638 insn
= NEXT_INSN (insn
))
2640 switch (GET_CODE (insn
))
2646 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
2647 a jump immediately after the loop start that branches outside
2648 the loop but within an outer loop, near the exit test.
2649 If we copied this exit test and created a phony
2650 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
2651 before the exit test look like these could be safely moved
2652 out of the loop even if they actually may be never executed.
2653 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
2655 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
2656 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
)
2660 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
2661 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
))
2662 /* If we were to duplicate this code, we would not move
2663 the BLOCK notes, and so debugging the moved code would
2664 be difficult. Thus, we only move the code with -O2 or
2671 /* The code below would grossly mishandle REG_WAS_0 notes,
2672 so get rid of them here. */
2673 while ((p
= find_reg_note (insn
, REG_WAS_0
, NULL_RTX
)) != 0)
2674 remove_note (insn
, p
);
2675 if (++num_insns
> 20
2676 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
2677 || find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
))
2685 /* Unless INSN is zero, we can do the optimization. */
2691 /* See if any insn sets a register only used in the loop exit code and
2692 not a user variable. If so, replace it with a new register. */
2693 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
2694 if (GET_CODE (insn
) == INSN
2695 && (set
= single_set (insn
)) != 0
2696 && ((reg
= SET_DEST (set
), GET_CODE (reg
) == REG
)
2697 || (GET_CODE (reg
) == SUBREG
2698 && (reg
= SUBREG_REG (reg
), GET_CODE (reg
) == REG
)))
2699 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
2700 && REGNO_FIRST_UID (REGNO (reg
)) == INSN_UID (insn
))
2702 for (p
= NEXT_INSN (insn
); p
!= lastexit
; p
= NEXT_INSN (p
))
2703 if (REGNO_LAST_UID (REGNO (reg
)) == INSN_UID (p
))
2708 /* We can do the replacement. Allocate reg_map if this is the
2709 first replacement we found. */
2711 reg_map
= (rtx
*) xcalloc (max_reg
, sizeof (rtx
));
2713 REG_LOOP_TEST_P (reg
) = 1;
2715 reg_map
[REGNO (reg
)] = gen_reg_rtx (GET_MODE (reg
));
2719 /* Now copy each insn. */
2720 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
2722 switch (GET_CODE (insn
))
2725 copy
= emit_barrier_before (loop_start
);
2728 /* Only copy line-number notes. */
2729 if (NOTE_LINE_NUMBER (insn
) >= 0)
2731 copy
= emit_note_before (NOTE_LINE_NUMBER (insn
), loop_start
);
2732 NOTE_SOURCE_FILE (copy
) = NOTE_SOURCE_FILE (insn
);
2737 copy
= emit_insn_before (copy_insn (PATTERN (insn
)), loop_start
);
2739 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
2741 mark_jump_label (PATTERN (copy
), copy
, 0, 0);
2743 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
2745 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
2746 if (REG_NOTE_KIND (link
) != REG_LABEL
)
2748 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link
),
2751 if (reg_map
&& REG_NOTES (copy
))
2752 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
2756 copy
= emit_jump_insn_before (copy_insn (PATTERN (insn
)), loop_start
);
2758 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
2759 mark_jump_label (PATTERN (copy
), copy
, 0, 0);
2760 if (REG_NOTES (insn
))
2762 REG_NOTES (copy
) = copy_insn_1 (REG_NOTES (insn
));
2764 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
2767 /* If this is a simple jump, add it to the jump chain. */
2769 if (INSN_UID (copy
) < max_jump_chain
&& JUMP_LABEL (copy
)
2770 && simplejump_p (copy
))
2772 jump_chain
[INSN_UID (copy
)]
2773 = jump_chain
[INSN_UID (JUMP_LABEL (copy
))];
2774 jump_chain
[INSN_UID (JUMP_LABEL (copy
))] = copy
;
2782 /* Record the first insn we copied. We need it so that we can
2783 scan the copied insns for new pseudo registers. */
2788 /* Now clean up by emitting a jump to the end label and deleting the jump
2789 at the start of the loop. */
2790 if (! copy
|| GET_CODE (copy
) != BARRIER
)
2792 copy
= emit_jump_insn_before (gen_jump (get_label_after (insn
)),
2795 /* Record the first insn we copied. We need it so that we can
2796 scan the copied insns for new pseudo registers. This may not
2797 be strictly necessary since we should have copied at least one
2798 insn above. But I am going to be safe. */
2802 mark_jump_label (PATTERN (copy
), copy
, 0, 0);
2803 if (INSN_UID (copy
) < max_jump_chain
2804 && INSN_UID (JUMP_LABEL (copy
)) < max_jump_chain
)
2806 jump_chain
[INSN_UID (copy
)]
2807 = jump_chain
[INSN_UID (JUMP_LABEL (copy
))];
2808 jump_chain
[INSN_UID (JUMP_LABEL (copy
))] = copy
;
2810 emit_barrier_before (loop_start
);
2813 /* Now scan from the first insn we copied to the last insn we copied
2814 (copy) for new pseudo registers. Do this after the code to jump to
2815 the end label since that might create a new pseudo too. */
2816 reg_scan_update (first_copy
, copy
, max_reg
);
2818 /* Mark the exit code as the virtual top of the converted loop. */
2819 emit_note_before (NOTE_INSN_LOOP_VTOP
, exitcode
);
2821 delete_insn (next_nonnote_insn (loop_start
));
2830 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
2831 loop-end notes between START and END out before START. Assume that
2832 END is not such a note. START may be such a note. Returns the value
2833 of the new starting insn, which may be different if the original start
2837 squeeze_notes (start
, end
)
2843 for (insn
= start
; insn
!= end
; insn
= next
)
2845 next
= NEXT_INSN (insn
);
2846 if (GET_CODE (insn
) == NOTE
2847 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
2848 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
2849 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
2850 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
2851 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
2852 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
))
2858 rtx prev
= PREV_INSN (insn
);
2859 PREV_INSN (insn
) = PREV_INSN (start
);
2860 NEXT_INSN (insn
) = start
;
2861 NEXT_INSN (PREV_INSN (insn
)) = insn
;
2862 PREV_INSN (NEXT_INSN (insn
)) = insn
;
2863 NEXT_INSN (prev
) = next
;
2864 PREV_INSN (next
) = prev
;
2872 /* Compare the instructions before insn E1 with those before E2
2873 to find an opportunity for cross jumping.
2874 (This means detecting identical sequences of insns followed by
2875 jumps to the same place, or followed by a label and a jump
2876 to that label, and replacing one with a jump to the other.)
2878 Assume E1 is a jump that jumps to label E2
2879 (that is not always true but it might as well be).
2880 Find the longest possible equivalent sequences
2881 and store the first insns of those sequences into *F1 and *F2.
2882 Store zero there if no equivalent preceding instructions are found.
2884 We give up if we find a label in stream 1.
2885 Actually we could transfer that label into stream 2. */
2888 find_cross_jump (e1
, e2
, minimum
, f1
, f2
)
2893 register rtx i1
= e1
, i2
= e2
;
2894 register rtx p1
, p2
;
2897 rtx last1
= 0, last2
= 0;
2898 rtx afterlast1
= 0, afterlast2
= 0;
2905 i1
= prev_nonnote_insn (i1
);
2907 i2
= PREV_INSN (i2
);
2908 while (i2
&& (GET_CODE (i2
) == NOTE
|| GET_CODE (i2
) == CODE_LABEL
))
2909 i2
= PREV_INSN (i2
);
2914 /* Don't allow the range of insns preceding E1 or E2
2915 to include the other (E2 or E1). */
2916 if (i2
== e1
|| i1
== e2
)
2919 /* If we will get to this code by jumping, those jumps will be
2920 tensioned to go directly to the new label (before I2),
2921 so this cross-jumping won't cost extra. So reduce the minimum. */
2922 if (GET_CODE (i1
) == CODE_LABEL
)
2928 if (i2
== 0 || GET_CODE (i1
) != GET_CODE (i2
))
2931 /* Avoid moving insns across EH regions if either of the insns
2934 && (asynchronous_exceptions
|| GET_CODE (i1
) == CALL_INSN
)
2935 && !in_same_eh_region (i1
, i2
))
2941 /* If this is a CALL_INSN, compare register usage information.
2942 If we don't check this on stack register machines, the two
2943 CALL_INSNs might be merged leaving reg-stack.c with mismatching
2944 numbers of stack registers in the same basic block.
2945 If we don't check this on machines with delay slots, a delay slot may
2946 be filled that clobbers a parameter expected by the subroutine.
2948 ??? We take the simple route for now and assume that if they're
2949 equal, they were constructed identically. */
2951 if (GET_CODE (i1
) == CALL_INSN
2952 && ! rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1
),
2953 CALL_INSN_FUNCTION_USAGE (i2
)))
2957 /* If cross_jump_death_matters is not 0, the insn's mode
2958 indicates whether or not the insn contains any stack-like
2961 if (!lose
&& cross_jump_death_matters
&& stack_regs_mentioned (i1
))
2963 /* If register stack conversion has already been done, then
2964 death notes must also be compared before it is certain that
2965 the two instruction streams match. */
2968 HARD_REG_SET i1_regset
, i2_regset
;
2970 CLEAR_HARD_REG_SET (i1_regset
);
2971 CLEAR_HARD_REG_SET (i2_regset
);
2973 for (note
= REG_NOTES (i1
); note
; note
= XEXP (note
, 1))
2974 if (REG_NOTE_KIND (note
) == REG_DEAD
2975 && STACK_REG_P (XEXP (note
, 0)))
2976 SET_HARD_REG_BIT (i1_regset
, REGNO (XEXP (note
, 0)));
2978 for (note
= REG_NOTES (i2
); note
; note
= XEXP (note
, 1))
2979 if (REG_NOTE_KIND (note
) == REG_DEAD
2980 && STACK_REG_P (XEXP (note
, 0)))
2981 SET_HARD_REG_BIT (i2_regset
, REGNO (XEXP (note
, 0)));
2983 GO_IF_HARD_REG_EQUAL (i1_regset
, i2_regset
, done
);
2992 /* Don't allow old-style asm or volatile extended asms to be accepted
2993 for cross jumping purposes. It is conceptually correct to allow
2994 them, since cross-jumping preserves the dynamic instruction order
2995 even though it is changing the static instruction order. However,
2996 if an asm is being used to emit an assembler pseudo-op, such as
2997 the MIPS `.set reorder' pseudo-op, then the static instruction order
2998 matters and it must be preserved. */
2999 if (GET_CODE (p1
) == ASM_INPUT
|| GET_CODE (p2
) == ASM_INPUT
3000 || (GET_CODE (p1
) == ASM_OPERANDS
&& MEM_VOLATILE_P (p1
))
3001 || (GET_CODE (p2
) == ASM_OPERANDS
&& MEM_VOLATILE_P (p2
)))
3004 if (lose
|| GET_CODE (p1
) != GET_CODE (p2
)
3005 || ! rtx_renumbered_equal_p (p1
, p2
))
3007 /* The following code helps take care of G++ cleanups. */
3011 if (!lose
&& GET_CODE (p1
) == GET_CODE (p2
)
3012 && ((equiv1
= find_reg_note (i1
, REG_EQUAL
, NULL_RTX
)) != 0
3013 || (equiv1
= find_reg_note (i1
, REG_EQUIV
, NULL_RTX
)) != 0)
3014 && ((equiv2
= find_reg_note (i2
, REG_EQUAL
, NULL_RTX
)) != 0
3015 || (equiv2
= find_reg_note (i2
, REG_EQUIV
, NULL_RTX
)) != 0)
3016 /* If the equivalences are not to a constant, they may
3017 reference pseudos that no longer exist, so we can't
3019 && CONSTANT_P (XEXP (equiv1
, 0))
3020 && rtx_equal_p (XEXP (equiv1
, 0), XEXP (equiv2
, 0)))
3022 rtx s1
= single_set (i1
);
3023 rtx s2
= single_set (i2
);
3024 if (s1
!= 0 && s2
!= 0
3025 && rtx_renumbered_equal_p (SET_DEST (s1
), SET_DEST (s2
)))
3027 validate_change (i1
, &SET_SRC (s1
), XEXP (equiv1
, 0), 1);
3028 validate_change (i2
, &SET_SRC (s2
), XEXP (equiv2
, 0), 1);
3029 if (! rtx_renumbered_equal_p (p1
, p2
))
3031 else if (apply_change_group ())
3036 /* Insns fail to match; cross jumping is limited to the following
3040 /* Don't allow the insn after a compare to be shared by
3041 cross-jumping unless the compare is also shared.
3042 Here, if either of these non-matching insns is a compare,
3043 exclude the following insn from possible cross-jumping. */
3044 if (sets_cc0_p (p1
) || sets_cc0_p (p2
))
3045 last1
= afterlast1
, last2
= afterlast2
, ++minimum
;
3048 /* If cross-jumping here will feed a jump-around-jump
3049 optimization, this jump won't cost extra, so reduce
3051 if (GET_CODE (i1
) == JUMP_INSN
3053 && prev_real_insn (JUMP_LABEL (i1
)) == e1
)
3059 if (GET_CODE (p1
) != USE
&& GET_CODE (p1
) != CLOBBER
)
3061 /* Ok, this insn is potentially includable in a cross-jump here. */
3062 afterlast1
= last1
, afterlast2
= last2
;
3063 last1
= i1
, last2
= i2
, --minimum
;
3067 if (minimum
<= 0 && last1
!= 0 && last1
!= e1
)
3068 *f1
= last1
, *f2
= last2
;
3072 do_cross_jump (insn
, newjpos
, newlpos
)
3073 rtx insn
, newjpos
, newlpos
;
3075 /* Find an existing label at this point
3076 or make a new one if there is none. */
3077 register rtx label
= get_label_before (newlpos
);
3079 /* Make the same jump insn jump to the new point. */
3080 if (GET_CODE (PATTERN (insn
)) == RETURN
)
3082 /* Remove from jump chain of returns. */
3083 delete_from_jump_chain (insn
);
3084 /* Change the insn. */
3085 PATTERN (insn
) = gen_jump (label
);
3086 INSN_CODE (insn
) = -1;
3087 JUMP_LABEL (insn
) = label
;
3088 LABEL_NUSES (label
)++;
3089 /* Add to new the jump chain. */
3090 if (INSN_UID (label
) < max_jump_chain
3091 && INSN_UID (insn
) < max_jump_chain
)
3093 jump_chain
[INSN_UID (insn
)] = jump_chain
[INSN_UID (label
)];
3094 jump_chain
[INSN_UID (label
)] = insn
;
3098 redirect_jump (insn
, label
);
3100 /* Delete the matching insns before the jump. Also, remove any REG_EQUAL
3101 or REG_EQUIV note in the NEWLPOS stream that isn't also present in
3102 the NEWJPOS stream. */
3104 while (newjpos
!= insn
)
3108 for (lnote
= REG_NOTES (newlpos
); lnote
; lnote
= XEXP (lnote
, 1))
3109 if ((REG_NOTE_KIND (lnote
) == REG_EQUAL
3110 || REG_NOTE_KIND (lnote
) == REG_EQUIV
)
3111 && ! find_reg_note (newjpos
, REG_EQUAL
, XEXP (lnote
, 0))
3112 && ! find_reg_note (newjpos
, REG_EQUIV
, XEXP (lnote
, 0)))
3113 remove_note (newlpos
, lnote
);
3115 delete_insn (newjpos
);
3116 newjpos
= next_real_insn (newjpos
);
3117 newlpos
= next_real_insn (newlpos
);
3121 /* Return the label before INSN, or put a new label there. */
3124 get_label_before (insn
)
3129 /* Find an existing label at this point
3130 or make a new one if there is none. */
3131 label
= prev_nonnote_insn (insn
);
3133 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
3135 rtx prev
= PREV_INSN (insn
);
3137 label
= gen_label_rtx ();
3138 emit_label_after (label
, prev
);
3139 LABEL_NUSES (label
) = 0;
3144 /* Return the label after INSN, or put a new label there. */
3147 get_label_after (insn
)
3152 /* Find an existing label at this point
3153 or make a new one if there is none. */
3154 label
= next_nonnote_insn (insn
);
3156 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
3158 label
= gen_label_rtx ();
3159 emit_label_after (label
, insn
);
3160 LABEL_NUSES (label
) = 0;
3165 /* Return 1 if INSN is a jump that jumps to right after TARGET
3166 only on the condition that TARGET itself would drop through.
3167 Assumes that TARGET is a conditional jump. */
3170 jump_back_p (insn
, target
)
3174 enum rtx_code codei
, codet
;
3176 if (simplejump_p (insn
) || ! condjump_p (insn
)
3177 || simplejump_p (target
)
3178 || target
!= prev_real_insn (JUMP_LABEL (insn
)))
3181 cinsn
= XEXP (SET_SRC (PATTERN (insn
)), 0);
3182 ctarget
= XEXP (SET_SRC (PATTERN (target
)), 0);
3184 codei
= GET_CODE (cinsn
);
3185 codet
= GET_CODE (ctarget
);
3187 if (XEXP (SET_SRC (PATTERN (insn
)), 1) == pc_rtx
)
3189 if (! can_reverse_comparison_p (cinsn
, insn
))
3191 codei
= reverse_condition (codei
);
3194 if (XEXP (SET_SRC (PATTERN (target
)), 2) == pc_rtx
)
3196 if (! can_reverse_comparison_p (ctarget
, target
))
3198 codet
= reverse_condition (codet
);
3201 return (codei
== codet
3202 && rtx_renumbered_equal_p (XEXP (cinsn
, 0), XEXP (ctarget
, 0))
3203 && rtx_renumbered_equal_p (XEXP (cinsn
, 1), XEXP (ctarget
, 1)));
3206 /* Given a comparison, COMPARISON, inside a conditional jump insn, INSN,
3207 return non-zero if it is safe to reverse this comparison. It is if our
3208 floating-point is not IEEE, if this is an NE or EQ comparison, or if
3209 this is known to be an integer comparison. */
3212 can_reverse_comparison_p (comparison
, insn
)
3218 /* If this is not actually a comparison, we can't reverse it. */
3219 if (GET_RTX_CLASS (GET_CODE (comparison
)) != '<')
3222 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3223 /* If this is an NE comparison, it is safe to reverse it to an EQ
3224 comparison and vice versa, even for floating point. If no operands
3225 are NaNs, the reversal is valid. If some operand is a NaN, EQ is
3226 always false and NE is always true, so the reversal is also valid. */
3228 || GET_CODE (comparison
) == NE
3229 || GET_CODE (comparison
) == EQ
)
3232 arg0
= XEXP (comparison
, 0);
3234 /* Make sure ARG0 is one of the actual objects being compared. If we
3235 can't do this, we can't be sure the comparison can be reversed.
3237 Handle cc0 and a MODE_CC register. */
3238 if ((GET_CODE (arg0
) == REG
&& GET_MODE_CLASS (GET_MODE (arg0
)) == MODE_CC
)
3244 rtx prev
= prev_nonnote_insn (insn
);
3247 /* First see if the condition code mode alone if enough to say we can
3248 reverse the condition. If not, then search backwards for a set of
3249 ARG0. We do not need to check for an insn clobbering it since valid
3250 code will contain set a set with no intervening clobber. But
3251 stop when we reach a label. */
3252 #ifdef REVERSIBLE_CC_MODE
3253 if (GET_MODE_CLASS (GET_MODE (arg0
)) == MODE_CC
3254 && REVERSIBLE_CC_MODE (GET_MODE (arg0
)))
3258 for (prev
= prev_nonnote_insn (insn
);
3259 prev
!= 0 && GET_CODE (prev
) != CODE_LABEL
;
3260 prev
= prev_nonnote_insn (prev
))
3261 if ((set
= single_set (prev
)) != 0
3262 && rtx_equal_p (SET_DEST (set
), arg0
))
3264 arg0
= SET_SRC (set
);
3266 if (GET_CODE (arg0
) == COMPARE
)
3267 arg0
= XEXP (arg0
, 0);
3272 /* We can reverse this if ARG0 is a CONST_INT or if its mode is
3273 not VOIDmode and neither a MODE_CC nor MODE_FLOAT type. */
3274 return (GET_CODE (arg0
) == CONST_INT
3275 || (GET_MODE (arg0
) != VOIDmode
3276 && GET_MODE_CLASS (GET_MODE (arg0
)) != MODE_CC
3277 && GET_MODE_CLASS (GET_MODE (arg0
)) != MODE_FLOAT
));
3280 /* Given an rtx-code for a comparison, return the code for the negated
3281 comparison. If no such code exists, return UNKNOWN.
3283 WATCH OUT! reverse_condition is not safe to use on a jump that might
3284 be acting on the results of an IEEE floating point comparison, because
3285 of the special treatment of non-signaling nans in comparisons.
3286 Use can_reverse_comparison_p to be sure. */
3289 reverse_condition (code
)
3332 /* Similar, but we're allowed to generate unordered comparisons, which
3333 makes it safe for IEEE floating-point. Of course, we have to recognize
3334 that the target will support them too... */
3337 reverse_condition_maybe_unordered (code
)
3340 /* Non-IEEE formats don't have unordered conditions. */
3341 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
3342 return reverse_condition (code
);
3388 /* Similar, but return the code when two operands of a comparison are swapped.
3389 This IS safe for IEEE floating-point. */
3392 swap_condition (code
)
3435 /* Given a comparison CODE, return the corresponding unsigned comparison.
3436 If CODE is an equality comparison or already an unsigned comparison,
3437 CODE is returned. */
3440 unsigned_condition (code
)
3467 /* Similarly, return the signed version of a comparison. */
3470 signed_condition (code
)
3497 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
3498 truth of CODE1 implies the truth of CODE2. */
3501 comparison_dominates_p (code1
, code2
)
3502 enum rtx_code code1
, code2
;
3510 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
3511 || code2
== ORDERED
)
3516 if (code2
== LE
|| code2
== NE
|| code2
== ORDERED
)
3521 if (code2
== GE
|| code2
== NE
|| code2
== ORDERED
)
3527 if (code2
== ORDERED
)
3532 if (code2
== NE
|| code2
== ORDERED
)
3537 if (code2
== LEU
|| code2
== NE
)
3542 if (code2
== GEU
|| code2
== NE
)
3558 /* Return 1 if INSN is an unconditional jump and nothing else. */
3564 return (GET_CODE (insn
) == JUMP_INSN
3565 && GET_CODE (PATTERN (insn
)) == SET
3566 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
3567 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
3570 /* Return nonzero if INSN is a (possibly) conditional jump
3571 and nothing more. */
3577 register rtx x
= PATTERN (insn
);
3579 if (GET_CODE (x
) != SET
3580 || GET_CODE (SET_DEST (x
)) != PC
)
3584 if (GET_CODE (x
) == LABEL_REF
)
3586 else return (GET_CODE (x
) == IF_THEN_ELSE
3587 && ((GET_CODE (XEXP (x
, 2)) == PC
3588 && (GET_CODE (XEXP (x
, 1)) == LABEL_REF
3589 || GET_CODE (XEXP (x
, 1)) == RETURN
))
3590 || (GET_CODE (XEXP (x
, 1)) == PC
3591 && (GET_CODE (XEXP (x
, 2)) == LABEL_REF
3592 || GET_CODE (XEXP (x
, 2)) == RETURN
))));
3597 /* Return nonzero if INSN is a (possibly) conditional jump inside a
3601 condjump_in_parallel_p (insn
)
3604 register rtx x
= PATTERN (insn
);
3606 if (GET_CODE (x
) != PARALLEL
)
3609 x
= XVECEXP (x
, 0, 0);
3611 if (GET_CODE (x
) != SET
)
3613 if (GET_CODE (SET_DEST (x
)) != PC
)
3615 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
3617 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
3619 if (XEXP (SET_SRC (x
), 2) == pc_rtx
3620 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
3621 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
3623 if (XEXP (SET_SRC (x
), 1) == pc_rtx
3624 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
3625 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
3630 /* Return the label of a conditional jump. */
3633 condjump_label (insn
)
3636 register rtx x
= PATTERN (insn
);
3638 if (GET_CODE (x
) == PARALLEL
)
3639 x
= XVECEXP (x
, 0, 0);
3640 if (GET_CODE (x
) != SET
)
3642 if (GET_CODE (SET_DEST (x
)) != PC
)
3645 if (GET_CODE (x
) == LABEL_REF
)
3647 if (GET_CODE (x
) != IF_THEN_ELSE
)
3649 if (XEXP (x
, 2) == pc_rtx
&& GET_CODE (XEXP (x
, 1)) == LABEL_REF
)
3651 if (XEXP (x
, 1) == pc_rtx
&& GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
3656 /* Return true if INSN is a (possibly conditional) return insn. */
3659 returnjump_p_1 (loc
, data
)
3661 void *data ATTRIBUTE_UNUSED
;
3664 return x
&& GET_CODE (x
) == RETURN
;
3671 return for_each_rtx (&PATTERN (insn
), returnjump_p_1
, NULL
);
3674 /* Return true if INSN is a jump that only transfers control and
3683 if (GET_CODE (insn
) != JUMP_INSN
)
3686 set
= single_set (insn
);
3689 if (GET_CODE (SET_DEST (set
)) != PC
)
3691 if (side_effects_p (SET_SRC (set
)))
3699 /* Return 1 if X is an RTX that does nothing but set the condition codes
3700 and CLOBBER or USE registers.
3701 Return -1 if X does explicitly set the condition codes,
3702 but also does other things. */
3706 rtx x ATTRIBUTE_UNUSED
;
3708 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
3710 if (GET_CODE (x
) == PARALLEL
)
3714 int other_things
= 0;
3715 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
3717 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
3718 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
3720 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
3723 return ! sets_cc0
? 0 : other_things
? -1 : 1;
3729 /* Follow any unconditional jump at LABEL;
3730 return the ultimate label reached by any such chain of jumps.
3731 If LABEL is not followed by a jump, return LABEL.
3732 If the chain loops or we can't find end, return LABEL,
3733 since that tells caller to avoid changing the insn.
3735 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
3736 a USE or CLOBBER. */
3739 follow_jumps (label
)
3744 register rtx value
= label
;
3749 && (insn
= next_active_insn (value
)) != 0
3750 && GET_CODE (insn
) == JUMP_INSN
3751 && ((JUMP_LABEL (insn
) != 0 && simplejump_p (insn
))
3752 || GET_CODE (PATTERN (insn
)) == RETURN
)
3753 && (next
= NEXT_INSN (insn
))
3754 && GET_CODE (next
) == BARRIER
);
3757 /* Don't chain through the insn that jumps into a loop
3758 from outside the loop,
3759 since that would create multiple loop entry jumps
3760 and prevent loop optimization. */
3762 if (!reload_completed
)
3763 for (tem
= value
; tem
!= insn
; tem
= NEXT_INSN (tem
))
3764 if (GET_CODE (tem
) == NOTE
3765 && (NOTE_LINE_NUMBER (tem
) == NOTE_INSN_LOOP_BEG
3766 /* ??? Optional. Disables some optimizations, but makes
3767 gcov output more accurate with -O. */
3768 || (flag_test_coverage
&& NOTE_LINE_NUMBER (tem
) > 0)))
3771 /* If we have found a cycle, make the insn jump to itself. */
3772 if (JUMP_LABEL (insn
) == label
)
3775 tem
= next_active_insn (JUMP_LABEL (insn
));
3776 if (tem
&& (GET_CODE (PATTERN (tem
)) == ADDR_VEC
3777 || GET_CODE (PATTERN (tem
)) == ADDR_DIFF_VEC
))
3780 value
= JUMP_LABEL (insn
);
3787 /* Assuming that field IDX of X is a vector of label_refs,
3788 replace each of them by the ultimate label reached by it.
3789 Return nonzero if a change is made.
3790 If IGNORE_LOOPS is 0, we do not chain across a NOTE_INSN_LOOP_BEG. */
3793 tension_vector_labels (x
, idx
)
3799 for (i
= XVECLEN (x
, idx
) - 1; i
>= 0; i
--)
3801 register rtx olabel
= XEXP (XVECEXP (x
, idx
, i
), 0);
3802 register rtx nlabel
= follow_jumps (olabel
);
3803 if (nlabel
&& nlabel
!= olabel
)
3805 XEXP (XVECEXP (x
, idx
, i
), 0) = nlabel
;
3806 ++LABEL_NUSES (nlabel
);
3807 if (--LABEL_NUSES (olabel
) == 0)
3808 delete_insn (olabel
);
3815 /* Find all CODE_LABELs referred to in X, and increment their use counts.
3816 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
3817 in INSN, then store one of them in JUMP_LABEL (INSN).
3818 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
3819 referenced in INSN, add a REG_LABEL note containing that label to INSN.
3820 Also, when there are consecutive labels, canonicalize on the last of them.
3822 Note that two labels separated by a loop-beginning note
3823 must be kept distinct if we have not yet done loop-optimization,
3824 because the gap between them is where loop-optimize
3825 will want to move invariant code to. CROSS_JUMP tells us
3826 that loop-optimization is done with.
3828 Once reload has completed (CROSS_JUMP non-zero), we need not consider
3829 two labels distinct if they are separated by only USE or CLOBBER insns. */
3832 mark_jump_label (x
, insn
, cross_jump
, in_mem
)
3838 register RTX_CODE code
= GET_CODE (x
);
3840 register const char *fmt
;
3862 /* If this is a constant-pool reference, see if it is a label. */
3863 if (CONSTANT_POOL_ADDRESS_P (x
))
3864 mark_jump_label (get_pool_constant (x
), insn
, cross_jump
, in_mem
);
3869 rtx label
= XEXP (x
, 0);
3874 if (GET_CODE (label
) != CODE_LABEL
)
3877 /* Ignore references to labels of containing functions. */
3878 if (LABEL_REF_NONLOCAL_P (x
))
3881 /* If there are other labels following this one,
3882 replace it with the last of the consecutive labels. */
3883 for (next
= NEXT_INSN (label
); next
; next
= NEXT_INSN (next
))
3885 if (GET_CODE (next
) == CODE_LABEL
)
3887 else if (cross_jump
&& GET_CODE (next
) == INSN
3888 && (GET_CODE (PATTERN (next
)) == USE
3889 || GET_CODE (PATTERN (next
)) == CLOBBER
))
3891 else if (GET_CODE (next
) != NOTE
)
3893 else if (! cross_jump
3894 && (NOTE_LINE_NUMBER (next
) == NOTE_INSN_LOOP_BEG
3895 || NOTE_LINE_NUMBER (next
) == NOTE_INSN_FUNCTION_END
3896 /* ??? Optional. Disables some optimizations, but
3897 makes gcov output more accurate with -O. */
3898 || (flag_test_coverage
&& NOTE_LINE_NUMBER (next
) > 0)))
3902 XEXP (x
, 0) = label
;
3903 if (! insn
|| ! INSN_DELETED_P (insn
))
3904 ++LABEL_NUSES (label
);
3908 if (GET_CODE (insn
) == JUMP_INSN
)
3909 JUMP_LABEL (insn
) = label
;
3911 /* If we've changed OLABEL and we had a REG_LABEL note
3912 for it, update it as well. */
3913 else if (label
!= olabel
3914 && (note
= find_reg_note (insn
, REG_LABEL
, olabel
)) != 0)
3915 XEXP (note
, 0) = label
;
3917 /* Otherwise, add a REG_LABEL note for LABEL unless there already
3919 else if (! find_reg_note (insn
, REG_LABEL
, label
))
3921 /* This code used to ignore labels which refered to dispatch
3922 tables to avoid flow.c generating worse code.
3924 However, in the presense of global optimizations like
3925 gcse which call find_basic_blocks without calling
3926 life_analysis, not recording such labels will lead
3927 to compiler aborts because of inconsistencies in the
3928 flow graph. So we go ahead and record the label.
3930 It may also be the case that the optimization argument
3931 is no longer valid because of the more accurate cfg
3932 we build in find_basic_blocks -- it no longer pessimizes
3933 code when it finds a REG_LABEL note. */
3934 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_LABEL
, label
,
3941 /* Do walk the labels in a vector, but not the first operand of an
3942 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
3945 if (! INSN_DELETED_P (insn
))
3947 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
3949 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
3950 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
,
3951 cross_jump
, in_mem
);
3959 fmt
= GET_RTX_FORMAT (code
);
3960 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3963 mark_jump_label (XEXP (x
, i
), insn
, cross_jump
, in_mem
);
3964 else if (fmt
[i
] == 'E')
3967 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3968 mark_jump_label (XVECEXP (x
, i
, j
), insn
, cross_jump
, in_mem
);
3973 /* If all INSN does is set the pc, delete it,
3974 and delete the insn that set the condition codes for it
3975 if that's what the previous thing was. */
3981 register rtx set
= single_set (insn
);
3983 if (set
&& GET_CODE (SET_DEST (set
)) == PC
)
3984 delete_computation (insn
);
3987 /* Verify INSN is a BARRIER and delete it. */
3990 delete_barrier (insn
)
3993 if (GET_CODE (insn
) != BARRIER
)
3999 /* Recursively delete prior insns that compute the value (used only by INSN
4000 which the caller is deleting) stored in the register mentioned by NOTE
4001 which is a REG_DEAD note associated with INSN. */
4004 delete_prior_computation (note
, insn
)
4009 rtx reg
= XEXP (note
, 0);
4011 for (our_prev
= prev_nonnote_insn (insn
);
4012 our_prev
&& (GET_CODE (our_prev
) == INSN
4013 || GET_CODE (our_prev
) == CALL_INSN
);
4014 our_prev
= prev_nonnote_insn (our_prev
))
4016 rtx pat
= PATTERN (our_prev
);
4018 /* If we reach a CALL which is not calling a const function
4019 or the callee pops the arguments, then give up. */
4020 if (GET_CODE (our_prev
) == CALL_INSN
4021 && (! CONST_CALL_P (our_prev
)
4022 || GET_CODE (pat
) != SET
|| GET_CODE (SET_SRC (pat
)) != CALL
))
4025 /* If we reach a SEQUENCE, it is too complex to try to
4026 do anything with it, so give up. */
4027 if (GET_CODE (pat
) == SEQUENCE
)
4030 if (GET_CODE (pat
) == USE
4031 && GET_CODE (XEXP (pat
, 0)) == INSN
)
4032 /* reorg creates USEs that look like this. We leave them
4033 alone because reorg needs them for its own purposes. */
4036 if (reg_set_p (reg
, pat
))
4038 if (side_effects_p (pat
) && GET_CODE (our_prev
) != CALL_INSN
)
4041 if (GET_CODE (pat
) == PARALLEL
)
4043 /* If we find a SET of something else, we can't
4048 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
4050 rtx part
= XVECEXP (pat
, 0, i
);
4052 if (GET_CODE (part
) == SET
4053 && SET_DEST (part
) != reg
)
4057 if (i
== XVECLEN (pat
, 0))
4058 delete_computation (our_prev
);
4060 else if (GET_CODE (pat
) == SET
4061 && GET_CODE (SET_DEST (pat
)) == REG
)
4063 int dest_regno
= REGNO (SET_DEST (pat
));
4065 = dest_regno
+ (dest_regno
< FIRST_PSEUDO_REGISTER
4066 ? HARD_REGNO_NREGS (dest_regno
,
4067 GET_MODE (SET_DEST (pat
))) : 1);
4068 int regno
= REGNO (reg
);
4069 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
4070 ? HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) : 1);
4072 if (dest_regno
>= regno
4073 && dest_endregno
<= endregno
)
4074 delete_computation (our_prev
);
4076 /* We may have a multi-word hard register and some, but not
4077 all, of the words of the register are needed in subsequent
4078 insns. Write REG_UNUSED notes for those parts that were not
4080 else if (dest_regno
<= regno
4081 && dest_endregno
>= endregno
)
4085 REG_NOTES (our_prev
)
4086 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
, REG_NOTES (our_prev
));
4088 for (i
= dest_regno
; i
< dest_endregno
; i
++)
4089 if (! find_regno_note (our_prev
, REG_UNUSED
, i
))
4092 if (i
== dest_endregno
)
4093 delete_computation (our_prev
);
4100 /* If PAT references the register that dies here, it is an
4101 additional use. Hence any prior SET isn't dead. However, this
4102 insn becomes the new place for the REG_DEAD note. */
4103 if (reg_overlap_mentioned_p (reg
, pat
))
4105 XEXP (note
, 1) = REG_NOTES (our_prev
);
4106 REG_NOTES (our_prev
) = note
;
4112 /* Delete INSN and recursively delete insns that compute values used only
4113 by INSN. This uses the REG_DEAD notes computed during flow analysis.
4114 If we are running before flow.c, we need do nothing since flow.c will
4115 delete dead code. We also can't know if the registers being used are
4116 dead or not at this point.
4118 Otherwise, look at all our REG_DEAD notes. If a previous insn does
4119 nothing other than set a register that dies in this insn, we can delete
4122 On machines with CC0, if CC0 is used in this insn, we may be able to
4123 delete the insn that set it. */
4126 delete_computation (insn
)
4133 if (reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
4135 rtx prev
= prev_nonnote_insn (insn
);
4136 /* We assume that at this stage
4137 CC's are always set explicitly
4138 and always immediately before the jump that
4139 will use them. So if the previous insn
4140 exists to set the CC's, delete it
4141 (unless it performs auto-increments, etc.). */
4142 if (prev
&& GET_CODE (prev
) == INSN
4143 && sets_cc0_p (PATTERN (prev
)))
4145 if (sets_cc0_p (PATTERN (prev
)) > 0
4146 && ! side_effects_p (PATTERN (prev
)))
4147 delete_computation (prev
);
4149 /* Otherwise, show that cc0 won't be used. */
4150 REG_NOTES (prev
) = gen_rtx_EXPR_LIST (REG_UNUSED
,
4151 cc0_rtx
, REG_NOTES (prev
));
4156 #ifdef INSN_SCHEDULING
4157 /* ?!? The schedulers do not keep REG_DEAD notes accurate after
4158 reload has completed. The schedulers need to be fixed. Until
4159 they are, we must not rely on the death notes here. */
4160 if (reload_completed
&& flag_schedule_insns_after_reload
)
4167 /* The REG_DEAD note may have been omitted for a register
4168 which is both set and used by the insn. */
4169 set
= single_set (insn
);
4170 if (set
&& GET_CODE (SET_DEST (set
)) == REG
)
4172 int dest_regno
= REGNO (SET_DEST (set
));
4174 = dest_regno
+ (dest_regno
< FIRST_PSEUDO_REGISTER
4175 ? HARD_REGNO_NREGS (dest_regno
,
4176 GET_MODE (SET_DEST (set
))) : 1);
4179 for (i
= dest_regno
; i
< dest_endregno
; i
++)
4181 if (! refers_to_regno_p (i
, i
+ 1, SET_SRC (set
), NULL_PTR
)
4182 || find_regno_note (insn
, REG_DEAD
, i
))
4185 note
= gen_rtx_EXPR_LIST (REG_DEAD
, (i
< FIRST_PSEUDO_REGISTER
4186 ? gen_rtx_REG (reg_raw_mode
[i
], i
)
4187 : SET_DEST (set
)), NULL_RTX
);
4188 delete_prior_computation (note
, insn
);
4192 for (note
= REG_NOTES (insn
); note
; note
= next
)
4194 next
= XEXP (note
, 1);
4196 if (REG_NOTE_KIND (note
) != REG_DEAD
4197 /* Verify that the REG_NOTE is legitimate. */
4198 || GET_CODE (XEXP (note
, 0)) != REG
)
4201 delete_prior_computation (note
, insn
);
4207 /* Delete insn INSN from the chain of insns and update label ref counts.
4208 May delete some following insns as a consequence; may even delete
4209 a label elsewhere and insns that follow it.
4211 Returns the first insn after INSN that was not deleted. */
4217 register rtx next
= NEXT_INSN (insn
);
4218 register rtx prev
= PREV_INSN (insn
);
4219 register int was_code_label
= (GET_CODE (insn
) == CODE_LABEL
);
4220 register int dont_really_delete
= 0;
4222 while (next
&& INSN_DELETED_P (next
))
4223 next
= NEXT_INSN (next
);
4225 /* This insn is already deleted => return first following nondeleted. */
4226 if (INSN_DELETED_P (insn
))
4230 remove_node_from_expr_list (insn
, &nonlocal_goto_handler_labels
);
4232 /* Don't delete user-declared labels. When optimizing, convert them
4233 to special NOTEs instead. When not optimizing, leave them alone. */
4234 if (was_code_label
&& LABEL_NAME (insn
) != 0)
4237 dont_really_delete
= 1;
4238 else if (! dont_really_delete
)
4240 PUT_CODE (insn
, NOTE
);
4241 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED_LABEL
;
4242 NOTE_SOURCE_FILE (insn
) = 0;
4243 dont_really_delete
= 1;
4247 /* Mark this insn as deleted. */
4248 INSN_DELETED_P (insn
) = 1;
4250 /* If this is an unconditional jump, delete it from the jump chain. */
4251 if (simplejump_p (insn
))
4252 delete_from_jump_chain (insn
);
4254 /* If instruction is followed by a barrier,
4255 delete the barrier too. */
4257 if (next
!= 0 && GET_CODE (next
) == BARRIER
)
4259 INSN_DELETED_P (next
) = 1;
4260 next
= NEXT_INSN (next
);
4263 /* Patch out INSN (and the barrier if any) */
4265 if (! dont_really_delete
)
4269 NEXT_INSN (prev
) = next
;
4270 if (GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == SEQUENCE
)
4271 NEXT_INSN (XVECEXP (PATTERN (prev
), 0,
4272 XVECLEN (PATTERN (prev
), 0) - 1)) = next
;
4277 PREV_INSN (next
) = prev
;
4278 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == SEQUENCE
)
4279 PREV_INSN (XVECEXP (PATTERN (next
), 0, 0)) = prev
;
4282 if (prev
&& NEXT_INSN (prev
) == 0)
4283 set_last_insn (prev
);
4286 /* If deleting a jump, decrement the count of the label,
4287 and delete the label if it is now unused. */
4289 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
4291 rtx lab
= JUMP_LABEL (insn
), lab_next
;
4293 if (--LABEL_NUSES (lab
) == 0)
4295 /* This can delete NEXT or PREV,
4296 either directly if NEXT is JUMP_LABEL (INSN),
4297 or indirectly through more levels of jumps. */
4300 /* I feel a little doubtful about this loop,
4301 but I see no clean and sure alternative way
4302 to find the first insn after INSN that is not now deleted.
4303 I hope this works. */
4304 while (next
&& INSN_DELETED_P (next
))
4305 next
= NEXT_INSN (next
);
4308 else if ((lab_next
= next_nonnote_insn (lab
)) != NULL
4309 && GET_CODE (lab_next
) == JUMP_INSN
4310 && (GET_CODE (PATTERN (lab_next
)) == ADDR_VEC
4311 || GET_CODE (PATTERN (lab_next
)) == ADDR_DIFF_VEC
))
4313 /* If we're deleting the tablejump, delete the dispatch table.
4314 We may not be able to kill the label immediately preceeding
4315 just yet, as it might be referenced in code leading up to
4317 delete_insn (lab_next
);
4321 /* Likewise if we're deleting a dispatch table. */
4323 if (GET_CODE (insn
) == JUMP_INSN
4324 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
4325 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
4327 rtx pat
= PATTERN (insn
);
4328 int i
, diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
4329 int len
= XVECLEN (pat
, diff_vec_p
);
4331 for (i
= 0; i
< len
; i
++)
4332 if (--LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)) == 0)
4333 delete_insn (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0));
4334 while (next
&& INSN_DELETED_P (next
))
4335 next
= NEXT_INSN (next
);
4339 while (prev
&& (INSN_DELETED_P (prev
) || GET_CODE (prev
) == NOTE
))
4340 prev
= PREV_INSN (prev
);
4342 /* If INSN was a label and a dispatch table follows it,
4343 delete the dispatch table. The tablejump must have gone already.
4344 It isn't useful to fall through into a table. */
4347 && NEXT_INSN (insn
) != 0
4348 && GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
4349 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
4350 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
4351 next
= delete_insn (NEXT_INSN (insn
));
4353 /* If INSN was a label, delete insns following it if now unreachable. */
4355 if (was_code_label
&& prev
&& GET_CODE (prev
) == BARRIER
)
4357 register RTX_CODE code
;
4359 && (GET_RTX_CLASS (code
= GET_CODE (next
)) == 'i'
4360 || code
== NOTE
|| code
== BARRIER
4361 || (code
== CODE_LABEL
&& INSN_DELETED_P (next
))))
4364 && NOTE_LINE_NUMBER (next
) != NOTE_INSN_FUNCTION_END
)
4365 next
= NEXT_INSN (next
);
4366 /* Keep going past other deleted labels to delete what follows. */
4367 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
4368 next
= NEXT_INSN (next
);
4370 /* Note: if this deletes a jump, it can cause more
4371 deletion of unreachable code, after a different label.
4372 As long as the value from this recursive call is correct,
4373 this invocation functions correctly. */
4374 next
= delete_insn (next
);
4381 /* Advance from INSN till reaching something not deleted
4382 then return that. May return INSN itself. */
4385 next_nondeleted_insn (insn
)
4388 while (INSN_DELETED_P (insn
))
4389 insn
= NEXT_INSN (insn
);
4393 /* Delete a range of insns from FROM to TO, inclusive.
4394 This is for the sake of peephole optimization, so assume
4395 that whatever these insns do will still be done by a new
4396 peephole insn that will replace them. */
4399 delete_for_peephole (from
, to
)
4400 register rtx from
, to
;
4402 register rtx insn
= from
;
4406 register rtx next
= NEXT_INSN (insn
);
4407 register rtx prev
= PREV_INSN (insn
);
4409 if (GET_CODE (insn
) != NOTE
)
4411 INSN_DELETED_P (insn
) = 1;
4413 /* Patch this insn out of the chain. */
4414 /* We don't do this all at once, because we
4415 must preserve all NOTEs. */
4417 NEXT_INSN (prev
) = next
;
4420 PREV_INSN (next
) = prev
;
4428 /* Note that if TO is an unconditional jump
4429 we *do not* delete the BARRIER that follows,
4430 since the peephole that replaces this sequence
4431 is also an unconditional jump in that case. */
4434 /* We have determined that INSN is never reached, and are about to
4435 delete it. Print a warning if the user asked for one.
4437 To try to make this warning more useful, this should only be called
4438 once per basic block not reached, and it only warns when the basic
4439 block contains more than one line from the current function, and
4440 contains at least one operation. CSE and inlining can duplicate insns,
4441 so it's possible to get spurious warnings from this. */
4444 never_reached_warning (avoided_insn
)
4448 rtx a_line_note
= NULL
;
4449 int two_avoided_lines
= 0;
4450 int contains_insn
= 0;
4452 if (! warn_notreached
)
4455 /* Scan forwards, looking at LINE_NUMBER notes, until
4456 we hit a LABEL or we run out of insns. */
4458 for (insn
= avoided_insn
; insn
!= NULL
; insn
= NEXT_INSN (insn
))
4460 if (GET_CODE (insn
) == CODE_LABEL
)
4462 else if (GET_CODE (insn
) == NOTE
/* A line number note? */
4463 && NOTE_LINE_NUMBER (insn
) >= 0)
4465 if (a_line_note
== NULL
)
4468 two_avoided_lines
|= (NOTE_LINE_NUMBER (a_line_note
)
4469 != NOTE_LINE_NUMBER (insn
));
4471 else if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
4474 if (two_avoided_lines
&& contains_insn
)
4475 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note
),
4476 NOTE_LINE_NUMBER (a_line_note
),
4477 "will never be executed");
4480 /* Invert the condition of the jump JUMP, and make it jump
4481 to label NLABEL instead of where it jumps now. */
4484 invert_jump (jump
, nlabel
)
4487 /* We have to either invert the condition and change the label or
4488 do neither. Either operation could fail. We first try to invert
4489 the jump. If that succeeds, we try changing the label. If that fails,
4490 we invert the jump back to what it was. */
4492 if (! invert_exp (PATTERN (jump
), jump
))
4495 if (redirect_jump (jump
, nlabel
))
4497 if (flag_branch_probabilities
)
4499 rtx note
= find_reg_note (jump
, REG_BR_PROB
, 0);
4501 /* An inverted jump means that a probability taken becomes a
4502 probability not taken. Subtract the branch probability from the
4503 probability base to convert it back to a taken probability.
4504 (We don't flip the probability on a branch that's never taken. */
4505 if (note
&& XINT (XEXP (note
, 0), 0) >= 0)
4506 XINT (XEXP (note
, 0), 0) = REG_BR_PROB_BASE
- XINT (XEXP (note
, 0), 0);
4512 if (! invert_exp (PATTERN (jump
), jump
))
4513 /* This should just be putting it back the way it was. */
4519 /* Invert the jump condition of rtx X contained in jump insn, INSN.
4521 Return 1 if we can do so, 0 if we cannot find a way to do so that
4522 matches a pattern. */
4525 invert_exp (x
, insn
)
4529 register RTX_CODE code
;
4531 register const char *fmt
;
4533 code
= GET_CODE (x
);
4535 if (code
== IF_THEN_ELSE
)
4537 register rtx comp
= XEXP (x
, 0);
4540 /* We can do this in two ways: The preferable way, which can only
4541 be done if this is not an integer comparison, is to reverse
4542 the comparison code. Otherwise, swap the THEN-part and ELSE-part
4543 of the IF_THEN_ELSE. If we can't do either, fail. */
4545 if (can_reverse_comparison_p (comp
, insn
)
4546 && validate_change (insn
, &XEXP (x
, 0),
4547 gen_rtx_fmt_ee (reverse_condition (GET_CODE (comp
)),
4548 GET_MODE (comp
), XEXP (comp
, 0),
4549 XEXP (comp
, 1)), 0))
4553 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
4554 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
4555 return apply_change_group ();
4558 fmt
= GET_RTX_FORMAT (code
);
4559 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4563 if (! invert_exp (XEXP (x
, i
), insn
))
4566 else if (fmt
[i
] == 'E')
4569 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4570 if (!invert_exp (XVECEXP (x
, i
, j
), insn
))
4578 /* Make jump JUMP jump to label NLABEL instead of where it jumps now.
4579 If the old jump target label is unused as a result,
4580 it and the code following it may be deleted.
4582 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
4585 The return value will be 1 if the change was made, 0 if it wasn't (this
4586 can only occur for NLABEL == 0). */
4589 redirect_jump (jump
, nlabel
)
4592 register rtx olabel
= JUMP_LABEL (jump
);
4594 if (nlabel
== olabel
)
4597 if (! redirect_exp (&PATTERN (jump
), olabel
, nlabel
, jump
))
4600 /* If this is an unconditional branch, delete it from the jump_chain of
4601 OLABEL and add it to the jump_chain of NLABEL (assuming both labels
4602 have UID's in range and JUMP_CHAIN is valid). */
4603 if (jump_chain
&& (simplejump_p (jump
)
4604 || GET_CODE (PATTERN (jump
)) == RETURN
))
4606 int label_index
= nlabel
? INSN_UID (nlabel
) : 0;
4608 delete_from_jump_chain (jump
);
4609 if (label_index
< max_jump_chain
4610 && INSN_UID (jump
) < max_jump_chain
)
4612 jump_chain
[INSN_UID (jump
)] = jump_chain
[label_index
];
4613 jump_chain
[label_index
] = jump
;
4617 JUMP_LABEL (jump
) = nlabel
;
4619 ++LABEL_NUSES (nlabel
);
4621 /* If we're eliding the jump over exception cleanups at the end of a
4622 function, move the function end note so that -Wreturn-type works. */
4623 if (olabel
&& NEXT_INSN (olabel
)
4624 && GET_CODE (NEXT_INSN (olabel
)) == NOTE
4625 && NOTE_LINE_NUMBER (NEXT_INSN (olabel
)) == NOTE_INSN_FUNCTION_END
)
4626 emit_note_after (NOTE_INSN_FUNCTION_END
, nlabel
);
4628 if (olabel
&& --LABEL_NUSES (olabel
) == 0)
4629 delete_insn (olabel
);
4634 /* Delete the instruction JUMP from any jump chain it might be on. */
4637 delete_from_jump_chain (jump
)
4641 rtx olabel
= JUMP_LABEL (jump
);
4643 /* Handle unconditional jumps. */
4644 if (jump_chain
&& olabel
!= 0
4645 && INSN_UID (olabel
) < max_jump_chain
4646 && simplejump_p (jump
))
4647 index
= INSN_UID (olabel
);
4648 /* Handle return insns. */
4649 else if (jump_chain
&& GET_CODE (PATTERN (jump
)) == RETURN
)
4653 if (jump_chain
[index
] == jump
)
4654 jump_chain
[index
] = jump_chain
[INSN_UID (jump
)];
4659 for (insn
= jump_chain
[index
];
4661 insn
= jump_chain
[INSN_UID (insn
)])
4662 if (jump_chain
[INSN_UID (insn
)] == jump
)
4664 jump_chain
[INSN_UID (insn
)] = jump_chain
[INSN_UID (jump
)];
4670 /* If NLABEL is nonzero, throughout the rtx at LOC,
4671 alter (LABEL_REF OLABEL) to (LABEL_REF NLABEL). If OLABEL is
4672 zero, alter (RETURN) to (LABEL_REF NLABEL).
4674 If NLABEL is zero, alter (LABEL_REF OLABEL) to (RETURN) and check
4675 validity with validate_change. Convert (set (pc) (label_ref olabel))
4678 Return 0 if we found a change we would like to make but it is invalid.
4679 Otherwise, return 1. */
4682 redirect_exp (loc
, olabel
, nlabel
, insn
)
4687 register rtx x
= *loc
;
4688 register RTX_CODE code
= GET_CODE (x
);
4690 register const char *fmt
;
4692 if (code
== LABEL_REF
)
4694 if (XEXP (x
, 0) == olabel
)
4697 XEXP (x
, 0) = nlabel
;
4699 return validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 0);
4703 else if (code
== RETURN
&& olabel
== 0)
4705 x
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
4706 if (loc
== &PATTERN (insn
))
4707 x
= gen_rtx_SET (VOIDmode
, pc_rtx
, x
);
4708 return validate_change (insn
, loc
, x
, 0);
4711 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
4712 && GET_CODE (SET_SRC (x
)) == LABEL_REF
4713 && XEXP (SET_SRC (x
), 0) == olabel
)
4714 return validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 0);
4716 fmt
= GET_RTX_FORMAT (code
);
4717 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4721 if (! redirect_exp (&XEXP (x
, i
), olabel
, nlabel
, insn
))
4724 else if (fmt
[i
] == 'E')
4727 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
4728 if (! redirect_exp (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
))
4736 /* Make jump JUMP jump to label NLABEL, assuming it used to be a tablejump.
4738 If the old jump target label (before the dispatch table) becomes unused,
4739 it and the dispatch table may be deleted. In that case, find the insn
4740 before the jump references that label and delete it and logical successors
4744 redirect_tablejump (jump
, nlabel
)
4747 register rtx olabel
= JUMP_LABEL (jump
);
4749 /* Add this jump to the jump_chain of NLABEL. */
4750 if (jump_chain
&& INSN_UID (nlabel
) < max_jump_chain
4751 && INSN_UID (jump
) < max_jump_chain
)
4753 jump_chain
[INSN_UID (jump
)] = jump_chain
[INSN_UID (nlabel
)];
4754 jump_chain
[INSN_UID (nlabel
)] = jump
;
4757 PATTERN (jump
) = gen_jump (nlabel
);
4758 JUMP_LABEL (jump
) = nlabel
;
4759 ++LABEL_NUSES (nlabel
);
4760 INSN_CODE (jump
) = -1;
4762 if (--LABEL_NUSES (olabel
) == 0)
4764 delete_labelref_insn (jump
, olabel
, 0);
4765 delete_insn (olabel
);
4769 /* Find the insn referencing LABEL that is a logical predecessor of INSN.
4770 If we found one, delete it and then delete this insn if DELETE_THIS is
4771 non-zero. Return non-zero if INSN or a predecessor references LABEL. */
4774 delete_labelref_insn (insn
, label
, delete_this
)
4781 if (GET_CODE (insn
) != NOTE
4782 && reg_mentioned_p (label
, PATTERN (insn
)))
4793 for (link
= LOG_LINKS (insn
); link
; link
= XEXP (link
, 1))
4794 if (delete_labelref_insn (XEXP (link
, 0), label
, 1))
4808 /* Like rtx_equal_p except that it considers two REGs as equal
4809 if they renumber to the same value and considers two commutative
4810 operations to be the same if the order of the operands has been
4813 ??? Addition is not commutative on the PA due to the weird implicit
4814 space register selection rules for memory addresses. Therefore, we
4815 don't consider a + b == b + a.
4817 We could/should make this test a little tighter. Possibly only
4818 disabling it on the PA via some backend macro or only disabling this
4819 case when the PLUS is inside a MEM. */
4822 rtx_renumbered_equal_p (x
, y
)
4826 register RTX_CODE code
= GET_CODE (x
);
4827 register const char *fmt
;
4832 if ((code
== REG
|| (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == REG
))
4833 && (GET_CODE (y
) == REG
|| (GET_CODE (y
) == SUBREG
4834 && GET_CODE (SUBREG_REG (y
)) == REG
)))
4836 int reg_x
= -1, reg_y
= -1;
4837 int word_x
= 0, word_y
= 0;
4839 if (GET_MODE (x
) != GET_MODE (y
))
4842 /* If we haven't done any renumbering, don't
4843 make any assumptions. */
4844 if (reg_renumber
== 0)
4845 return rtx_equal_p (x
, y
);
4849 reg_x
= REGNO (SUBREG_REG (x
));
4850 word_x
= SUBREG_WORD (x
);
4852 if (reg_renumber
[reg_x
] >= 0)
4854 reg_x
= reg_renumber
[reg_x
] + word_x
;
4862 if (reg_renumber
[reg_x
] >= 0)
4863 reg_x
= reg_renumber
[reg_x
];
4866 if (GET_CODE (y
) == SUBREG
)
4868 reg_y
= REGNO (SUBREG_REG (y
));
4869 word_y
= SUBREG_WORD (y
);
4871 if (reg_renumber
[reg_y
] >= 0)
4873 reg_y
= reg_renumber
[reg_y
];
4881 if (reg_renumber
[reg_y
] >= 0)
4882 reg_y
= reg_renumber
[reg_y
];
4885 return reg_x
>= 0 && reg_x
== reg_y
&& word_x
== word_y
;
4888 /* Now we have disposed of all the cases
4889 in which different rtx codes can match. */
4890 if (code
!= GET_CODE (y
))
4902 return INTVAL (x
) == INTVAL (y
);
4905 /* We can't assume nonlocal labels have their following insns yet. */
4906 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
4907 return XEXP (x
, 0) == XEXP (y
, 0);
4909 /* Two label-refs are equivalent if they point at labels
4910 in the same position in the instruction stream. */
4911 return (next_real_insn (XEXP (x
, 0))
4912 == next_real_insn (XEXP (y
, 0)));
4915 return XSTR (x
, 0) == XSTR (y
, 0);
4918 /* If we didn't match EQ equality above, they aren't the same. */
4925 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
4927 if (GET_MODE (x
) != GET_MODE (y
))
4930 /* For commutative operations, the RTX match if the operand match in any
4931 order. Also handle the simple binary and unary cases without a loop.
4933 ??? Don't consider PLUS a commutative operator; see comments above. */
4934 if ((code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
4936 return ((rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
4937 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)))
4938 || (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 1))
4939 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 0))));
4940 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
4941 return (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
4942 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)));
4943 else if (GET_RTX_CLASS (code
) == '1')
4944 return rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0));
4946 /* Compare the elements. If any pair of corresponding elements
4947 fail to match, return 0 for the whole things. */
4949 fmt
= GET_RTX_FORMAT (code
);
4950 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
4956 if (XWINT (x
, i
) != XWINT (y
, i
))
4961 if (XINT (x
, i
) != XINT (y
, i
))
4966 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
4971 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
4976 if (XEXP (x
, i
) != XEXP (y
, i
))
4983 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
4985 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4986 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
4997 /* If X is a hard register or equivalent to one or a subregister of one,
4998 return the hard register number. If X is a pseudo register that was not
4999 assigned a hard register, return the pseudo register number. Otherwise,
5000 return -1. Any rtx is valid for X. */
5006 if (GET_CODE (x
) == REG
)
5008 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
5009 return reg_renumber
[REGNO (x
)];
5012 if (GET_CODE (x
) == SUBREG
)
5014 int base
= true_regnum (SUBREG_REG (x
));
5015 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
5016 return SUBREG_WORD (x
) + base
;
/* Optimize code of the form:

	for (x = a[i]; x; ...)
	  ...
	for (x = a[i]; x; ...)
	  ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	    { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	    { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

static int *same_regs;

static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
5064 /* Called via note_stores on each insn between the target of the first
5065 branch and the second branch. It marks any changed registers. */
5068 mark_modified_reg (dest
, x
, data
)
5070 rtx x ATTRIBUTE_UNUSED
;
5071 void *data ATTRIBUTE_UNUSED
;
5075 if (GET_CODE (dest
) == SUBREG
)
5076 dest
= SUBREG_REG (dest
);
5078 if (GET_CODE (dest
) == MEM
)
5081 if (GET_CODE (dest
) != REG
)
5084 regno
= REGNO (dest
);
5085 if (regno
>= FIRST_PSEUDO_REGISTER
)
5086 modified_regs
[regno
] = 1;
5088 for (i
= 0; i
< HARD_REGNO_NREGS (regno
, GET_MODE (dest
)); i
++)
5089 modified_regs
[regno
+ i
] = 1;
5092 /* F is the first insn in the chain of insns. */
5095 thread_jumps (f
, max_reg
, flag_before_loop
)
5098 int flag_before_loop
;
5100 /* Basic algorithm is to find a conditional branch,
5101 the label it may branch to, and the branch after
5102 that label. If the two branches test the same condition,
5103 walk back from both branch paths until the insn patterns
5104 differ, or code labels are hit. If we make it back to
5105 the target of the first branch, then we know that the first branch
5106 will either always succeed or always fail depending on the relative
5107 senses of the two branches. So adjust the first branch accordingly
5110 rtx label
, b1
, b2
, t1
, t2
;
5111 enum rtx_code code1
, code2
;
5112 rtx b1op0
, b1op1
, b2op0
, b2op1
;
5117 /* Allocate register tables and quick-reset table. */
5118 modified_regs
= (char *) xmalloc (max_reg
* sizeof (char));
5119 same_regs
= (int *) xmalloc (max_reg
* sizeof (int));
5120 all_reset
= (int *) xmalloc (max_reg
* sizeof (int));
5121 for (i
= 0; i
< max_reg
; i
++)
5128 for (b1
= f
; b1
; b1
= NEXT_INSN (b1
))
5130 /* Get to a candidate branch insn. */
5131 if (GET_CODE (b1
) != JUMP_INSN
5132 || ! condjump_p (b1
) || simplejump_p (b1
)
5133 || JUMP_LABEL (b1
) == 0)
5136 bzero (modified_regs
, max_reg
* sizeof (char));
5139 bcopy ((char *) all_reset
, (char *) same_regs
,
5140 max_reg
* sizeof (int));
5143 label
= JUMP_LABEL (b1
);
5145 /* Look for a branch after the target. Record any registers and
5146 memory modified between the target and the branch. Stop when we
5147 get to a label since we can't know what was changed there. */
5148 for (b2
= NEXT_INSN (label
); b2
; b2
= NEXT_INSN (b2
))
5150 if (GET_CODE (b2
) == CODE_LABEL
)
5153 else if (GET_CODE (b2
) == JUMP_INSN
)
5155 /* If this is an unconditional jump and is the only use of
5156 its target label, we can follow it. */
5157 if (simplejump_p (b2
)
5158 && JUMP_LABEL (b2
) != 0
5159 && LABEL_NUSES (JUMP_LABEL (b2
)) == 1)
5161 b2
= JUMP_LABEL (b2
);
5168 if (GET_CODE (b2
) != CALL_INSN
&& GET_CODE (b2
) != INSN
)
5171 if (GET_CODE (b2
) == CALL_INSN
)
5174 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
5175 if (call_used_regs
[i
] && ! fixed_regs
[i
]
5176 && i
!= STACK_POINTER_REGNUM
5177 && i
!= FRAME_POINTER_REGNUM
5178 && i
!= HARD_FRAME_POINTER_REGNUM
5179 && i
!= ARG_POINTER_REGNUM
)
5180 modified_regs
[i
] = 1;
5183 note_stores (PATTERN (b2
), mark_modified_reg
, NULL
);
5186 /* Check the next candidate branch insn from the label
5189 || GET_CODE (b2
) != JUMP_INSN
5191 || ! condjump_p (b2
)
5192 || simplejump_p (b2
))
5195 /* Get the comparison codes and operands, reversing the
5196 codes if appropriate. If we don't have comparison codes,
5197 we can't do anything. */
5198 b1op0
= XEXP (XEXP (SET_SRC (PATTERN (b1
)), 0), 0);
5199 b1op1
= XEXP (XEXP (SET_SRC (PATTERN (b1
)), 0), 1);
5200 code1
= GET_CODE (XEXP (SET_SRC (PATTERN (b1
)), 0));
5201 if (XEXP (SET_SRC (PATTERN (b1
)), 1) == pc_rtx
)
5202 code1
= reverse_condition (code1
);
5204 b2op0
= XEXP (XEXP (SET_SRC (PATTERN (b2
)), 0), 0);
5205 b2op1
= XEXP (XEXP (SET_SRC (PATTERN (b2
)), 0), 1);
5206 code2
= GET_CODE (XEXP (SET_SRC (PATTERN (b2
)), 0));
5207 if (XEXP (SET_SRC (PATTERN (b2
)), 1) == pc_rtx
)
5208 code2
= reverse_condition (code2
);
5210 /* If they test the same things and knowing that B1 branches
5211 tells us whether or not B2 branches, check if we
5212 can thread the branch. */
5213 if (rtx_equal_for_thread_p (b1op0
, b2op0
, b2
)
5214 && rtx_equal_for_thread_p (b1op1
, b2op1
, b2
)
5215 && (comparison_dominates_p (code1
, code2
)
5216 || (can_reverse_comparison_p (XEXP (SET_SRC (PATTERN (b1
)),
5219 && comparison_dominates_p (code1
, reverse_condition (code2
)))))
5222 t1
= prev_nonnote_insn (b1
);
5223 t2
= prev_nonnote_insn (b2
);
5225 while (t1
!= 0 && t2
!= 0)
5229 /* We have reached the target of the first branch.
5230 If there are no pending register equivalents,
5231 we know that this branch will either always
5232 succeed (if the senses of the two branches are
5233 the same) or always fail (if not). */
5236 if (num_same_regs
!= 0)
5239 if (comparison_dominates_p (code1
, code2
))
5240 new_label
= JUMP_LABEL (b2
);
5242 new_label
= get_label_after (b2
);
5244 if (JUMP_LABEL (b1
) != new_label
)
5246 rtx prev
= PREV_INSN (new_label
);
5248 if (flag_before_loop
5249 && GET_CODE (prev
) == NOTE
5250 && NOTE_LINE_NUMBER (prev
) == NOTE_INSN_LOOP_BEG
)
5252 /* Don't thread to the loop label. If a loop
5253 label is reused, loop optimization will
5254 be disabled for that loop. */
5255 new_label
= gen_label_rtx ();
5256 emit_label_after (new_label
, PREV_INSN (prev
));
5258 changed
|= redirect_jump (b1
, new_label
);
5263 /* If either of these is not a normal insn (it might be
5264 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
5265 have already been skipped above.) Similarly, fail
5266 if the insns are different. */
5267 if (GET_CODE (t1
) != INSN
|| GET_CODE (t2
) != INSN
5268 || recog_memoized (t1
) != recog_memoized (t2
)
5269 || ! rtx_equal_for_thread_p (PATTERN (t1
),
5273 t1
= prev_nonnote_insn (t1
);
5274 t2
= prev_nonnote_insn (t2
);
5281 free (modified_regs
);
5286 /* This is like RTX_EQUAL_P except that it knows about our handling of
5287 possibly equivalent registers and knows to consider volatile and
5288 modified objects as not equal.
5290 YINSN is the insn containing Y. */
5293 rtx_equal_for_thread_p (x
, y
, yinsn
)
5299 register enum rtx_code code
;
5300 register const char *fmt
;
5302 code
= GET_CODE (x
);
5303 /* Rtx's of different codes cannot be equal. */
5304 if (code
!= GET_CODE (y
))
5307 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
5308 (REG:SI x) and (REG:HI x) are NOT equivalent. */
5310 if (GET_MODE (x
) != GET_MODE (y
))
5313 /* For floating-point, consider everything unequal. This is a bit
5314 pessimistic, but this pass would only rarely do anything for FP
5316 if (TARGET_FLOAT_FORMAT
== IEEE_FLOAT_FORMAT
5317 && FLOAT_MODE_P (GET_MODE (x
)) && ! flag_fast_math
)
5320 /* For commutative operations, the RTX match if the operand match in any
5321 order. Also handle the simple binary and unary cases without a loop. */
5322 if (code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
5323 return ((rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
5324 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
))
5325 || (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 1), yinsn
)
5326 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 0), yinsn
)));
5327 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
5328 return (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
5329 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
));
5330 else if (GET_RTX_CLASS (code
) == '1')
5331 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
5333 /* Handle special-cases first. */
5337 if (REGNO (x
) == REGNO (y
) && ! modified_regs
[REGNO (x
)])
5340 /* If neither is user variable or hard register, check for possible
5342 if (REG_USERVAR_P (x
) || REG_USERVAR_P (y
)
5343 || REGNO (x
) < FIRST_PSEUDO_REGISTER
5344 || REGNO (y
) < FIRST_PSEUDO_REGISTER
)
5347 if (same_regs
[REGNO (x
)] == -1)
5349 same_regs
[REGNO (x
)] = REGNO (y
);
5352 /* If this is the first time we are seeing a register on the `Y'
5353 side, see if it is the last use. If not, we can't thread the
5354 jump, so mark it as not equivalent. */
5355 if (REGNO_LAST_UID (REGNO (y
)) != INSN_UID (yinsn
))
5361 return (same_regs
[REGNO (x
)] == REGNO (y
));
5366 /* If memory modified or either volatile, not equivalent.
5367 Else, check address. */
5368 if (modified_mem
|| MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
5371 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
5374 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
5380 /* Cancel a pending `same_regs' if setting equivalenced registers.
5381 Then process source. */
5382 if (GET_CODE (SET_DEST (x
)) == REG
5383 && GET_CODE (SET_DEST (y
)) == REG
)
5385 if (same_regs
[REGNO (SET_DEST (x
))] == REGNO (SET_DEST (y
)))
5387 same_regs
[REGNO (SET_DEST (x
))] = -1;
5390 else if (REGNO (SET_DEST (x
)) != REGNO (SET_DEST (y
)))
5394 if (rtx_equal_for_thread_p (SET_DEST (x
), SET_DEST (y
), yinsn
) == 0)
5397 return rtx_equal_for_thread_p (SET_SRC (x
), SET_SRC (y
), yinsn
);
5400 return XEXP (x
, 0) == XEXP (y
, 0);
5403 return XSTR (x
, 0) == XSTR (y
, 0);
5412 fmt
= GET_RTX_FORMAT (code
);
5413 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
5418 if (XWINT (x
, i
) != XWINT (y
, i
))
5424 if (XINT (x
, i
) != XINT (y
, i
))
5430 /* Two vectors must have the same length. */
5431 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
5434 /* And the corresponding elements must match. */
5435 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5436 if (rtx_equal_for_thread_p (XVECEXP (x
, i
, j
),
5437 XVECEXP (y
, i
, j
), yinsn
) == 0)
5442 if (rtx_equal_for_thread_p (XEXP (x
, i
), XEXP (y
, i
), yinsn
) == 0)
5448 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
5453 /* These are just backpointers, so they don't matter. */
5460 /* It is believed that rtx's at this level will never
5461 contain anything but integers and other rtx's,
5462 except for within LABEL_REFs and SYMBOL_REFs. */
5471 #if !defined(HAVE_cc0) && !defined(HAVE_conditional_arithmetic)
5472 /* Return the insn that NEW can be safely inserted in front of starting at
5473 the jump insn INSN. Return 0 if it is not safe to do this jump
5474 optimization. Note that NEW must contain a single set. */
5477 find_insert_position (insn
, new)
5484 /* If NEW does not clobber, it is safe to insert NEW before INSN. */
5485 if (GET_CODE (PATTERN (new)) != PARALLEL
)
5488 for (i
= XVECLEN (PATTERN (new), 0) - 1; i
>= 0; i
--)
5489 if (GET_CODE (XVECEXP (PATTERN (new), 0, i
)) == CLOBBER
5490 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i
), 0),
5497 /* There is a good chance that the previous insn PREV sets the thing
5498 being clobbered (often the CC in a hard reg). If PREV does not
5499 use what NEW sets, we can insert NEW before PREV. */
5501 prev
= prev_active_insn (insn
);
5502 for (i
= XVECLEN (PATTERN (new), 0) - 1; i
>= 0; i
--)
5503 if (GET_CODE (XVECEXP (PATTERN (new), 0, i
)) == CLOBBER
5504 && reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (new), 0, i
), 0),
5506 && ! modified_in_p (XEXP (XVECEXP (PATTERN (new), 0, i
), 0),
5510 return reg_mentioned_p (SET_DEST (single_set (new)), prev
) ? 0 : prev
;
5512 #endif /* !HAVE_cc0 */