/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   has been taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.
   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.
   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
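
/* A note on representation (illustrative sketch only, see
   emit_delay_sequence and fill_simple_delay_slots below): once a slot is
   filled, the insn needing the slot and its delay insns are wrapped in a
   single SEQUENCE insn, roughly

       (insn (sequence [ (jump_insn ...)      ; the insn needing the slot
                         (insn ...) ]))       ; insn executing in its slot

   `final' recognizes this arrangement through FINAL_SEQUENCE when emitting
   assembly.  */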
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-flags.h"
#include "insn-attr.h"
#include "resource.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
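
/* With these fall-back definitions, a target that provides no annulling
   branches makes every candidate insn ineligible, so the annul-based
   strategies below simply never trigger.  */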
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
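
/* Illustrative sketch (not part of the pass itself): insns needing delay
   slots are pushed onto the obstack with obstack_ptr_grow and later walked
   as a plain array through the two macros above, e.g.

       obstack_ptr_grow (&unfilled_slots_obstack, insn);
       ...
       num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
       for (i = 0; i < num_unfilled_slots; i++)
         process (unfilled_slots_base[i]);

   `process' is only a placeholder here; the real loops appear in the
   fill_* routines below.  */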
/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
static int stop_search_p		PROTO((rtx, int));
static int resource_conflicts_p		PROTO((struct resources *,
					       struct resources *));
static int insn_references_resource_p	PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p		PROTO((rtx, struct resources *, int));
static rtx find_end_label		PROTO((void));
static rtx emit_delay_sequence		PROTO((rtx, rtx, int));
static rtx add_to_delay_list		PROTO((rtx, rtx));
static rtx delete_from_delay_slot	PROTO((rtx));
static void delete_scheduled_jump	PROTO((rtx));
static void note_delay_statistics	PROTO((int, int));
static rtx optimize_skip		PROTO((rtx));
static int get_jump_flags		PROTO((rtx, rtx));
static int rare_destination		PROTO((rtx));
static int mostly_true_jump		PROTO((rtx, rtx));
static rtx get_branch_condition		PROTO((rtx, rtx));
static int condition_dominates_p	PROTO((rtx, rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
static int check_annul_list_true_false	PROTO ((int, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns	PROTO((rtx, rtx));
static rtx redundant_insn		PROTO((rtx, rtx, rtx));
static int own_thread_p			PROTO((rtx, rtx, int));
static void update_block		PROTO((rtx, rtx));
static int reorg_redirect_jump		PROTO((rtx, rtx));
static void update_reg_dead_notes	PROTO((rtx, rtx));
static void fix_reg_dead_note		PROTO((rtx, rtx));
static void update_reg_unused_notes	PROTO((rtx, rtx));
static void fill_simple_delay_slots	PROTO((int));
static rtx fill_slots_from_thread	PROTO((rtx, rtx, rtx, rtx, int, int,
					       int, int, int *, rtx));
static void fill_eager_delay_slots	PROTO((void));
static void relax_delay_slots		PROTO((rtx));
static void make_return_insns		PROTO((rtx));
/* Return TRUE if this insn should stop the search for insn to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

stop_search_p (insn, labels_p)
  switch (GET_CODE (insn))
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)

  return (res1->regs & res2->regs) != HARD_CONST (0);

  for (i = 0; i < HARD_REG_SET_LONGS; i++)
    if ((res1->regs[i] & res2->regs[i]) != 0)
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

insn_references_resource_p (insn, res, include_delayed_effects)
     register struct resources *res;
     int include_delayed_effects;
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

insn_sets_resource_p (insn, res, include_delayed_effects)
     register struct resources *res;
     int include_delayed_effects;
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
/* Find a label at the end of the function or before a RETURN.  If there is

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);

  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;

      /* Otherwise, make a new label and emit a RETURN and BARRIER,
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);

	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);

	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);

  /* Show one additional use for this label so it won't go away until
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */

emit_delay_sequence (insn, list, length)
  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
    set_new_first_and_last_insn (first, seq_insn);

    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
      rtx tem = XEXP (li, 0);

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
    emit_barrier_after (seq_insn);
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

add_to_delay_list (insn, delay_list)
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
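
/* Typical use elsewhere in this file (shown only as an illustration):
   candidates are accumulated in execution order with

       delay_list = add_to_delay_list (trial, delay_list);

   and the finished list is eventually handed to emit_delay_sequence.  */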
/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

delete_from_delay_slot (insn)
  rtx trial, seq_insn, seq, prev;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);

    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

delete_scheduled_jump (insn)
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to

  if (reg_mentioned_p (cc0_rtx, insn))
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));

	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_from_delay_slot (trial);
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

note_delay_statistics (slots_filled, index)
     int slots_filled, index;
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn

	    (orig)		(skip)		(otherwise)
	    Bcc.n L1	Bcc',a L1	Bcc,a L1'

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform a similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */

  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point): Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
      if (eligible_for_annul_false (insn, 0, trial, flags))
	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);

      INSN_ANNULLED_BRANCH_P (insn) = 1;
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non conditional branches return no direction information and
   are predicted as very likely taken.  */

get_jump_flags (insn, label)
  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && INSN_UID (label) <= max_uid)
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
    /* No valid direction information.  */

  /* If insn is a conditional branch, call mostly_true_jump to
     determine the branch prediction.

     Non conditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));

	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  flags |= ATTR_FLAG_likely;
	  flags |= ATTR_FLAG_unlikely;
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);

      flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
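
/* Sketch of how these flags are consumed (illustration only): the returned
   value is passed straight through to the target's delay-slot predicates,
   e.g.

       flags = get_jump_flags (insn, JUMP_LABEL (insn));
       if (eligible_for_delay (insn, slot, trial, flags))
         ...

   so the direction and likelihood bits matter only to machine descriptions
   whose `define_delay' conditions actually test them.  */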
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

rare_destination (insn)
  for (; insn; insn = next)
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
  rtx target_label = JUMP_LABEL (jump_insn);
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  if (flag_branch_probabilities)
      rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
	  int prob = XINT (note, 0);

	  if (prob >= REG_BR_PROB_BASE * 9 / 10)
	  else if (prob >= REG_BR_PROB_BASE / 2)
	  else if (prob >= REG_BR_PROB_BASE / 10)

  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))

  /* If this is the test of a loop, it is very likely true.  We scan
     backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
     before the next real insn, we assume the branch is to the top of
  for (insn = PREV_INSN (target_label);
       insn && GET_CODE (insn) == NOTE;
       insn = PREV_INSN (insn))
    if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)

  /* If this is a jump to the test of a loop, it is likely true.  We scan
     forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
     before the next real insn, we assume the branch is to the loop branch
  for (insn = NEXT_INSN (target_label);
       insn && GET_CODE (insn) == NOTE;
       insn = NEXT_INSN (insn))
    if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
      /* Unconditional branch.  */
      if (XEXP (condition, 1) == const0_rtx)
      if (XEXP (condition, 1) == const0_rtx)

  /* Predict that backward branches will usually be taken and forward
     branches usually not.  If we don't know whether this is forward or
     backward, assume the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

get_branch_condition (insn, target)
  rtx pat = PATTERN (insn);

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
			   GET_MODE (XEXP (src, 0)),
			   XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

condition_dominates_p (condition, insn)
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)

  else if (condition == const_true_rtx || other_condition == 0)

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))

  return comparison_dominates_p (code, other_code);
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
#ifdef ANNUL_IFFALSE_SLOTS
	 (INSN_ANNULLED_BRANCH_P (jump)
	  && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	 ? eligible_for_annul_false (jump, i - 1,
				     XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	 (INSN_ANNULLED_BRANCH_P (jump)
	  && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	 ? eligible_for_annul_true (jump, i - 1,
				    XVECEXP (pat, 0, i), flags) :
#endif
	 eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))

  return (i == XVECLEN (pat, 0));
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;
  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
#ifdef ANNUL_IFFALSE_SLOTS
	 (INSN_ANNULLED_BRANCH_P (jump)
	  && INSN_FROM_TARGET_P (XEXP (li, 0)))
	 ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	 (INSN_ANNULLED_BRANCH_P (jump)
	  && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	 ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	 eligible_for_delay (jump, i, XEXP (li, 0), flags)))

  return (li == NULL);
/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

check_annul_list_true_false (annul_true_p, delay_list)
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
      rtx trial = XEXP (temp, 0);

      if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	  || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
     rtx insn, condition;
     struct resources *sets, *needed, *other_needed;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We can not steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We can not do this because we can
     not change the direction of the jump because the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, 1);
      if (insn_references_resource_p (XVECEXP (seq, 0, 0), &cc_set, 0))

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))

  for (i = 1; i < XVECLEN (seq, 0); i++)
      rtx trial = XVECEXP (seq, 0, i);

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

	   && ((condition == const_true_rtx
		|| (! insn_sets_resource_p (trial, other_needed, 0)
		    && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	     check_annul_list_true_false (0, delay_list)
	     && check_annul_list_true_false (0, new_delay_list)
	     && eligible_for_annul_false (insn, total_slots_filled,
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     struct resources *sets, *needed, *other_needed;
  int must_annul = *pannul_p;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)

  for (i = 1; i < XVECLEN (seq, 0); i++)
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
	  || sets_cc0_p (PATTERN (trial))

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	  delete_from_delay_slot (trial);

	   && ((condition == const_true_rtx
		|| (! insn_sets_resource_p (trial, other_needed, 0)
		    && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
1407 /* Try merging insns starting at THREAD which match exactly the insns in
1410 If all insns were matched and the insn was previously annulling, the
1411 annul bit will be cleared.
1413 For each insn that is merged, if the branch is or will be non-annulling,
1414 we delete the merged insn. */
1417 try_merge_delay_insns (insn
, thread
)
1420 rtx trial
, next_trial
;
1421 rtx delay_insn
= XVECEXP (PATTERN (insn
), 0, 0);
1422 int annul_p
= INSN_ANNULLED_BRANCH_P (delay_insn
);
1423 int slot_number
= 1;
1424 int num_slots
= XVECLEN (PATTERN (insn
), 0);
1425 rtx next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1426 struct resources set
, needed
;
1427 rtx merged_insns
= 0;
1431 flags
= get_jump_flags (delay_insn
, JUMP_LABEL (delay_insn
));
1433 CLEAR_RESOURCE (&needed
);
1434 CLEAR_RESOURCE (&set
);
1436 /* If this is not an annulling branch, take into account anything needed in
1437 INSN's delay slot. This prevents two increments from being incorrectly
1438 folded into one. If we are annulling, this would be the correct
1439 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1440 will essentially disable this optimization. This method is somewhat of
1441 a kludge, but I don't see a better way.) */
1443 for (i
= 1 ; i
< num_slots
; i
++)
1444 if (XVECEXP (PATTERN (insn
), 0, i
))
1445 mark_referenced_resources (XVECEXP (PATTERN (insn
), 0, i
), &needed
, 1);
1447 for (trial
= thread
; !stop_search_p (trial
, 1); trial
= next_trial
)
1449 rtx pat
= PATTERN (trial
);
1450 rtx oldtrial
= trial
;
1452 next_trial
= next_nonnote_insn (trial
);
1454 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1455 if (GET_CODE (trial
) == INSN
1456 && (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
))
1459 if (GET_CODE (next_to_match
) == GET_CODE (trial
)
1461 /* We can't share an insn that sets cc0. */
1462 && ! sets_cc0_p (pat
)
1464 && ! insn_references_resource_p (trial
, &set
, 1)
1465 && ! insn_sets_resource_p (trial
, &set
, 1)
1466 && ! insn_sets_resource_p (trial
, &needed
, 1)
1467 && (trial
= try_split (pat
, trial
, 0)) != 0
1468 /* Update next_trial, in case try_split succeeded. */
1469 && (next_trial
= next_nonnote_insn (trial
))
1470 /* Likewise THREAD. */
1471 && (thread
= oldtrial
== thread
? trial
: thread
)
1472 && rtx_equal_p (PATTERN (next_to_match
), PATTERN (trial
))
1473 /* Have to test this condition if annul condition is different
1474 from (and less restrictive than) non-annulling one. */
1475 && eligible_for_delay (delay_insn
, slot_number
- 1, trial
, flags
))
1480 update_block (trial
, thread
);
1481 if (trial
== thread
)
1482 thread
= next_active_insn (thread
);
1484 delete_insn (trial
);
1485 INSN_FROM_TARGET_P (next_to_match
) = 0;
1488 merged_insns
= gen_rtx_INSN_LIST (VOIDmode
, trial
, merged_insns
);
1490 if (++slot_number
== num_slots
)
1493 next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1496 mark_set_resources (trial
, &set
, 0, 1);
1497 mark_referenced_resources (trial
, &needed
, 1);
1500 /* See if we stopped on a filled insn. If we did, try to see if its
1501 delay slots match. */
1502 if (slot_number
!= num_slots
1503 && trial
&& GET_CODE (trial
) == INSN
1504 && GET_CODE (PATTERN (trial
)) == SEQUENCE
1505 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial
), 0, 0)))
1507 rtx pat
= PATTERN (trial
);
1508 rtx filled_insn
= XVECEXP (pat
, 0, 0);
1510 /* Account for resources set/needed by the filled insn. */
1511 mark_set_resources (filled_insn
, &set
, 0, 1);
1512 mark_referenced_resources (filled_insn
, &needed
, 1);
1514 for (i
= 1; i
< XVECLEN (pat
, 0); i
++)
1516 rtx dtrial
= XVECEXP (pat
, 0, i
);
1518 if (! insn_references_resource_p (dtrial
, &set
, 1)
1519 && ! insn_sets_resource_p (dtrial
, &set
, 1)
1520 && ! insn_sets_resource_p (dtrial
, &needed
, 1)
1522 && ! sets_cc0_p (PATTERN (dtrial
))
1524 && rtx_equal_p (PATTERN (next_to_match
), PATTERN (dtrial
))
1525 && eligible_for_delay (delay_insn
, slot_number
- 1, dtrial
, flags
))
1531 update_block (dtrial
, thread
);
1532 new = delete_from_delay_slot (dtrial
);
1533 if (INSN_DELETED_P (thread
))
1535 INSN_FROM_TARGET_P (next_to_match
) = 0;
1538 merged_insns
= gen_rtx_INSN_LIST (SImode
, dtrial
,
1541 if (++slot_number
== num_slots
)
1544 next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1548 /* Keep track of the set/referenced resources for the delay
1549 slots of any trial insns we encounter. */
1550 mark_set_resources (dtrial
, &set
, 0, 1);
1551 mark_referenced_resources (dtrial
, &needed
, 1);
  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we no longer need to.  In that case delete all the
     merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn in
     the delay list so that we know that it isn't only being used at the
     target.  */
1561 if (slot_number
== num_slots
&& annul_p
)
1563 for (; merged_insns
; merged_insns
= XEXP (merged_insns
, 1))
1565 if (GET_MODE (merged_insns
) == SImode
)
1569 update_block (XEXP (merged_insns
, 0), thread
);
1570 new = delete_from_delay_slot (XEXP (merged_insns
, 0));
1571 if (INSN_DELETED_P (thread
))
1576 update_block (XEXP (merged_insns
, 0), thread
);
1577 delete_insn (XEXP (merged_insns
, 0));
1581 INSN_ANNULLED_BRANCH_P (delay_insn
) = 0;
1583 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1584 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn
), 0, i
)) = 0;
1588 /* See if INSN is redundant with an insn in front of TARGET. Often this
1589 is called when INSN is a candidate for a delay slot of TARGET.
1590 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1591 of INSN. Often INSN will be redundant with an insn in a delay slot of
1592 some previous insn. This happens when we have a series of branches to the
1593 same label; in that case the first insn at the target might want to go
1594 into each of the delay slots.
1596 If we are not careful, this routine can take up a significant fraction
1597 of the total compilation time (4%), but only wins rarely. Hence we
1598 speed this routine up by making two passes. The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
1600 pattern. Only in this (relatively rare) event does it check for
1603 We do not split insns we encounter. This could cause us not to find a
1604 redundant insn, but the cost of splitting seems greater than the possible
1605 gain in rare cases. */
1608 redundant_insn (insn
, target
, delay_list
)
1613 rtx target_main
= target
;
1614 rtx ipat
= PATTERN (insn
);
1616 struct resources needed
, set
;
1619 /* If INSN has any REG_UNUSED notes, it can't match anything since we
1620 are allowed to not actually assign to such a register. */
1621 if (find_reg_note (insn
, REG_UNUSED
, NULL_RTX
) != 0)
1624 /* Scan backwards looking for a match. */
1625 for (trial
= PREV_INSN (target
); trial
; trial
= PREV_INSN (trial
))
1627 if (GET_CODE (trial
) == CODE_LABEL
)
1630 if (GET_RTX_CLASS (GET_CODE (trial
)) != 'i')
1633 pat
= PATTERN (trial
);
1634 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
1637 if (GET_CODE (pat
) == SEQUENCE
)
1639 /* Stop for a CALL and its delay slots because it is difficult to
1640 track its resource needs correctly. */
1641 if (GET_CODE (XVECEXP (pat
, 0, 0)) == CALL_INSN
)
1644 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1645 slots because it is difficult to track its resource needs
1648 #ifdef INSN_SETS_ARE_DELAYED
1649 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat
, 0, 0)))
1653 #ifdef INSN_REFERENCES_ARE_DELAYED
1654 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat
, 0, 0)))
1658 /* See if any of the insns in the delay slot match, updating
1659 resource requirements as we go. */
1660 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; i
--)
1661 if (GET_CODE (XVECEXP (pat
, 0, i
)) == GET_CODE (insn
)
1662 && rtx_equal_p (PATTERN (XVECEXP (pat
, 0, i
)), ipat
)
1663 && ! find_reg_note (XVECEXP (pat
, 0, i
), REG_UNUSED
, NULL_RTX
))
1666 /* If found a match, exit this loop early. */
1671 else if (GET_CODE (trial
) == GET_CODE (insn
) && rtx_equal_p (pat
, ipat
)
1672 && ! find_reg_note (trial
, REG_UNUSED
, NULL_RTX
))
1676 /* If we didn't find an insn that matches, return 0. */
1680 /* See what resources this insn sets and needs. If they overlap, or
1681 if this insn references CC0, it can't be redundant. */
1683 CLEAR_RESOURCE (&needed
);
1684 CLEAR_RESOURCE (&set
);
1685 mark_set_resources (insn
, &set
, 0, 1);
1686 mark_referenced_resources (insn
, &needed
, 1);
1688 /* If TARGET is a SEQUENCE, get the main insn. */
1689 if (GET_CODE (target
) == INSN
&& GET_CODE (PATTERN (target
)) == SEQUENCE
)
1690 target_main
= XVECEXP (PATTERN (target
), 0, 0);
1692 if (resource_conflicts_p (&needed
, &set
)
1694 || reg_mentioned_p (cc0_rtx
, ipat
)
1696 /* The insn requiring the delay may not set anything needed or set by
1698 || insn_sets_resource_p (target_main
, &needed
, 1)
1699 || insn_sets_resource_p (target_main
, &set
, 1))
1702 /* Insns we pass may not set either NEEDED or SET, so merge them for
1704 needed
.memory
|= set
.memory
;
1705 needed
.unch_memory
|= set
.unch_memory
;
1706 IOR_HARD_REG_SET (needed
.regs
, set
.regs
);
1708 /* This insn isn't redundant if it conflicts with an insn that either is
1709 or will be in a delay slot of TARGET. */
1713 if (insn_sets_resource_p (XEXP (delay_list
, 0), &needed
, 1))
1715 delay_list
= XEXP (delay_list
, 1);
1718 if (GET_CODE (target
) == INSN
&& GET_CODE (PATTERN (target
)) == SEQUENCE
)
1719 for (i
= 1; i
< XVECLEN (PATTERN (target
), 0); i
++)
1720 if (insn_sets_resource_p (XVECEXP (PATTERN (target
), 0, i
), &needed
, 1))
1723 /* Scan backwards until we reach a label or an insn that uses something
1724 INSN sets or sets something insn uses or sets. */
1726 for (trial
= PREV_INSN (target
);
1727 trial
&& GET_CODE (trial
) != CODE_LABEL
;
1728 trial
= PREV_INSN (trial
))
1730 if (GET_CODE (trial
) != INSN
&& GET_CODE (trial
) != CALL_INSN
1731 && GET_CODE (trial
) != JUMP_INSN
)
1734 pat
= PATTERN (trial
);
1735 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
1738 if (GET_CODE (pat
) == SEQUENCE
)
1740 /* If this is a CALL_INSN and its delay slots, it is hard to track
1741 the resource needs properly, so give up. */
1742 if (GET_CODE (XVECEXP (pat
, 0, 0)) == CALL_INSN
)
1745 /* If this is an INSN or JUMP_INSN with delayed effects, it
1746 is hard to track the resource needs properly, so give up. */
1748 #ifdef INSN_SETS_ARE_DELAYED
1749 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat
, 0, 0)))
1753 #ifdef INSN_REFERENCES_ARE_DELAYED
1754 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat
, 0, 0)))
1758 /* See if any of the insns in the delay slot match, updating
1759 resource requirements as we go. */
1760 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; i
--)
1762 rtx candidate
= XVECEXP (pat
, 0, i
);
1764 /* If an insn will be annulled if the branch is false, it isn't
1765 considered as a possible duplicate insn. */
1766 if (rtx_equal_p (PATTERN (candidate
), ipat
)
1767 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat
, 0, 0))
1768 && INSN_FROM_TARGET_P (candidate
)))
1770 /* Show that this insn will be used in the sequel. */
1771 INSN_FROM_TARGET_P (candidate
) = 0;
1775 /* Unless this is an annulled insn from the target of a branch,
1776 we must stop if it sets anything needed or set by INSN. */
1777 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat
, 0, 0))
1778 || ! INSN_FROM_TARGET_P (candidate
))
1779 && insn_sets_resource_p (candidate
, &needed
, 1))
1784 /* If the insn requiring the delay slot conflicts with INSN, we
1786 if (insn_sets_resource_p (XVECEXP (pat
, 0, 0), &needed
, 1))
1791 /* See if TRIAL is the same as INSN. */
1792 pat
= PATTERN (trial
);
1793 if (rtx_equal_p (pat
, ipat
))
1796 /* Can't go any further if TRIAL conflicts with INSN. */
1797 if (insn_sets_resource_p (trial
, &needed
, 1))
/* Return 1 if THREAD can only be executed in one way.  If LABEL is non-zero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is non-zero, we are allowed to fall into this thread; otherwise, we are

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */

own_thread_p (thread, label, allow_fallthrough)
     int allow_fallthrough;
  /* We don't own the function end.  */

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
	&& (insn != label || LABEL_NUSES (insn) != 1))

  if (allow_fallthrough)

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
	|| GET_CODE (insn) == CODE_LABEL
	|| (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

update_block (insn, where)
  /* Ignore if this was in a delay slot and it came from the target of
  if (INSN_FROM_TARGET_P (insn))

  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  incr_ticks_for_insn (insn);
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

reorg_redirect_jump (jump, nlabel)
  incr_ticks_for_insn (jump);
  return redirect_jump (jump, nlabel);
/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */

update_reg_dead_notes (insn, delayed_insn)
     rtx insn, delayed_insn;
{
  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
        next = XEXP (link, 1);

        if (REG_NOTE_KIND (link) != REG_DEAD
            || GET_CODE (XEXP (link, 0)) != REG)
          continue;

        if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
          {
            /* Move the REG_DEAD note from P to INSN.  */
            remove_note (p, link);
            XEXP (link, 1) = REG_NOTES (insn);
            REG_NOTES (insn) = link;
          }
      }
}
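/* Assumed example, added for clarity and not taken from the original file:
   if INSN uses r4 and some later insn P carries a (REG_DEAD r4) note, moving
   P away as redundant would leave a REG_DEAD note sitting just before a
   CODE_LABEL, and mark_target_live_regs could wrongly decide r4 is dead
   there.  Moving the note onto INSN preserves the correct lifetime.  (The
   register name r4 is hypothetical.)  */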
/* Called when an insn redundant with start_insn is deleted.  If there
   is a REG_DEAD note for the target of start_insn between start_insn
   and stop_insn, then the REG_DEAD note needs to be deleted since the
   value no longer dies there.

   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
   confused into thinking the register is dead.  */

fix_reg_dead_note (start_insn, stop_insn)
     rtx start_insn, stop_insn;
{
  for (p = next_nonnote_insn (start_insn); p != stop_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
        next = XEXP (link, 1);

        if (REG_NOTE_KIND (link) != REG_DEAD
            || GET_CODE (XEXP (link, 0)) != REG)
          continue;

        if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
          {
            remove_note (p, link);
            return;
          }
      }
}
/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.

   This handles the case of udivmodXi4 instructions which optimize their
   output depending on whether any REG_UNUSED notes are present.
   We must make sure that INSN calculates as many results as REDUNDANT_INSN
   (and possibly more).  */

update_reg_unused_notes (insn, redundant_insn)
     rtx insn, redundant_insn;
{
  for (link = REG_NOTES (insn); link; link = next)
    {
      next = XEXP (link, 1);

      if (REG_NOTE_KIND (link) != REG_UNUSED
          || GET_CODE (XEXP (link, 0)) != REG)
        continue;

      if (! find_regno_note (redundant_insn, REG_UNUSED,
                             REGNO (XEXP (link, 0))))
        remove_note (insn, link);
    }
}
/* Scan a function looking for insns that need a delay slot and find insns to
   put into the delay slot.

   NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
   as calls).  We do these first since we don't want jump insns (that are
   easier to fill) to get the only insns that could be used for non-jump insns.
   When it is zero, only try to fill JUMP_INSNs.

   When slots are filled in this manner, the insns (including the
   delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
   it is possible to tell whether a delay slot has really been filled
   or not.  `final' knows how to deal with this, by communicating
   through FINAL_SEQUENCE.  */
fill_simple_delay_slots (non_jumps_p)
     int non_jumps_p;
{
  register rtx insn, pat, trial, next_trial;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
  struct resources needed, set;
  int slots_to_fill, slots_filled;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      /* Get the next insn to fill.  If it has already had any slots assigned,
         we can't do anything with it.  Maybe we'll improve this later.  */

      insn = unfilled_slots_base[i];
      if (insn == 0
          || INSN_DELETED_P (insn)
          || (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
          || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
          || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
        continue;

      /* It may have been that this insn used to need delay slots, but
         now doesn't; ignore in that case.  This can happen, for example,
         on the HP PA RISC, where the number of delay slots depends on
         what insns are nearby.  */
      slots_to_fill = num_delay_slots (insn);
      /* Some machine descriptions have defined instructions to have
         delay slots only in certain circumstances which may depend on
         nearby insns (which change due to reorg's actions).

         For example, the PA port normally has delay slots for unconditional
         jumps.

         However, the PA port claims such jumps do not have a delay slot
         if they are immediate successors of certain CALL_INSNs.  This
         allows the port to favor filling the delay slot of the call with
         the unconditional jump.  */
      if (slots_to_fill == 0)
        continue;

      /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
         says how many.  After initialization, first try optimizing

         call _foo              call _foo
         nop                    add %o7,.-L1,%o7
         b,a L1
         nop

         If this case applies, the delay slot of the call is filled with
         the unconditional jump.  This is done first to avoid having the
         delay slot of the call filled in the backward scan.  Also, since
         the unconditional jump is likely to also have a delay slot, that
         insn must exist when it is subsequently scanned.

         This is tried on each insn with delay slots as some machines
         have insns which perform calls, but are not represented as
         CALL_INSNs.  */

      slots_filled = 0;
      delay_list = 0;

      if (GET_CODE (insn) == JUMP_INSN)
        flags = get_jump_flags (insn, JUMP_LABEL (insn));
      else
        flags = get_jump_flags (insn, NULL_RTX);
      if ((trial = next_active_insn (insn))
          && GET_CODE (trial) == JUMP_INSN
          && simplejump_p (trial)
          && eligible_for_delay (insn, slots_filled, trial, flags)
          && no_labels_between_p (insn, trial))
        {
          delay_list = add_to_delay_list (trial, delay_list);

          /* TRIAL may have had its delay slot filled, then unfilled.  When
             the delay slot is unfilled, TRIAL is placed back on the unfilled
             slots obstack.  Unfortunately, it is placed on the end of the
             obstack, not in its original location.  Therefore, we must search
             from entry i + 1 to the end of the unfilled slots obstack to
             try and find TRIAL.  */
          tmp = &unfilled_slots_base[i + 1];
          while (*tmp != trial && tmp != unfilled_slots_next)
            tmp++;

          /* Remove the unconditional jump from consideration for delay slot
             filling and unthread it.  */
          if (*tmp == trial)
            *tmp = 0;
          {
            rtx next = NEXT_INSN (trial);
            rtx prev = PREV_INSN (trial);

            if (prev)
              NEXT_INSN (prev) = next;
            if (next)
              PREV_INSN (next) = prev;
          }
        }
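      /* Hedged before/after sketch of the case handled above; the SPARC-like
         syntax and label names are assumed, not taken from the original
         comments:

             call _bar                  call _bar
             jmp  L2          ==>        jmp L2        ; now in the delay slot

         The unconditional jump is added to the call's delay list and then
         unthreaded from the insn stream so it is not considered again.  */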
      /* Now, scan backwards from the insn to search for a potential
         delay-slot candidate.  Stop searching when a label or jump is hit.

         For each candidate, if it is to go into the delay slot (moved
         forward in execution sequence), it must not need or set any resources
         that were set by later insns and must not set any resources that
         are needed for those insns.

         The delay slot insn itself sets resources unless it is a call
         (in which case the called routine, not the insn itself, is doing
         the setting).  */

      if (slots_filled < slots_to_fill)
        {
          CLEAR_RESOURCE (&needed);
          CLEAR_RESOURCE (&set);
          mark_set_resources (insn, &set, 0, 0);
          mark_referenced_resources (insn, &needed, 0);

          for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
               trial = next_trial)
            {
              next_trial = prev_nonnote_insn (trial);

              /* This must be an INSN or CALL_INSN.  */
              pat = PATTERN (trial);

              /* USE and CLOBBER at this level were just for flow; ignore them.  */
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
                continue;

              /* Check for resource conflict first, to avoid unnecessary
                 splitting.  */
              if (! insn_references_resource_p (trial, &set, 1)
                  && ! insn_sets_resource_p (trial, &set, 1)
                  && ! insn_sets_resource_p (trial, &needed, 1)
                  /* Can't separate set of cc0 from its use.  */
                  && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat)))
                {
                  trial = try_split (pat, trial, 1);
                  next_trial = prev_nonnote_insn (trial);
                  if (eligible_for_delay (insn, slots_filled, trial, flags))
                    {
                      /* In this case, we are searching backward, so if we
                         find insns to put on the delay list, we want
                         to put them at the head, rather than the
                         tail, of the list.  */

                      update_reg_dead_notes (trial, insn);
                      delay_list = gen_rtx_INSN_LIST (VOIDmode,
                                                      trial, delay_list);
                      update_block (trial, trial);
                      delete_insn (trial);
                      if (slots_to_fill == ++slots_filled)
                        break;
                      continue;
                    }
                }

              mark_set_resources (trial, &set, 0, 1);
              mark_referenced_resources (trial, &needed, 1);
            }
        }
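      /* Hypothetical illustration of the backward scan above, added for
         clarity (MIPS-like syntax and register names are assumed):

             add  $4,$5,$6
             beq  $7,$0,L3      becomes      beq  $7,$0,L3
              nop                             add  $4,$5,$6

         The add neither uses nor sets anything the branch needs, so it can
         be moved forward into the branch's delay slot.  */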
      /* If all needed slots haven't been filled, we come here.  */

      /* Try to optimize the case of jumping around a single insn.  */
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
      if (slots_filled != slots_to_fill
          && GET_CODE (insn) == JUMP_INSN
          && (condjump_p (insn) || condjump_in_parallel_p (insn)))
        {
          delay_list = optimize_skip (insn);
          if (delay_list)
            slots_filled += 1;
        }
#endif
      /* Try to get insns from beyond the insn needing the delay slot.
         These insns can neither set nor reference resources set in insns being
         skipped, cannot set resources in the insn being skipped, and, if this
         is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
         call might not return).

         There used to be code which continued past the target label if
         we saw all uses of the target label.  This code did not work,
         because it failed to account for some instructions which were
         both annulled and marked as from the target.  This can happen as a
         result of optimize_skip.  Since this code was redundant with
         fill_eager_delay_slots anyway, it was just deleted.  */

      if (slots_filled != slots_to_fill
          && (GET_CODE (insn) != JUMP_INSN
              || ((condjump_p (insn) || condjump_in_parallel_p (insn))
                  && ! simplejump_p (insn)
                  && JUMP_LABEL (insn) != 0)))
        {
          int maybe_never = 0;
          struct resources needed_at_jump;

          CLEAR_RESOURCE (&needed);
          CLEAR_RESOURCE (&set);

          if (GET_CODE (insn) == CALL_INSN)
            {
              mark_set_resources (insn, &set, 0, 1);
              mark_referenced_resources (insn, &needed, 1);
              maybe_never = 1;
            }
          else
            {
              mark_set_resources (insn, &set, 0, 1);
              mark_referenced_resources (insn, &needed, 1);
              if (GET_CODE (insn) == JUMP_INSN)
                target = JUMP_LABEL (insn);
            }
          for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
            {
              rtx pat, trial_delay;

              next_trial = next_nonnote_insn (trial);

              if (GET_CODE (trial) == CODE_LABEL
                  || GET_CODE (trial) == BARRIER)
                break;

              /* We must have an INSN, JUMP_INSN, or CALL_INSN.  */
              pat = PATTERN (trial);

              /* Stand-alone USE and CLOBBER are just for flow.  */
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
                continue;

              /* If this already has filled delay slots, get the insn needing
                 the delay slots.  */
              if (GET_CODE (pat) == SEQUENCE)
                trial_delay = XVECEXP (pat, 0, 0);
              else
                trial_delay = trial;

              /* If this is a jump insn to our target, indicate that we have
                 seen another jump to it.  If we aren't handling a conditional
                 jump, stop our search.  Otherwise, compute the needs at its
                 target and add them to NEEDED.  */
              if (GET_CODE (trial_delay) == JUMP_INSN)
                {
                  if (target == 0)
                    break;
                  else if (JUMP_LABEL (trial_delay) != target)
                    {
                      rtx ninsn =
                        next_active_insn (JUMP_LABEL (trial_delay));

                      mark_target_live_regs (get_insns (), ninsn,
                                             &needed_at_jump);
                      needed.memory |= needed_at_jump.memory;
                      needed.unch_memory |= needed_at_jump.unch_memory;
                      IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
                    }
                }

              /* See if we have a resource problem before we try to
                 split.  */
              if (GET_CODE (trial) == INSN
                  && GET_CODE (pat) != SEQUENCE
                  && ! insn_references_resource_p (trial, &set, 1)
                  && ! insn_sets_resource_p (trial, &set, 1)
                  && ! insn_sets_resource_p (trial, &needed, 1)
                  && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
                  && ! (maybe_never && may_trap_p (pat))
                  && (trial = try_split (pat, trial, 0))
                  && eligible_for_delay (insn, slots_filled, trial, flags))
                {
                  next_trial = next_nonnote_insn (trial);
                  delay_list = add_to_delay_list (trial, delay_list);

                  if (reg_mentioned_p (cc0_rtx, pat))
                    link_cc0_insns (trial);

                  delete_insn (trial);
                  if (slots_to_fill == ++slots_filled)
                    break;
                  continue;
                }

              mark_set_resources (trial, &set, 0, 1);
              mark_referenced_resources (trial, &needed, 1);

              /* Ensure we don't put insns between the setting of cc and the
                 comparison by moving a setting of cc into an earlier delay
                 slot since these insns could clobber the condition code.  */
              set.cc = 1;

              /* If this is a call or jump, we might not get here.  */
              if (GET_CODE (trial_delay) == CALL_INSN
                  || GET_CODE (trial_delay) == JUMP_INSN)
                maybe_never = 1;
            }
          /* If there are slots left to fill and our search was stopped by an
             unconditional branch, try the insn at the branch target.  We can
             redirect the branch if it works.

             Don't do this if the insn at the branch target is a branch.  */
          if (slots_to_fill != slots_filled
              && trial
              && GET_CODE (trial) == JUMP_INSN
              && simplejump_p (trial)
              && (target == 0 || JUMP_LABEL (trial) == target)
              && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
              && ! (GET_CODE (next_trial) == INSN
                    && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
              && GET_CODE (next_trial) != JUMP_INSN
              && ! insn_references_resource_p (next_trial, &set, 1)
              && ! insn_sets_resource_p (next_trial, &set, 1)
              && ! insn_sets_resource_p (next_trial, &needed, 1)
              && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
              && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
              && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
              && eligible_for_delay (insn, slots_filled, next_trial, flags))
            {
              rtx new_label = next_active_insn (next_trial);

              if (new_label != 0)
                new_label = get_label_before (new_label);
              else
                new_label = find_end_label ();

              delay_list
                = add_to_delay_list (copy_rtx (next_trial), delay_list);
              slots_filled++;
              reorg_redirect_jump (trial, new_label);

              /* If we merged because we both jumped to the same place,
                 redirect the original insn also.  */
              if (target)
                reorg_redirect_jump (insn, new_label);
            }
        }
      /* If this is an unconditional jump, then try to get insns from the
         target of the jump.  */
      if (GET_CODE (insn) == JUMP_INSN
          && simplejump_p (insn)
          && slots_filled != slots_to_fill)
        delay_list
          = fill_slots_from_thread (insn, const_true_rtx,
                                    next_active_insn (JUMP_LABEL (insn)),
                                    own_thread_p (JUMP_LABEL (insn),
                                                  JUMP_LABEL (insn), 0),
                                    slots_to_fill, &slots_filled,
                                    delay_list);

      if (delay_list)
        unfilled_slots_base[i]
          = emit_delay_sequence (insn, delay_list, slots_filled);

      if (slots_to_fill == slots_filled)
        unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 0);
    }
#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* See if the epilogue needs any delay slots.  Try to fill them if so.
     The only thing we can do is scan backwards from the end of the
     function.  If we did this in a previous pass, it is incorrect to do it
     again.  */
  if (current_function_epilogue_delay_list)
    return;

  slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
  if (slots_to_fill == 0)
    return;

  slots_filled = 0;
  CLEAR_RESOURCE (&set);

  /* The frame pointer and stack pointer are needed at the beginning of
     the epilogue, so instructions setting them can not be put in the
     epilogue delay slot.  However, everything else needed at function
     end is safe, so we don't want to use end_of_function_needs here.  */
  CLEAR_RESOURCE (&needed);
  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK
          || current_function_sp_is_unchanging)
#endif
        SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);

#ifdef EPILOGUE_USES
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EPILOGUE_USES (i))
      SET_HARD_REG_BIT (needed.regs, i);
#endif

  for (trial = get_last_insn (); ! stop_search_p (trial, 1);
       trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) == NOTE)
        continue;
      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (! insn_references_resource_p (trial, &set, 1)
          && ! insn_sets_resource_p (trial, &needed, 1)
          && ! insn_sets_resource_p (trial, &set, 1)
          /* Don't want to mess with cc0 here.  */
          && ! reg_mentioned_p (cc0_rtx, pat))
        {
          trial = try_split (pat, trial, 1);
          if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
            {
              /* Here as well we are searching backward, so put the
                 insns we find on the head of the list.  */

              current_function_epilogue_delay_list
                = gen_rtx_INSN_LIST (VOIDmode, trial,
                                     current_function_epilogue_delay_list);
              mark_end_of_function_resources (trial, 1);
              update_block (trial, trial);
              delete_insn (trial);

              /* Clear deleted bit so final.c will output the insn.  */
              INSN_DELETED_P (trial) = 0;

              if (slots_to_fill == ++slots_filled)
                break;
              continue;
            }
        }

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }

  note_delay_statistics (slots_filled, 0);
#endif
}
/* Try to find insns to place in delay slots.

   INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
   or is an unconditional branch if CONDITION is const_true_rtx.
   *PSLOTS_FILLED is updated with the number of slots that we have filled.

   THREAD is a flow-of-control, either the insns to be executed if the
   branch is true or if the branch is false, THREAD_IF_TRUE says which.

   OPPOSITE_THREAD is the thread in the opposite direction.  It is used
   to see if any potential delay slot insns set things needed there.

   LIKELY is non-zero if it is extremely likely that the branch will be
   taken and THREAD_IF_TRUE is set.  This is used for the branch at the
   end of a loop back up to the top.

   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
   thread.  I.e., it is the fallthrough code of our jump or the target of the
   jump when we are the only jump going there.

   If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
   case, we can only take insns from the head of the thread for our delay
   slot.  We then adjust the jump to point after the insns we have taken.  */
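/* For instance (an added illustration, not part of the original comment;
   the register and label names are hypothetical): for a branch "beq r2,L7",
   the insns at L7 form one thread and the fall-through insns the other;
   THREAD_IF_TRUE selects which of the two we are filling from, and
   OPPOSITE_NEEDED describes what the other path still requires, so that a
   stolen insn does not clobber something needed there.  */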
fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
                        thread_if_true, own_thread,
                        slots_to_fill, pslots_filled, delay_list)
     rtx thread, opposite_thread;
     int slots_to_fill, *pslots_filled;
{
  struct resources opposite_needed, set, needed;

  /* Validate our arguments.  */
  if ((condition == const_true_rtx && ! thread_if_true)
      || (! own_thread && ! thread_if_true))
    abort ();

  flags = get_jump_flags (insn, JUMP_LABEL (insn));
  /* If our thread is the end of subroutine, we can't get any delay
     insns from that.  */
  if (thread == 0)
    return delay_list;

  /* If this is an unconditional branch, nothing is needed at the
     opposite thread.  Otherwise, compute what is needed there.  */
  if (condition == const_true_rtx)
    CLEAR_RESOURCE (&opposite_needed);
  else
    mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);

  /* If the insn at THREAD can be split, do it here to avoid having to
     update THREAD and NEW_THREAD if it is done in the loop below.  Also
     initialize NEW_THREAD.  */

  new_thread = thread = try_split (PATTERN (thread), thread, 0);

  /* Scan insns at THREAD.  We are looking for an insn that can be removed
     from THREAD (it neither sets nor references resources that were set
     ahead of it and it doesn't set anything needed by the insns ahead of
     it) and that either can be placed in an annulling insn or isn't
     needed at OPPOSITE_THREAD.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If we do not own this thread, we must stop as soon as we find
     something that we can't put in a delay slot, since all we can do
     is branch into THREAD at a later point.  Therefore, labels stop
     the search if this is not the `true' thread.  */
  for (trial = thread;
       ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
       trial = next_nonnote_insn (trial))
    {
      /* If we have passed a label, we no longer own this thread.  */
      if (GET_CODE (trial) == CODE_LABEL)
        {
          own_thread = 0;
          continue;
        }

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
         don't separate or copy insns that set and use CC0.  */
      if (! insn_references_resource_p (trial, &set, 1)
          && ! insn_sets_resource_p (trial, &set, 1)
          && ! insn_sets_resource_p (trial, &needed, 1)
          && ! (reg_mentioned_p (cc0_rtx, pat)
                && (! own_thread || ! sets_cc0_p (pat))))
        {
          /* If TRIAL is redundant with some insn before INSN, we don't
             actually need to add it to the delay list; we can merely pretend
             we did.  */
          if ((prior_insn = redundant_insn (trial, insn, delay_list)))
            {
              fix_reg_dead_note (prior_insn, insn);
              if (own_thread)
                {
                  update_block (trial, thread);
                  if (trial == thread)
                    {
                      thread = next_active_insn (thread);
                      if (new_thread == trial)
                        new_thread = thread;
                    }

                  delete_insn (trial);
                }
              else
                {
                  update_reg_unused_notes (prior_insn, trial);
                  new_thread = next_active_insn (trial);
                }

              continue;
            }
          /* There are two ways we can win:  If TRIAL doesn't set anything
             needed at the opposite thread and can't trap, or if it can
             go into an annulled delay slot.  */
          if (! must_annul
              && (condition == const_true_rtx
                  || (! insn_sets_resource_p (trial, &opposite_needed, 1)
                      && ! may_trap_p (pat))))
            {
              old_trial = trial;
              trial = try_split (pat, trial, 0);
              if (new_thread == old_trial)
                new_thread = trial;
              if (thread == old_trial)
                thread = trial;
              pat = PATTERN (trial);
              if (eligible_for_delay (insn, *pslots_filled, trial, flags))
                goto winner;
            }
          else if (0
#ifdef ANNUL_IFTRUE_SLOTS
                   || ! thread_if_true
#endif
#ifdef ANNUL_IFFALSE_SLOTS
                   || thread_if_true
#endif
                   )
            {
              old_trial = trial;
              trial = try_split (pat, trial, 0);
              if (new_thread == old_trial)
                new_thread = trial;
              if (thread == old_trial)
                thread = trial;
              pat = PATTERN (trial);
              if ((must_annul || delay_list == NULL) && (thread_if_true
                   ? check_annul_list_true_false (0, delay_list)
                     && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
                   : check_annul_list_true_false (1, delay_list)
                     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
                {
                  must_annul = 1;
                winner:
                  if (reg_mentioned_p (cc0_rtx, pat))
                    link_cc0_insns (trial);

                  /* If we own this thread, delete the insn.  If this is the
                     destination of a branch, show that a basic block status
                     may have been updated.  In any case, mark the new
                     starting point of this thread.  */
                  if (own_thread)
                    {
                      update_block (trial, thread);
                      if (trial == thread)
                        {
                          thread = next_active_insn (thread);
                          if (new_thread == trial)
                            new_thread = thread;
                        }

                      delete_insn (trial);
                    }
                  else
                    new_thread = next_active_insn (trial);

                  temp = own_thread ? trial : copy_rtx (trial);
                  if (thread_if_true)
                    INSN_FROM_TARGET_P (temp) = 1;

                  delay_list = add_to_delay_list (temp, delay_list);

                  if (slots_to_fill == ++(*pslots_filled))
                    {
                      /* Even though we have filled all the slots, we
                         may be branching to a location that has a
                         redundant insn.  Skip any if so.  */
                      while (new_thread && ! own_thread
                             && ! insn_sets_resource_p (new_thread, &set, 1)
                             && ! insn_sets_resource_p (new_thread, &needed, 1)
                             && ! insn_references_resource_p (new_thread,
                                                              &set, 1)
                             && (prior_insn
                                 = redundant_insn (new_thread, insn,
                                                   delay_list)))
                        {
                          /* We know we do not own the thread, so no need
                             to call update_block and delete_insn.  */
                          fix_reg_dead_note (prior_insn, insn);
                          update_reg_unused_notes (prior_insn, new_thread);
                          new_thread = next_active_insn (new_thread);
                        }
                      break;
                    }

                  continue;
                }
            }
        }
      /* This insn can't go into a delay slot.  */
      lose = 1;
      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);

      /* Ensure we don't put insns between the setting of cc and the comparison
         by moving a setting of cc into an earlier delay slot since these insns
         could clobber the condition code.  */
      set.cc = 1;

      /* If this insn is a register-register copy and the next insn has
         a use of our destination, change it to use our source.  That way,
         it will become a candidate for our delay slot the next time
         through this loop.  This case occurs commonly in loops that
         scan a list.

         We could check for more complex cases than those tested below,
         but it doesn't seem worth it.  It might also be a good idea to try
         to swap the two insns.  That might do better.

         We can't do this if the next insn modifies our destination, because
         that would make the replacement into the insn invalid.  We also can't
         do this if it modifies our source, because it might be an earlyclobber
         operand.  This latter test also prevents updating the contents of
         a PRE_INC.  */

      if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
          && GET_CODE (SET_SRC (pat)) == REG
          && GET_CODE (SET_DEST (pat)) == REG)
        {
          rtx next = next_nonnote_insn (trial);

          if (next && GET_CODE (next) == INSN
              && GET_CODE (PATTERN (next)) != USE
              && ! reg_set_p (SET_DEST (pat), next)
              && ! reg_set_p (SET_SRC (pat), next)
              && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
            validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
        }
    }
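  /* Assumed example of the replacement above, added for clarity (register
     names are hypothetical):

         r1 = r2                ; trial: register-register copy
         r3 = r1 + r4           ; next insn uses the copy's destination

     The use of r1 in the second insn is rewritten to r2, so on a later
     iteration that insn no longer depends on the copy and may itself become
     a delay-slot candidate.  */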
  /* If we stopped on a branch insn that has delay slots, see if we can
     steal some of the insns in those slots.  */
  if (trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
    {
      /* If this is the `true' thread, we will want to follow the jump,
         so we can only do this if we have taken everything up to here.  */
      if (thread_if_true && trial == new_thread)
        delay_list
          = steal_delay_list_from_target (insn, condition, PATTERN (trial),
                                          delay_list, &set, &needed,
                                          &opposite_needed, slots_to_fill,
                                          pslots_filled, &must_annul,
                                          &new_thread);
      else if (! thread_if_true)
        delay_list
          = steal_delay_list_from_fallthrough (insn, condition,
                                               PATTERN (trial),
                                               delay_list, &set, &needed,
                                               &opposite_needed, slots_to_fill,
                                               pslots_filled, &must_annul);
    }
  /* If we haven't found anything for this delay slot and it is very
     likely that the branch will be taken, see if the insn at our target
     increments or decrements a register with an increment that does not
     depend on the destination register.  If so, try to place the opposite
     arithmetic insn after the jump insn and put the arithmetic insn in the
     delay slot.  If we can't do this, return.  */
  if (delay_list == 0 && likely && new_thread
      && GET_CODE (new_thread) == INSN
      && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
      && asm_noperands (PATTERN (new_thread)) < 0)
    {
      rtx pat = PATTERN (new_thread);

      trial = new_thread;
      pat = PATTERN (trial);

      if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
          || ! eligible_for_delay (insn, 0, trial, flags))
        return 0;

      dest = SET_DEST (pat), src = SET_SRC (pat);
      if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
          && rtx_equal_p (XEXP (src, 0), dest)
          && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
        {
          rtx other = XEXP (src, 1);

          /* If this is a constant adjustment, use the same code with
             the negated constant.  Otherwise, reverse the sense of the
             arithmetic.  */
          if (GET_CODE (other) == CONST_INT)
            new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
                                        negate_rtx (GET_MODE (src), other));
          else
            new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
                                        GET_MODE (src), dest, other);

          ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
                                   insn);

          if (recog_memoized (ninsn) < 0
              || (extract_insn (ninsn), ! constrain_operands (1)))
            {
              delete_insn (ninsn);
              return 0;
            }

          if (own_thread)
            {
              update_block (trial, thread);
              if (trial == thread)
                {
                  thread = next_active_insn (thread);
                  if (new_thread == trial)
                    new_thread = thread;
                }
              delete_insn (trial);
            }
          else
            new_thread = next_active_insn (trial);

          ninsn = own_thread ? trial : copy_rtx (trial);
          if (thread_if_true)
            INSN_FROM_TARGET_P (ninsn) = 1;

          delay_list = add_to_delay_list (ninsn, NULL_RTX);
          (*pslots_filled)++;
        }
    }

  if (delay_list && must_annul)
    INSN_ANNULLED_BRANCH_P (insn) = 1;
  /* If we are to branch into the middle of this thread, find an appropriate
     label or make a new one if none, and redirect INSN to it.  If we hit the
     end of the function, use the end-of-function label.  */
  if (new_thread != thread)
    {
      rtx label;

      if (! thread_if_true)
        abort ();

      if (new_thread && GET_CODE (new_thread) == JUMP_INSN
          && (simplejump_p (new_thread)
              || GET_CODE (PATTERN (new_thread)) == RETURN)
          && redirect_with_delay_list_safe_p (insn,
                                              JUMP_LABEL (new_thread),
                                              delay_list))
        new_thread = follow_jumps (JUMP_LABEL (new_thread));

      if (new_thread == 0)
        label = find_end_label ();
      else if (GET_CODE (new_thread) == CODE_LABEL)
        label = new_thread;
      else
        label = get_label_before (new_thread);

      reorg_redirect_jump (insn, label);
    }

  return delay_list;
}
/* Make another attempt to find insns to place in delay slots.

   We previously looked for insns located in front of the delay insn
   and, for non-jump delay insns, located behind the delay insn.

   Here only try to schedule jump insns and try to move insns from either
   the target or the following insns into the delay slot.  If annulling is
   supported, we will be likely to do this.  Otherwise, we can do this only
   if safe.  */

fill_eager_delay_slots ()
{
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      rtx target_label, insn_at_target, fallthrough_insn;
      int own_fallthrough;
      int prediction, slots_to_fill, slots_filled;

      insn = unfilled_slots_base[i];
      if (insn == 0
          || INSN_DELETED_P (insn)
          || GET_CODE (insn) != JUMP_INSN
          || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
        continue;

      slots_to_fill = num_delay_slots (insn);
      /* Some machine descriptions have defined instructions to have
         delay slots only in certain circumstances which may depend on
         nearby insns (which change due to reorg's actions).

         For example, the PA port normally has delay slots for unconditional
         jumps.

         However, the PA port claims such jumps do not have a delay slot
         if they are immediate successors of certain CALL_INSNs.  This
         allows the port to favor filling the delay slot of the call with
         the unconditional jump.  */
      if (slots_to_fill == 0)
        continue;

      slots_filled = 0;
      target_label = JUMP_LABEL (insn);
      condition = get_branch_condition (insn, target_label);

      if (condition == 0)
        continue;
      /* Get the next active fallthrough and target insns and see if we own
         them.  Then see whether the branch is likely true.  We don't need
         to do a lot of this for unconditional branches.  */

      insn_at_target = next_active_insn (target_label);
      own_target = own_thread_p (target_label, target_label, 0);

      if (condition == const_true_rtx)
        {
          own_fallthrough = 0;
          fallthrough_insn = 0;
          prediction = 2;
        }
      else
        {
          fallthrough_insn = next_active_insn (insn);
          own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
          prediction = mostly_true_jump (insn, condition);
        }
      /* If this insn is expected to branch, first try to get insns from our
         target, then our fallthrough insns.  If it is not expected to branch,
         try the other order.  */

      if (prediction > 0)
        {
          delay_list
            = fill_slots_from_thread (insn, condition, insn_at_target,
                                      fallthrough_insn, prediction == 2, 1,
                                      own_target,
                                      slots_to_fill, &slots_filled, delay_list);

          if (delay_list == 0 && own_fallthrough)
            {
              /* Even though we didn't find anything for delay slots,
                 we might have found a redundant insn which we deleted
                 from the thread that was filled.  So we have to recompute
                 the next insn at the target.  */
              target_label = JUMP_LABEL (insn);
              insn_at_target = next_active_insn (target_label);

              delay_list
                = fill_slots_from_thread (insn, condition, fallthrough_insn,
                                          insn_at_target, 0, 0,
                                          own_fallthrough,
                                          slots_to_fill, &slots_filled,
                                          delay_list);
            }
        }
      else
        {
          if (own_fallthrough)
            delay_list
              = fill_slots_from_thread (insn, condition, fallthrough_insn,
                                        insn_at_target, 0, 0,
                                        own_fallthrough,
                                        slots_to_fill, &slots_filled,
                                        delay_list);

          if (delay_list == 0)
            delay_list
              = fill_slots_from_thread (insn, condition, insn_at_target,
                                        next_active_insn (insn), 0, 1,
                                        own_target,
                                        slots_to_fill, &slots_filled,
                                        delay_list);
        }
      if (delay_list)
        unfilled_slots_base[i]
          = emit_delay_sequence (insn, delay_list, slots_filled);

      if (slots_to_fill == slots_filled)
        unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 1);
    }
}
/* Once we have tried two ways to fill a delay slot, make a pass over the
   code to try to improve the results and to do such things as more jump
   threading.  */

relax_delay_slots (first)
     rtx first;
{
  register rtx insn, next, pat;
  register rtx trial, delay_insn, target_label;

  /* Look at every JUMP_INSN and see if we can improve it.  */
  for (insn = first; insn; insn = next)
    {
      next = next_active_insn (insn);
      /* If this is a jump insn, see if it now jumps to a jump, jumps to
         the next insn, or jumps to a label that is not the last of a
         group of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && (target_label = JUMP_LABEL (insn)) != 0)
        {
          target_label = follow_jumps (target_label);
          target_label = prev_label (next_active_insn (target_label));

          if (target_label == 0)
            target_label = find_end_label ();

          if (next_active_insn (target_label) == next
              && ! condjump_in_parallel_p (insn))
            {
              delete_jump (insn);
              continue;
            }

          if (target_label != JUMP_LABEL (insn))
            reorg_redirect_jump (insn, target_label);
          /* See if this jump branches around an unconditional jump.
             If so, invert this jump and point it to the target of the
             second jump.  */
          if (next && GET_CODE (next) == JUMP_INSN
              && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
              && next_active_insn (target_label) == next_active_insn (next)
              && no_labels_between_p (insn, next))
            {
              rtx label = JUMP_LABEL (next);

              /* Be careful how we do this to avoid deleting code or
                 labels that are momentarily dead.  See similar optimization
                 in jump.c.

                 We also need to ensure we properly handle the case when
                 invert_jump fails.  */

              ++LABEL_NUSES (target_label);
              if (label)
                ++LABEL_NUSES (label);

              if (invert_jump (insn, label))
                {
                  delete_insn (next);
                  next = insn;
                }

              if (label)
                --LABEL_NUSES (label);

              if (--LABEL_NUSES (target_label) == 0)
                delete_insn (target_label);

              continue;
            }
        }
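      /* Added sketch of the inversion performed above (labels and register
         names are hypothetical, not from the original comments):

             beq  r1,L4         becomes         bne  r1,L8
             jmp  L8
         L4: ...                            L4: ...

         The conditional branch is inverted to aim at the unconditional
         jump's target, the unconditional jump is deleted, and L4 is deleted
         as well once nothing else uses it.  */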
      /* If this is an unconditional jump and the previous insn is a
         conditional jump, try reversing the condition of the previous
         insn and swapping our targets.  The next pass might be able to
         fill the slots.

         Don't do this if we expect the conditional branch to be true, because
         we would then be making the more common case longer.  */

      if (GET_CODE (insn) == JUMP_INSN
          && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
          && (other = prev_active_insn (insn)) != 0
          && (condjump_p (other) || condjump_in_parallel_p (other))
          && no_labels_between_p (other, insn)
          && 0 > mostly_true_jump (other,
                                   get_branch_condition (other,
                                                         JUMP_LABEL (other))))
        {
          rtx other_target = JUMP_LABEL (other);
          target_label = JUMP_LABEL (insn);

          /* Increment the count of OTHER_TARGET, so it doesn't get deleted
             as we move the label.  */
          if (other_target)
            ++LABEL_NUSES (other_target);

          if (invert_jump (other, target_label))
            reorg_redirect_jump (insn, other_target);

          if (other_target)
            --LABEL_NUSES (other_target);
        }
      /* Now look only at cases where we have filled a delay slot.  */
      if (GET_CODE (insn) != INSN
          || GET_CODE (PATTERN (insn)) != SEQUENCE)
        continue;

      pat = PATTERN (insn);
      delay_insn = XVECEXP (pat, 0, 0);

      /* See if the first insn in the delay slot is redundant with some
         previous insn.  Remove it from the delay slot if so; then set up
         to reprocess this insn.  */
      if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
        {
          delete_from_delay_slot (XVECEXP (pat, 0, 1));
          next = prev_active_insn (next);
          continue;
        }
      /* See if we have a RETURN insn with a filled delay slot followed
         by a RETURN insn with an unfilled delay slot.  If so, we can delete
         the first RETURN (but not its delay insn).  This gives the same
         effect in fewer instructions.

         Only do so if optimizing for size since this results in slower, but
         smaller code.  */
      if (optimize_size
          && GET_CODE (PATTERN (delay_insn)) == RETURN
          && next
          && GET_CODE (next) == JUMP_INSN
          && GET_CODE (PATTERN (next)) == RETURN)
        {
          /* Delete the RETURN and just execute the delay list insns.

             We do this by deleting the INSN containing the SEQUENCE, then
             re-emitting the insns separately, and then deleting the RETURN.
             This allows the count of the jump target to be properly
             decremented.  */

          /* Clear the from target bit, since these insns are no longer
             in delay slots.  */
          for (i = 0; i < XVECLEN (pat, 0); i++)
            INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

          trial = PREV_INSN (insn);
          delete_insn (insn);
          emit_insn_after (pat, trial);
          delete_scheduled_jump (delay_insn);
          continue;
        }
      /* Now look only at the cases where we have a filled JUMP_INSN.  */
      if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
          || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
                || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
        continue;

      target_label = JUMP_LABEL (delay_insn);

      if (target_label)
        {
          /* If this jump goes to another unconditional jump, thread it, but
             don't convert a jump into a RETURN here.  */
          trial = follow_jumps (target_label);
          /* We use next_real_insn instead of next_active_insn, so that
             the special USE insns emitted by reorg won't be ignored.
             If they are ignored, then they will get deleted if target_label
             is now unreachable, and that would cause mark_target_live_regs
             to fail.  */
          trial = prev_label (next_real_insn (trial));
          if (trial == 0 && target_label != 0)
            trial = find_end_label ();

          if (trial != target_label
              && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
            {
              reorg_redirect_jump (delay_insn, trial);
              target_label = trial;
            }
          /* If the first insn at TARGET_LABEL is redundant with a previous
             insn, redirect the jump to the following insn and process again.  */
          trial = next_active_insn (target_label);
          if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
              && redundant_insn (trial, insn, 0))
            {
              rtx tmp;

              /* Figure out where to emit the special USE insn so we don't
                 later incorrectly compute register live/death info.  */
              tmp = next_active_insn (trial);
              if (tmp == 0)
                tmp = find_end_label ();

              /* Insert the special USE insn and update dataflow info.  */
              update_block (trial, tmp);

              /* Now emit a label before the special USE insn, and
                 redirect our jump to the new label.  */
              target_label = get_label_before (PREV_INSN (tmp));
              reorg_redirect_jump (delay_insn, target_label);
              next = insn;
              continue;
            }
          /* Similarly, if it is an unconditional jump with one insn in its
             delay list and that insn is redundant, thread the jump.  */
          if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
              && XVECLEN (PATTERN (trial), 0) == 2
              && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
              && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
                  || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
              && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
            {
              target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
              if (target_label == 0)
                target_label = find_end_label ();

              if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
                                                    insn))
                {
                  reorg_redirect_jump (delay_insn, target_label);
                  next = insn;
                  continue;
                }
            }
        }
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && prev_active_insn (target_label) == insn
          && ! condjump_in_parallel_p (delay_insn)
          /* If the last insn in the delay slot sets CC0 for some insn,
             various code assumes that it is in a delay slot.  We could
             put it back where it belonged and delete the register notes,
             but it doesn't seem worthwhile in this uncommon case.  */
          && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
                              REG_CC_USER, NULL_RTX))
        {
          /* All this insn does is execute its delay list and jump to the
             following insn.  So delete the jump and just execute the delay
             list insns.

             We do this by deleting the INSN containing the SEQUENCE, then
             re-emitting the insns separately, and then deleting the jump.
             This allows the count of the jump target to be properly
             decremented.  */

          /* Clear the from target bit, since these insns are no longer
             in delay slots.  */
          for (i = 0; i < XVECLEN (pat, 0); i++)
            INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

          trial = PREV_INSN (insn);
          delete_insn (insn);
          emit_insn_after (pat, trial);
          delete_scheduled_jump (delay_insn);
          continue;
        }
      /* See if this is an unconditional jump around a single insn which is
         identical to the one in its delay slot.  In this case, we can just
         delete the branch and the insn in its delay slot.  */
      if (next && GET_CODE (next) == INSN
          && prev_label (next_active_insn (next)) == target_label
          && simplejump_p (insn)
          && XVECLEN (pat, 0) == 2
          && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
        {
          delete_insn (insn);
          continue;
        }
      /* See if this jump (with its delay slots) branches around another
         jump (without delay slots).  If so, invert this jump and point
         it to the target of the second jump.  We cannot do this for
         annulled jumps, though.  Again, don't convert a jump to a RETURN
         here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && next && GET_CODE (next) == JUMP_INSN
          && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
          && next_active_insn (target_label) == next_active_insn (next)
          && no_labels_between_p (insn, next))
        {
          rtx label = JUMP_LABEL (next);
          rtx old_label = JUMP_LABEL (delay_insn);

          if (label == 0)
            label = find_end_label ();

          if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
            {
              /* Be careful how we do this to avoid deleting code or labels
                 that are momentarily dead.  See similar optimization in
                 jump.c.  */
              if (old_label)
                ++LABEL_NUSES (old_label);

              if (invert_jump (delay_insn, label))
                {
                  /* Must update the INSN_FROM_TARGET_P bits now that
                     the branch is reversed, so that mark_target_live_regs
                     will handle the delay slot insn correctly.  */
                  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                    {
                      rtx slot = XVECEXP (PATTERN (insn), 0, i);
                      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
                    }

                  delete_insn (next);
                  next = insn;
                }

              if (old_label && --LABEL_NUSES (old_label) == 0)
                delete_insn (old_label);
              continue;
            }
        }
      /* If we own the thread opposite the way this insn branches, see if we
         can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
          && own_thread_p (NEXT_INSN (insn), 0, 1))
        try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
               && own_thread_p (target_label, target_label, 0))
        try_merge_delay_insns (insn, next_active_insn (target_label));
      /* If we get here, we haven't deleted INSN.  But we may have deleted
         NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */

make_return_insns (first)
     rtx first;
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
        real_return_label = get_label_before (insn);
        break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      /* Only look at filled JUMP_INSNs that go to the end of function
         label.  */
      if (GET_CODE (insn) != INSN
          || GET_CODE (PATTERN (insn)) != SEQUENCE
          || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
          || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
        continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to the best
         RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
        {
          /* Make sure redirecting the jump will not invalidate the delay
             slot insns.  */
          if (redirect_with_delay_slots_safe_p (jump_insn,
                                                real_return_label,
                                                insn))
            reorg_redirect_jump (jump_insn, real_return_label);
          continue;
        }

      /* See if this RETURN can accept the insns currently in its delay slot.
         It can if it has more or an equal number of slots and the contents
         of each is valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
        {
          for (i = 1; i < XVECLEN (pat, 0); i++)
            if (! (
#ifdef ANNUL_IFFALSE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
                   ? eligible_for_annul_false (jump_insn, i - 1,
                                               XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
                   ? eligible_for_annul_true (jump_insn, i - 1,
                                              XVECEXP (pat, 0, i), flags) :
#endif
                   eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
              break;
        }

      if (i == XVECLEN (pat, 0))
        continue;
      /* We have to do something with this insn.  If it is an unconditional
         RETURN, delete the SEQUENCE and output the individual insns,
         followed by the RETURN.  Then set things up so we try to find
         insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
        {
          rtx prev = PREV_INSN (insn);

          delete_insn (insn);
          for (i = 1; i < XVECLEN (pat, 0); i++)
            prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

          insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
          emit_barrier_after (insn);

          if (slots)
            obstack_ptr_grow (&unfilled_slots_obstack, insn);
        }
      else
        /* It is probably more efficient to keep this with its current
           delay slot as a branch to a RETURN.  */
        reorg_redirect_jump (jump_insn, real_return_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);

  fill_simple_delay_slots (1);
  fill_simple_delay_slots (0);
}
/* Try to find insns to place in delay slots.  */

dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;
  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
        max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
        epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;

  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }
  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
        continue;

      if (num_delay_slots (insn) > 0)
        obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && JUMP_LABEL (insn) != 0
          && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
              != JUMP_LABEL (insn)))
        redirect_jump (insn, target);
    }
  init_resource_info (epilogue_insn);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }
  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
          && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
        next = delete_insn (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);
  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
           reorg_pass_number < MAX_REORG_PASSES;
           reorg_pass_number++)
        {
          fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
          for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
            {
              need_comma = 0;
              fprintf (file, ";; Reorg function #%d\n", i);

              fprintf (file, ";; %d insns needing delay slots\n;; ",
                       num_insns_needing_delays[i][reorg_pass_number]);

              for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
                if (num_filled_delays[i][j][reorg_pass_number])
                  {
                    if (need_comma)
                      fprintf (file, ", ");
                    need_comma = 1;
                    fprintf (file, "%d got %d delays",
                             num_filled_delays[i][j][reorg_pass_number], j);
                  }
              fprintf (file, "\n");
            }
        }
    }
  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up until this point the destinations of
     JUMPS can be moved around and changed, but past right here that cannot
     happen.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (GET_CODE (insn) == INSN)
        {
          rtx pat = PATTERN (insn);

          if (GET_CODE (pat) == SEQUENCE)
            insn = XVECEXP (pat, 0, 0);
        }
      if (GET_CODE (insn) != JUMP_INSN)
        continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
                                            GEN_INT (pred_flags),
                                            REG_NOTES (insn));
    }

  free_resource_info ();
}
#endif /* DELAY_SLOTS */