/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS has a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.
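
   As a concrete illustration (hypothetical MIPS-flavored assembly, not
   copied from any particular compiler output), filling the delay slot
   of a branch turns

       (before filling)        (after filling)
       addu $4,$4,1            beq $2,$0,L1
       beq $2,$0,L1            addu $4,$4,1    ; runs in the delay slot
       nop

   so the cycle previously wasted on the nop now does useful work.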
   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops.  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.  (A schematic example follows.)
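
   For example, if (1) or (2) leave behind a conditional branch to a
   label that itself holds only an unconditional jump to L2,
   `relax_delay_slots' rethreads the first branch straight to L2 and can
   then often delete the intermediate jump entirely.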
   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets CC0 and the insn that uses
   it.  The insns are allowed to be separated by placing an insn that
   sets CC0 (but not an insn that uses CC0; we could do this, but it
   doesn't seem worthwhile) in a delay slot.  In that case, we point each
   insn at the other with REG_CC_USER and REG_CC_SETTER notes.  Note that
   these restrictions affect very few machines because most RISC machines
   with delay slots will not use CC0 (the RT is the only known exception
   at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"
#include "except.h"
#include "params.h"
#include "target.h"
#include "tree-pass.h"
#include "emit-rtl.h"
#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
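
/* A sketch of the usage pattern (the real call sites appear later in
   this file): insns are queued with

       obstack_ptr_grow (&unfilled_slots_obstack, insn);

   and the pending insns live in the half-open range
   [unfilled_slots_base, unfilled_slots_next).  Growing the obstack can
   move the whole block, which is why both macros recompute their
   addresses on every use instead of caching a pointer.  */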
/* Points to the label before the end of the function, or before a
   return insn.  */
static rtx function_return_label;
/* Likewise for a simple_return.  */
static rtx function_simple_return_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
static int stop_search_p (rtx, int);
static int resource_conflicts_p (struct resources *, struct resources *);
static int insn_references_resource_p (rtx, struct resources *, bool);
static int insn_sets_resource_p (rtx, struct resources *, bool);
static rtx find_end_label (rtx);
static rtx emit_delay_sequence (rtx, rtx, int);
static rtx add_to_delay_list (rtx, rtx);
static rtx delete_from_delay_slot (rtx);
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip (rtx);
#endif
static int get_jump_flags (rtx, rtx);
static int rare_destination (rtx);
static int mostly_true_jump (rtx, rtx);
static rtx get_branch_condition (rtx, rtx);
static int condition_dominates_p (rtx, rtx);
static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
static int check_annul_list_true_false (int, rtx);
static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx,
                                         struct resources *,
                                         struct resources *,
                                         struct resources *,
                                         int, int *, int *, rtx *);
static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx,
                                              struct resources *,
                                              struct resources *,
                                              struct resources *,
                                              int, int *, int *);
static void try_merge_delay_insns (rtx, rtx);
static rtx redundant_insn (rtx, rtx, rtx);
static int own_thread_p (rtx, rtx, int);
static void update_block (rtx, rtx);
static int reorg_redirect_jump (rtx, rtx);
static void update_reg_dead_notes (rtx, rtx);
static void fix_reg_dead_note (rtx, rtx);
static void update_reg_unused_notes (rtx, rtx);
static void fill_simple_delay_slots (int);
static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx,
                                   int, int, int, int,
                                   int *, rtx);
static void fill_eager_delay_slots (void);
static void relax_delay_slots (rtx);
static void make_return_insns (rtx);
/* A wrapper around next_active_insn which takes care to return ret_rtx
   unchanged.  */

static rtx
first_active_target_insn (rtx insn)
{
  if (ANY_RETURN_P (insn))
    return insn;
  return next_active_insn (insn);
}
/* Return true iff INSN is a simplejump, or any kind of return insn.  */

static bool
simplejump_or_return_p (rtx insn)
{
  return (JUMP_P (insn)
          && (simplejump_p (insn) || ANY_RETURN_P (PATTERN (insn))));
}
/* Return TRUE if this insn should stop the search for insn to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (rtx insn, int labels_p)
{
  if (insn == 0)
    return 1;

  /* If the insn can throw an exception that is caught within the function,
     it may effectively perform a jump from the viewpoint of the function.
     Therefore act like for a jump.  */
  if (can_throw_internal (insn))
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
         We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
              || GET_CODE (PATTERN (insn)) == ASM_INPUT
              || asm_noperands (PATTERN (insn)) >= 0);

    default:
      gcc_unreachable ();
    }
}
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (struct resources *res1, struct resources *res2)
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
        return 1;
    return 0;
  }
#endif
}
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (rtx insn, struct resources *res,
                            bool include_delayed_effects)
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (rtx insn, struct resources *res,
                      bool include_delayed_effects)
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0,
                      (include_delayed_effects
                       ? MARK_SRC_DEST_CALL
                       : MARK_SRC_DEST));
  return resource_conflicts_p (&insn_sets, res);
}
/* Find a label at the end of the function or before a RETURN.  If there
   is none, try to make one.  If that fails, returns 0.

   The property of such a label is that it is placed just before the
   epilogue or a bare RETURN insn, so that another bare RETURN can be
   turned into a jump to the label unconditionally.  In particular, the
   label cannot be placed before a RETURN insn with a filled delay slot.

   ??? There may be a problem with the current implementation.  Suppose
   we start with a bare RETURN insn and call find_end_label.  It may set
   function_return_label just before the RETURN.  Suppose the machinery
   is able to fill the delay slot of the RETURN insn afterwards.  Then
   function_return_label is no longer valid according to the property
   described above and find_end_label will still return it unmodified.
   Note that this is probably mitigated by the following observation:
   once function_return_label is made, it is very likely the target of
   a jump, so filling the delay slot of the RETURN will be much more
   difficult.

   KIND is either simple_return_rtx or ret_rtx, indicating which type of
   return we're looking for.  */

static rtx
find_end_label (rtx kind)
{
  rtx insn;
  rtx *plabel;

  if (kind == ret_rtx)
    plabel = &function_return_label;
  else
    {
      gcc_assert (kind == simple_return_rtx);
      plabel = &function_simple_return_label;
    }

  /* If we found one previously, return it.  */
  if (*plabel)
    return *plabel;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (NOTE_P (insn)
         || (NONJUMP_INSN_P (insn)
             && (GET_CODE (PATTERN (insn)) == USE
                 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     function_return_label.  */
  if (BARRIER_P (insn)
      && JUMP_P (PREV_INSN (insn))
      && PATTERN (PREV_INSN (insn)) == kind)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      rtx label = gen_label_rtx ();
      LABEL_NUSES (label) = 0;

      /* Put the label before any USE insns that may precede the RETURN
         insn.  */
      while (GET_CODE (temp) == USE)
        temp = PREV_INSN (temp);

      emit_label_after (label, temp);
      *plabel = label;
    }

  else if (LABEL_P (insn))
    *plabel = insn;
  else
    {
      rtx label = gen_label_rtx ();
      LABEL_NUSES (label) = 0;
      /* If the basic block reorder pass moves the return insn to
         some other place try to locate it again and put our
         function_return_label there.  */
      while (insn && ! (JUMP_P (insn) && (PATTERN (insn) == kind)))
        insn = PREV_INSN (insn);
      if (insn)
        {
          insn = PREV_INSN (insn);

          /* Put the label before any USE insns that may precede the
             RETURN insn.  */
          while (GET_CODE (insn) == USE)
            insn = PREV_INSN (insn);

          emit_label_after (label, insn);
        }
      else
        {
#ifdef HAVE_epilogue
          if (HAVE_epilogue
#ifdef HAVE_return
              && ! HAVE_return
#endif
              )
            {
              /* The RETURN insn has its delay slot filled so we cannot
                 emit the label just before it.  Since we already have
                 an epilogue and cannot emit a new RETURN, we cannot
                 emit the label at all.  */
              return NULL_RTX;
            }
#endif /* HAVE_epilogue */

          /* Otherwise, make a new label and emit a RETURN and BARRIER,
             if needed.  */
          emit_label (label);
#ifdef HAVE_return
          /* We don't bother trying to create a return insn if the
             epilogue has filled delay-slots; we would have to try and
             move the delay-slot fillers to the delay-slots for the new
             return insn or in front of the new return insn.  */
          if (crtl->epilogue_delay_list == NULL
              && HAVE_return)
            {
              /* The return we make may have delay slots too.  */
              rtx insn = gen_return ();
              insn = emit_jump_insn (insn);
              set_return_jump_label (insn);
              emit_barrier ();
              if (num_delay_slots (insn) > 0)
                obstack_ptr_grow (&unfilled_slots_obstack, insn);
            }
#endif
        }
      *plabel = label;
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (*plabel);

  return *plabel;
}
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
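
/* For illustration: after filling, a jump with two delay slots is
   represented as roughly

       (insn (sequence [(jump_insn ...)      ;; the insn needing slots
                        (insn ...)           ;; delay slot 1
                        (insn ...)]))        ;; delay slot 2

   so XVECEXP (seq, 0, 0) is always the control insn and the slot insns
   follow in execution order.  */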
static rtx
emit_delay_sequence (rtx insn, rtx list, int length)
{
  int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
    {
      delete_related_insns (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (NEXT_INSN (seq_insn))
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (PREV_INSN (seq_insn))
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  INSN_LOCATOR (seq_insn) = INSN_LOCATOR (delay_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note, next;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* The SPARC assembler, for instance, emits a warning when debug info
         is output into the delay slot.  */
      if (INSN_LOCATOR (tem) && !INSN_LOCATOR (seq_insn))
        INSN_LOCATOR (seq_insn) = INSN_LOCATOR (tem);
      INSN_LOCATOR (tem) = 0;

      for (note = REG_NOTES (tem); note; note = next)
        {
          next = XEXP (note, 1);
          switch (REG_NOTE_KIND (note))
            {
            case REG_DEAD:
              /* Remove any REG_DEAD notes because we can't rely on them now
                 that the insn has been moved.  */
              remove_note (tem, note);
              break;

            case REG_LABEL_OPERAND:
            case REG_LABEL_TARGET:
              /* Keep the label reference count up to date.  */
              if (LABEL_P (XEXP (note, 0)))
                LABEL_NUSES (XEXP (note, 0)) ++;
              break;

            default:
              break;
            }
        }
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
                        XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  gcc_assert (i == length + 1);

  return seq_insn;
}
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (rtx insn, rtx delay_list)
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (rtx insn)
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;
  int had_barrier = 0;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
    had_barrier = 1;

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
        delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_related_insns (seq_insn);
  add_insn_after (trial, prev, NULL);

  /* If there was a barrier after the old SEQUENCE, re-emit it.  */
  if (had_barrier)
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else if (JUMP_P (trial))
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (rtx insn)
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
         insn is in the delay list of some other insn.  So delete it from
         the delay list it was in.  */
      if (note)
        {
          if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
              && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
            delete_from_delay_slot (XEXP (note, 0));
        }
      else
        {
          /* The insn setting CC0 is our previous insn, but it may be in
             a delay slot.  It will be the last insn in the delay slot, if
             it is.  */
          rtx trial = previous_insn (insn);
          if (NOTE_P (trial))
            trial = prev_nonnote_insn (trial);
          if (sets_cc0_p (PATTERN (trial)) != 1
              || FIND_REG_INC_NOTE (trial, NULL_RTX))
            return;
          if (PREV_INSN (NEXT_INSN (trial)) == trial)
            delete_related_insns (trial);
          else
            delete_from_delay_slot (trial);
        }
    }
#endif

  delete_related_insns (insn);
}
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (int slots_filled, int index)
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

        (orig)          (skip)          (otherwise)
        Bcc.n L1        Bcc',a L1       Bcc,a L1'
        insn            insn            insn2
      L1:             L1:             L1:
        insn2           insn2           insn2
        insn3           insn3         L1':
                                        insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

        (orig)          (skip)
        Bcc.n L1        Bcc',a L2
        insn            insn
      L1:             L1:
        Bra L2          Bra L2

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx
optimize_skip (rtx insn)
{
  rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || !NONJUMP_INSN_P (trial)
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
          && ! eligible_for_annul_true (insn, 0, trial, flags))
      || can_throw_internal (trial))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && crtl->epilogue_delay_list != 0))
      || (next_trial != 0
          && simplejump_or_return_p (next_trial)
          && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
        {
          if (invert_jump (insn, JUMP_LABEL (insn), 1))
            INSN_FROM_TARGET_P (trial) = 1;
          else if (! eligible_for_annul_true (insn, 0, trial, flags))
            return 0;
        }

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_related_insns (trial);

      /* Also, if we are targeting an unconditional
         branch, thread our jump to the target of that branch.  Don't
         change this into a RETURN here, because it may not accept what
         we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && simplejump_or_return_p (next_trial))
        {
          rtx target_label = JUMP_LABEL (next_trial);
          if (ANY_RETURN_P (target_label))
            target_label = find_end_label (target_label);

          if (target_label)
            {
              /* Recompute the flags based on TARGET_LABEL since threading
                 the jump to TARGET_LABEL may change the direction of the
                 jump (which may change the circumstances in which the
                 delay slot is nullified).  */
              flags = get_jump_flags (insn, target_label);
              if (eligible_for_annul_true (insn, 0, trial, flags))
                reorg_redirect_jump (insn, target_label);
            }
        }

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non conditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (rtx insn, rtx label)
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is a return, then there is no way to determine the branch
     direction.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && !ANY_RETURN_P (label)
      && INSN_UID (insn) <= max_uid
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
      ? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  return flags;
}
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if INSN will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (rtx insn)
{
  int jump_count = 0;
  rtx next;

  for (; insn && !ANY_RETURN_P (insn); insn = next)
    {
      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
          return 0;
        case BARRIER:
          /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
             don't scan past JUMP_INSNs, so any barrier we find here must
             have been after a CALL_INSN and hence mean the call doesn't
             return.  */
          return 2;
        case JUMP_INSN:
          if (ANY_RETURN_P (PATTERN (insn)))
            return 1;
          else if (simplejump_p (insn)
                   && jump_count++ < 10)
            next = JUMP_LABEL (insn);
          else
            return 0;

        default:
          break;
        }
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if nonzero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (rtx jump_insn, rtx condition)
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx note;
  int rare_dest, rare_fallthrough;

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  note = find_reg_note (jump_insn, REG_BR_PROB, 0);
  if (note)
    {
      int prob = INTVAL (XEXP (note, 0));

      if (prob >= REG_BR_PROB_BASE * 9 / 10)
        return 2;
      else if (prob >= REG_BR_PROB_BASE / 2)
        return 1;
      else if (prob >= REG_BR_PROB_BASE / 10)
        return 0;
      else
        return -1;
    }
  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */
  rare_dest = rare_destination (target_label);
  rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* Predict backward branches as usually taken and forward branches as
     usually not taken.  If we don't know whether this is forward or
     backward, assume the branch will be taken, since most are.  */
  return (ANY_RETURN_P (target_label) || INSN_UID (jump_insn) > max_uid
          || INSN_UID (target_label) > max_uid
          || (uid_to_ruid[INSN_UID (jump_insn)]
              > uid_to_ruid[INSN_UID (target_label)]));
}
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (rtx insn, rtx target)
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (ANY_RETURN_P (pat))
    return pat == target ? const_true_rtx : 0;

  if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
           && XEXP (src, 2) == pc_rtx
           && GET_CODE (XEXP (src, 1)) == LABEL_REF
           && XEXP (XEXP (src, 1), 0) == target)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
           && XEXP (src, 1) == pc_rtx
           && GET_CODE (XEXP (src, 2)) == LABEL_REF
           && XEXP (XEXP (src, 2), 0) == target)
    {
      enum rtx_code rev;
      rev = reversed_comparison_code (XEXP (src, 0), insn);
      if (rev != UNKNOWN)
        return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
                               XEXP (XEXP (src, 0), 0),
                               XEXP (XEXP (src, 0), 1));
    }

  return 0;
}
/* Return nonzero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (rtx condition, rtx insn)
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
           ? eligible_for_annul_false (jump, i - 1,
                                       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
           ? eligible_for_annul_true (jump, i - 1,
                                      XVECEXP (pat, 0, i), flags) :
#endif
           eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}
/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && INSN_FROM_TARGET_P (XEXP (li, 0)))
           ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
           ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
           eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}
/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (int annul_true_p, rtx delay_list)
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
        {
          rtx trial = XEXP (temp, 0);

          if ((annul_true_p && INSN_FROM_TARGET_P (trial))
              || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
            return 0;
        }
    }

  return 1;
}
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a nonzero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value nonzero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

static rtx
steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
                              rtx delay_list, struct resources *sets,
                              struct resources *needed,
                              struct resources *other_needed,
                              int slots_to_fill, int *pslots_filled,
                              int *pannul_p, rtx *pnew_thread)
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We cannot steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We cannot do this because we cannot
     change the direction of the jump, since the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
      if (insn_references_resource_p (XVECEXP (seq, 0, 0), &cc_set, false))
        return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

#ifdef MD_CAN_REDIRECT_BRANCH
  /* On some targets, branches with delay slots can have a limited
     displacement.  Give the back end a chance to tell us we can't do
     this.  */
  if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
    return delay_list;
#endif

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, false)
          || insn_sets_resource_p (trial, needed, false)
          || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
          /* If TRIAL sets CC0, we can't copy it, so we can't steal this
             delay list.  */
          || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
          /* If TRIAL is from the fallthrough code of an annulled branch insn
             in SEQ, we cannot use it.  */
          || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
              && ! INSN_FROM_TARGET_P (trial)))
        return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
         pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
        continue;

      /* We will end up re-vectoring this branch, so compute flags
         based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
          && ((condition == const_true_rtx
               || (! insn_sets_resource_p (trial, other_needed, false)
                   && ! may_trap_or_fault_p (PATTERN (trial)))))
          ? eligible_for_delay (insn, total_slots_filled, trial, flags)
          : (must_annul || (delay_list == NULL && new_delay_list == NULL))
             && (must_annul = 1,
                 check_annul_list_true_false (0, delay_list)
                 && check_annul_list_true_false (0, new_delay_list)
                 && eligible_for_annul_false (insn, total_slots_filled,
                                              trial, flags)))
        {
          if (must_annul)
            used_annul = 1;
          temp = copy_rtx (trial);
          INSN_FROM_TARGET_P (temp) = 1;
          new_delay_list = add_to_delay_list (temp, new_delay_list);
          total_slots_filled++;

          if (--slots_remaining == 0)
            break;
        }
      else
        return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = first_active_target_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
                                   rtx delay_list, struct resources *sets,
                                   struct resources *needed,
                                   struct resources *other_needed,
                                   int slots_to_fill, int *pslots_filled,
                                   int *pannul_p)
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_or_return_p (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
         of CC0.  */
      if (insn_references_resource_p (trial, sets, false)
          || insn_sets_resource_p (trial, needed, false)
          || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
          || sets_cc0_p (PATTERN (trial))
#endif
          )

        break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
        {
          delete_from_delay_slot (trial);
          continue;
        }

      if (! must_annul
          && ((condition == const_true_rtx
               || (! insn_sets_resource_p (trial, other_needed, false)
                   && ! may_trap_or_fault_p (PATTERN (trial)))))
          ? eligible_for_delay (insn, *pslots_filled, trial, flags)
          : (must_annul || delay_list == NULL) && (must_annul = 1,
             check_annul_list_true_false (1, delay_list)
             && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
        {
          if (must_annul)
            used_annul = 1;
          delete_from_delay_slot (trial);
          delay_list = add_to_delay_list (trial, delay_list);

          if (++(*pslots_filled) == slots_to_fill)
            break;
        }
      else
        break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (rtx insn, rtx thread)
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = JUMP_P (delay_insn) && INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1 ; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
        mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed,
                                   true);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (NONJUMP_INSN_P (trial)
          && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
        continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
          /* We can't share an insn that sets cc0.  */
          && ! sets_cc0_p (pat)
#endif
          && ! insn_references_resource_p (trial, &set, true)
          && ! insn_sets_resource_p (trial, &set, true)
          && ! insn_sets_resource_p (trial, &needed, true)
          && (trial = try_split (pat, trial, 0)) != 0
          /* Update next_trial, in case try_split succeeded.  */
          && (next_trial = next_nonnote_insn (trial))
          /* Likewise THREAD.  */
          && (thread = oldtrial == thread ? trial : thread)
          && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
          /* Have to test this condition if annul condition is different
             from (and less restrictive than) non-annulling one.  */
          && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
        {
          if (! annul_p)
            {
              update_block (trial, thread);
              if (trial == thread)
                thread = next_active_insn (thread);

              delete_related_insns (trial);
              INSN_FROM_TARGET_P (next_to_match) = 0;
            }
          else
            merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

          if (++slot_number == num_slots)
            break;

          next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
        }

      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, true);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && !(JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
           && INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0))))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (filled_insn, &needed, true);

      for (i = 1; i < XVECLEN (pat, 0); i++)
        {
          rtx dtrial = XVECEXP (pat, 0, i);

          if (! insn_references_resource_p (dtrial, &set, true)
              && ! insn_sets_resource_p (dtrial, &set, true)
              && ! insn_sets_resource_p (dtrial, &needed, true)
#ifdef HAVE_cc0
              && ! sets_cc0_p (PATTERN (dtrial))
#endif
              && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
              && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
            {
              if (! annul_p)
                {
                  rtx new_rtx;

                  update_block (dtrial, thread);
                  new_rtx = delete_from_delay_slot (dtrial);
                  if (INSN_DELETED_P (thread))
                    thread = new_rtx;
                  INSN_FROM_TARGET_P (next_to_match) = 0;
                }
              else
                merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
                                                  merged_insns);

              if (++slot_number == num_slots)
                break;

              next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
            }
          else
            {
              /* Keep track of the set/referenced resources for the delay
                 slots of any trial insns we encounter.  */
              mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
              mark_referenced_resources (dtrial, &needed, true);
            }
        }
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not do so any more.  In that case delete
     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each
     insn in the delay list so that we know that it isn't only being used at
     the target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
        {
          if (GET_MODE (merged_insns) == SImode)
            {
              rtx new_rtx;

              update_block (XEXP (merged_insns, 0), thread);
              new_rtx = delete_from_delay_slot (XEXP (merged_insns, 0));
              if (INSN_DELETED_P (thread))
                thread = new_rtx;
            }
          else
            {
              update_block (XEXP (merged_insns, 0), thread);
              delete_related_insns (XEXP (merged_insns, 0));
            }
        }

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
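
/* A schematic example of the redundancy this catches: when two branches
   both target the same label, the first branch may already have had the
   insn at that label copied into its delay slot.  When the second branch
   later considers the same insn for its own slot, it is found to be
   redundant, i.e. already executed on every path, and so costs nothing
   to "fill" with.  */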
static rtx
redundant_insn (rtx insn, rtx target, rtx delay_list)
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;
  unsigned insns_to_search;

  /* If INSN has any REG_UNUSED notes, it can't match anything since we
     are allowed to not actually assign to such a register.  */
  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
    return 0;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target),
         insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && insns_to_search > 0;
       trial = PREV_INSN (trial))
    {
      if (LABEL_P (trial))
        return 0;

      if (!NONDEBUG_INSN_P (trial))
        continue;
      --insns_to_search;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (GET_CODE (pat) == SEQUENCE)
        {
          /* Stop for a CALL and its delay slots because it is difficult to
             track its resource needs correctly.  */
          if (CALL_P (XVECEXP (pat, 0, 0)))
            return 0;

          /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
             slots because it is difficult to track its resource needs
             correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
          if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
          if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

          /* See if any of the insns in the delay slot match, updating
             resource requirements as we go.  */
          for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
                && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
                && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
              break;

          /* If found a match, exit this loop early.  */
          if (i > 0)
            break;
        }

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
               && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
        break;
    }

  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;

  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
  mark_referenced_resources (insn, &needed, true);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
#ifdef HAVE_cc0
      || reg_mentioned_p (cc0_rtx, ipat)
#endif
      /* The insn requiring the delay may not set anything needed or set by
         INSN.  */
      || insn_sets_resource_p (target_main, &needed, true)
      || insn_sets_resource_p (target_main, &set, true))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  needed.unch_memory |= set.unch_memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);

  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, true))
        return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed,
                                true))
        return 0;

  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something insn uses or sets.  */

  for (trial = PREV_INSN (target),
         insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && !LABEL_P (trial) && insns_to_search > 0;
       trial = PREV_INSN (trial))
    {
      if (!NONDEBUG_INSN_P (trial))
        continue;
      --insns_to_search;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (GET_CODE (pat) == SEQUENCE)
        {
          bool annul_p = false;
          rtx control = XVECEXP (pat, 0, 0);

          /* If this is a CALL_INSN and its delay slots, it is hard to track
             the resource needs properly, so give up.  */
          if (CALL_P (control))
            return 0;

          /* If this is an INSN or JUMP_INSN with delayed effects, it
             is hard to track the resource needs properly, so give up.  */

#ifdef INSN_SETS_ARE_DELAYED
          if (INSN_SETS_ARE_DELAYED (control))
            return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
          if (INSN_REFERENCES_ARE_DELAYED (control))
            return 0;
#endif

          if (JUMP_P (control))
            annul_p = INSN_ANNULLED_BRANCH_P (control);

          /* See if any of the insns in the delay slot match, updating
             resource requirements as we go.  */
          for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
            {
              rtx candidate = XVECEXP (pat, 0, i);

              /* If an insn will be annulled if the branch is false, it isn't
                 considered as a possible duplicate insn.  */
              if (rtx_equal_p (PATTERN (candidate), ipat)
                  && ! (annul_p && INSN_FROM_TARGET_P (candidate)))
                {
                  /* Show that this insn will be used in the sequel.  */
                  INSN_FROM_TARGET_P (candidate) = 0;
                  return candidate;
                }

              /* Unless this is an annulled insn from the target of a branch,
                 we must stop if it sets anything needed or set by INSN.  */
              if ((!annul_p || !INSN_FROM_TARGET_P (candidate))
                  && insn_sets_resource_p (candidate, &needed, true))
                return 0;
            }

          /* If the insn requiring the delay slot conflicts with INSN, we
             must stop.  */
          if (insn_sets_resource_p (control, &needed, true))
            return 0;
        }
      else
        {
          /* See if TRIAL is the same as INSN.  */
          pat = PATTERN (trial);
          if (rtx_equal_p (pat, ipat))
            return trial;

          /* Can't go any further if TRIAL conflicts with INSN.  */
          if (insn_sets_resource_p (trial, &needed, true))
            return 0;
        }
    }

  return 0;
}
/* Return 1 if THREAD can only be executed in one way.  If LABEL is nonzero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is nonzero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */

static int
own_thread_p (rtx thread, rtx label, int allow_fallthrough)
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0 || ANY_RETURN_P (thread))
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn)
        && (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || !BARRIER_P (insn);
       insn = prev_nonnote_insn (insn))
    if (insn == 0
        || LABEL_P (insn)
        || (NONJUMP_INSN_P (insn)
            && GET_CODE (PATTERN (insn)) != USE
            && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}
/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (rtx insn, rtx where)
{
  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  incr_ticks_for_insn (insn);
}
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (rtx jump, rtx nlabel)
{
  incr_ticks_for_insn (jump);
  return redirect_jump (jump, nlabel, 1);
}
/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */

static void
update_reg_dead_notes (rtx insn, rtx delayed_insn)
{
  rtx p, link, next;

  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || !REG_P (XEXP (link, 0)))
	  continue;

	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
	  {
	    /* Move the REG_DEAD note from P to INSN.  */
	    remove_note (p, link);
	    XEXP (link, 1) = REG_NOTES (insn);
	    REG_NOTES (insn) = link;
	  }
      }
}
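/* A hypothetical illustration of the note movement above (the register
   number is made up, not from the original sources):

	insn:	... use (reg 5) ...
	...
	p:	...		(REG_DEAD (reg 5))

   When INSN moves into the delay slot, the REG_DEAD note is unlinked from
   P and threaded onto INSN's note list, so a later mark_target_live_regs
   scan sees the death at the insn that actually uses the register.  */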
/* Called when an insn redundant with start_insn is deleted.  If there
   is a REG_DEAD note for the target of start_insn between start_insn
   and stop_insn, then the REG_DEAD note needs to be deleted since the
   value no longer dies there.

   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
   confused into thinking the register is dead.  */

static void
fix_reg_dead_note (rtx start_insn, rtx stop_insn)
{
  rtx p, link, next;

  for (p = next_nonnote_insn (start_insn); p != stop_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || !REG_P (XEXP (link, 0)))
	  continue;

	if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
	  {
	    remove_note (p, link);
	    return;
	  }
      }
}
/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.

   This handles the case of udivmodXi4 instructions which optimize their
   output depending on whether any REG_UNUSED notes are present.
   We must make sure that INSN calculates as many results as REDUNDANT_INSN.  */

static void
update_reg_unused_notes (rtx insn, rtx redundant_insn)
{
  rtx link, next;

  for (link = REG_NOTES (insn); link; link = next)
    {
      next = XEXP (link, 1);

      if (REG_NOTE_KIND (link) != REG_UNUSED
	  || !REG_P (XEXP (link, 0)))
	continue;

      if (! find_regno_note (redundant_insn, REG_UNUSED,
			     REGNO (XEXP (link, 0))))
	remove_note (insn, link);
    }
}
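/* Sketch of the situation handled above (hypothetical, for exposition
   only): if INSN carries (REG_UNUSED (reg 3)) for the remainder output of
   a udivmodsi4 but the REDUNDANT_INSN it duplicates has no such note, the
   note is removed from INSN, forcing it to compute both quotient and
   remainder exactly as REDUNDANT_INSN does.  */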
/* Return the label before INSN, or put a new label there.  */

static rtx
get_label_before (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || !LABEL_P (label))
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
/* Scan a function looking for insns that need a delay slot and find insns to
   put into the delay slot.

   NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
   as calls).  We do these first since we don't want jump insns (that are
   easier to fill) to get the only insns that could be used for non-jump insns.
   When it is zero, only try to fill JUMP_INSNs.

   When slots are filled in this manner, the insns (including the
   delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
   it is possible to tell whether a delay slot has really been filled
   or not.  `final' knows how to deal with this, by communicating
   through FINAL_SEQUENCE.  */

static void
fill_simple_delay_slots (int non_jumps_p)
{
  rtx insn, pat, trial, next_trial;
  rtx delay_list;
  int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
  struct resources needed, set;
  int slots_to_fill, slots_filled;
  int flags;
  for (i = 0; i < num_unfilled_slots; i++)
    {
      /* Get the next insn to fill.  If it has already had any slots assigned,
	 we can't do anything with it.  Maybe we'll improve this later.  */

      insn = unfilled_slots_base[i];
      if (insn == 0
	  || INSN_DELETED_P (insn)
	  || (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  || (JUMP_P (insn) && non_jumps_p)
	  || (!JUMP_P (insn) && ! non_jumps_p))
	continue;

      /* It may have been that this insn used to need delay slots, but
	 now doesn't; ignore in that case.  This can happen, for example,
	 on the HP PA RISC, where the number of delay slots depends on
	 what insns are nearby.  */
      slots_to_fill = num_delay_slots (insn);

      /* Some machine descriptions have defined instructions to have
	 delay slots only in certain circumstances which may depend on
	 nearby insns (which change due to reorg's actions).

	 For example, the PA port normally has delay slots for unconditional
	 jumps.

	 However, the PA port claims such jumps do not have a delay slot
	 if they are immediate successors of certain CALL_INSNs.  This
	 allows the port to favor filling the delay slot of the call with
	 the unconditional jump.  */
      if (slots_to_fill == 0)
	continue;
      /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
	 says how many.  After initialization, first try optimizing

	 call _foo		call _foo
	 nop			add %o7,.-L1,%o7
	 b,a L1
	 nop

	 If this case applies, the delay slot of the call is filled with
	 the unconditional jump.  This is done first to avoid having the
	 delay slot of the call filled in the backward scan.  Also, since
	 the unconditional jump is likely to also have a delay slot, that
	 insn must exist when it is subsequently scanned.

	 This is tried on each insn with delay slots as some machines
	 have insns which perform calls, but are not represented as
	 CALL_INSNs.  */

      slots_filled = 0;
      delay_list = 0;

      if (JUMP_P (insn))
	flags = get_jump_flags (insn, JUMP_LABEL (insn));
      else
	flags = get_jump_flags (insn, NULL_RTX);

      if ((trial = next_active_insn (insn))
	  && JUMP_P (trial)
	  && simplejump_p (trial)
	  && eligible_for_delay (insn, slots_filled, trial, flags)
	  && no_labels_between_p (insn, trial)
	  && ! can_throw_internal (trial))
	{
	  rtx *tmp;
	  slots_filled++;
	  delay_list = add_to_delay_list (trial, delay_list);

	  /* TRIAL may have had its delay slot filled, then unfilled.  When
	     the delay slot is unfilled, TRIAL is placed back on the unfilled
	     slots obstack.  Unfortunately, it is placed on the end of the
	     obstack, not in its original location.  Therefore, we must search
	     from entry i + 1 to the end of the unfilled slots obstack to
	     try and find TRIAL.  */
	  tmp = &unfilled_slots_base[i + 1];
	  while (*tmp != trial && tmp != unfilled_slots_next)
	    tmp++;

	  /* Remove the unconditional jump from consideration for delay slot
	     filling and unthread it.  */
	  if (*tmp == trial)
	    *tmp = 0;
	  {
	    rtx next = NEXT_INSN (trial);
	    rtx prev = PREV_INSN (trial);

	    if (prev)
	      NEXT_INSN (prev) = next;
	    if (next)
	      PREV_INSN (next) = prev;
	  }
	}
      /* Now, scan backwards from the insn to search for a potential
	 delay-slot candidate.  Stop searching when a label or jump is hit.

	 For each candidate, if it is to go into the delay slot (moved
	 forward in execution sequence), it must not need or set any resources
	 that were set by later insns and must not set any resources that
	 are needed for those insns.

	 The delay slot insn itself sets resources unless it is a call
	 (in which case the called routine, not the insn itself, is doing
	 the setting).  */

      if (slots_filled < slots_to_fill)
	{
	  CLEAR_RESOURCE (&needed);
	  CLEAR_RESOURCE (&set);
	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
	  mark_referenced_resources (insn, &needed, false);

	  for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
	       trial = next_trial)
	    {
	      next_trial = prev_nonnote_insn (trial);

	      /* This must be an INSN or CALL_INSN.  */
	      pat = PATTERN (trial);

	      /* Stand-alone USE and CLOBBER are just for flow.  */
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
		continue;

	      /* Check for resource conflict first, to avoid unnecessary
		 splitting.  */
	      if (! insn_references_resource_p (trial, &set, true)
		  && ! insn_sets_resource_p (trial, &set, true)
		  && ! insn_sets_resource_p (trial, &needed, true)
#ifdef HAVE_cc0
		  /* Can't separate set of cc0 from its use.  */
		  && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
#endif
		  && ! can_throw_internal (trial))
		{
		  trial = try_split (pat, trial, 1);
		  next_trial = prev_nonnote_insn (trial);
		  if (eligible_for_delay (insn, slots_filled, trial, flags))
		    {
		      /* In this case, we are searching backward, so if we
			 find insns to put on the delay list, we want
			 to put them at the head, rather than the
			 tail, of the list.  */

		      update_reg_dead_notes (trial, insn);
		      delay_list = gen_rtx_INSN_LIST (VOIDmode,
						      trial, delay_list);
		      update_block (trial, trial);
		      delete_related_insns (trial);
		      if (slots_to_fill == ++slots_filled)
			break;
		      continue;
		    }

		  mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
		  mark_referenced_resources (trial, &needed, true);
		}
	    }
	}
      /* If all needed slots haven't been filled, we come here.  */

      /* Try to optimize case of jumping around a single insn.  */
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
      if (slots_filled != slots_to_fill
	  && delay_list == 0
	  && JUMP_P (insn)
	  && (condjump_p (insn) || condjump_in_parallel_p (insn)))
	{
	  delay_list = optimize_skip (insn);
	  if (delay_list)
	    slots_filled += 1;
	}
#endif
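      /* A hypothetical sketch of the case optimize_skip handles (added for
	 exposition; the mnemonics are made up):

	    bne  L1			bne,annul  L1
	    insn_a		==>	insn_a		; nullified when
	 L1:			     L1:		; the branch is taken

	 The insn being jumped around moves into the (annulled) delay slot,
	 so it still executes only on the fall-through path, and the jump
	 no longer skips any insn.  */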
      /* Try to get insns from beyond the insn needing the delay slot.
	 These insns can neither set or reference resources set in insns being
	 skipped, cannot set resources in the insn being skipped, and, if this
	 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
	 call might not return).

	 There used to be code which continued past the target label if
	 we saw all uses of the target label.  This code did not work,
	 because it failed to account for some instructions which were
	 both annulled and marked as from the target.  This can happen as a
	 result of optimize_skip.  Since this code was redundant with
	 fill_eager_delay_slots anyways, it was just deleted.  */

      if (slots_filled != slots_to_fill
	  /* If this instruction could throw an exception which is
	     caught in the same function, then it's not safe to fill
	     the delay slot with an instruction from beyond this
	     point.  For example, consider:

	       int i = 2;

	       try {
		 f();
		 i = 3;
	       } catch (...) {}

	       return i;

	     Even though `i' is a local variable, we must be sure not
	     to put `i = 3' in the delay slot if `f' might throw an
	     exception.

	     Presumably, we should also check to see if we could get
	     back to this function via `setjmp'.  */
	  && ! can_throw_internal (insn)
	  && (!JUMP_P (insn)
	      || ((condjump_p (insn) || condjump_in_parallel_p (insn))
		  && ! simplejump_p (insn)
		  && !ANY_RETURN_P (JUMP_LABEL (insn)))))
	{
	  /* Invariant: If insn is a JUMP_INSN, the insn's jump
	     label.  Otherwise, zero.  */
	  rtx target = 0;
	  int maybe_never = 0;
	  rtx pat, trial_delay;

	  CLEAR_RESOURCE (&needed);
	  CLEAR_RESOURCE (&set);

	  if (CALL_P (insn))
	    {
	      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	      mark_referenced_resources (insn, &needed, true);
	      maybe_never = 1;
	    }
	  else
	    {
	      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	      mark_referenced_resources (insn, &needed, true);
	      if (JUMP_P (insn))
		target = JUMP_LABEL (insn);
	    }

	  if (target == 0 || ANY_RETURN_P (target))
	    for (trial = next_nonnote_insn (insn); !stop_search_p (trial, 1);
		 trial = next_trial)
	      {
		next_trial = next_nonnote_insn (trial);

		/* This must be an INSN or CALL_INSN.  */
		pat = PATTERN (trial);

		/* Stand-alone USE and CLOBBER are just for flow.  */
		if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
		  continue;

		/* If this already has filled delay slots, get the insn needing
		   the delay slots.  */
		if (GET_CODE (pat) == SEQUENCE)
		  trial_delay = XVECEXP (pat, 0, 0);
		else
		  trial_delay = trial;

		/* Stop our search when seeing a jump.  */
		if (JUMP_P (trial_delay))
		  break;

		/* See if we have a resource problem before we try to
		   split.  */
		if (GET_CODE (pat) != SEQUENCE
		    && ! insn_references_resource_p (trial, &set, true)
		    && ! insn_sets_resource_p (trial, &set, true)
		    && ! insn_sets_resource_p (trial, &needed, true)
#ifdef HAVE_cc0
		    && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
#endif
		    && ! (maybe_never && may_trap_or_fault_p (pat))
		    && (trial = try_split (pat, trial, 0))
		    && eligible_for_delay (insn, slots_filled, trial, flags)
		    && ! can_throw_internal (trial))
		  {
		    next_trial = next_nonnote_insn (trial);
		    delay_list = add_to_delay_list (trial, delay_list);

#ifdef HAVE_cc0
		    if (reg_mentioned_p (cc0_rtx, pat))
		      link_cc0_insns (trial);
#endif
		    delete_related_insns (trial);
		    if (slots_to_fill == ++slots_filled)
		      break;
		    continue;
		  }

		mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
		mark_referenced_resources (trial, &needed, true);

		/* Ensure we don't put insns between the setting of cc and the
		   comparison by moving a setting of cc into an earlier delay
		   slot since these insns could clobber the condition code.  */
		set.cc = 1;

		/* If this is a call or jump, we might not get here.  */
		if (CALL_P (trial_delay)
		    || JUMP_P (trial_delay))
		  maybe_never = 1;
	      }
	  /* If there are slots left to fill and our search was stopped by an
	     unconditional branch, try the insn at the branch target.  We can
	     redirect the branch if it works.

	     Don't do this if the insn at the branch target is a branch.  */
	  if (slots_to_fill != slots_filled
	      && trial
	      && jump_to_label_p (trial)
	      && simplejump_p (trial)
	      && (target == 0 || JUMP_LABEL (trial) == target)
	      && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
	      && ! (NONJUMP_INSN_P (next_trial)
		    && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
	      && !JUMP_P (next_trial)
	      && ! insn_references_resource_p (next_trial, &set, true)
	      && ! insn_sets_resource_p (next_trial, &set, true)
	      && ! insn_sets_resource_p (next_trial, &needed, true)
#ifdef HAVE_cc0
	      && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
#endif
	      && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
	      && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
	      && eligible_for_delay (insn, slots_filled, next_trial, flags)
	      && ! can_throw_internal (trial))
	    {
	      /* See comment in relax_delay_slots about necessity of using
		 next_real_insn here.  */
	      rtx new_label = next_real_insn (next_trial);

	      if (new_label != 0)
		new_label = get_label_before (new_label);
	      else
		new_label = find_end_label (simple_return_rtx);

	      if (new_label)
		{
		  delay_list
		    = add_to_delay_list (copy_rtx (next_trial), delay_list);
		  slots_filled++;
		  reorg_redirect_jump (trial, new_label);

		  /* If we merged because we both jumped to the same place,
		     redirect the original insn also.  */
		  if (target)
		    reorg_redirect_jump (insn, new_label);
		}
	    }
	}
      /* If this is an unconditional jump, then try to get insns from the
	 target of the jump.  */
      if (JUMP_P (insn)
	  && simplejump_p (insn)
	  && slots_filled != slots_to_fill)
	delay_list
	  = fill_slots_from_thread (insn, const_true_rtx,
				    next_active_insn (JUMP_LABEL (insn)),
				    NULL, 1, 1,
				    own_thread_p (JUMP_LABEL (insn),
						  JUMP_LABEL (insn), 0),
				    slots_to_fill, &slots_filled,
				    delay_list);

      if (delay_list)
	unfilled_slots_base[i]
	  = emit_delay_sequence (insn, delay_list, slots_filled);

      if (slots_to_fill == slots_filled)
	unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 0);
    }
#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* See if the epilogue needs any delay slots.  Try to fill them if so.
     The only thing we can do is scan backwards from the end of the
     function.  If we did this in a previous pass, it is incorrect to do it
     again.  */
  if (crtl->epilogue_delay_list)
    return;

  slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
  if (slots_to_fill == 0)
    return;

  slots_filled = 0;
  CLEAR_RESOURCE (&set);

  /* The frame pointer and stack pointer are needed at the beginning of
     the epilogue, so instructions setting them can not be put in the
     epilogue delay slot.  However, everything else needed at function
     end is safe, so we don't want to use end_of_function_needs here.  */
  CLEAR_RESOURCE (&needed);
  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
#endif
      if (! EXIT_IGNORE_STACK
	  || current_function_sp_is_unchanging)
	SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);

#ifdef EPILOGUE_USES
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (EPILOGUE_USES (i))
	SET_HARD_REG_BIT (needed.regs, i);
    }
#endif

  for (trial = get_last_insn (); ! stop_search_p (trial, 1);
       trial = PREV_INSN (trial))
    {
      if (NOTE_P (trial))
	continue;
      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (! insn_references_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &needed, true)
	  && ! insn_sets_resource_p (trial, &set, true)
#ifdef HAVE_cc0
	  /* Don't want to mess with cc0 here.  */
	  && ! reg_mentioned_p (cc0_rtx, pat)
#endif
	  && ! can_throw_internal (trial))
	{
	  trial = try_split (pat, trial, 1);
	  if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
	    {
	      /* Here as well we are searching backward, so put the
		 insns we find on the head of the list.  */
	      crtl->epilogue_delay_list
		= gen_rtx_INSN_LIST (VOIDmode, trial,
				     crtl->epilogue_delay_list);
	      mark_end_of_function_resources (trial, true);
	      update_block (trial, trial);
	      delete_related_insns (trial);

	      /* Clear deleted bit so final.c will output the insn.  */
	      INSN_DELETED_P (trial) = 0;

	      if (slots_to_fill == ++slots_filled)
		break;
	      continue;
	    }

	  mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
	  mark_referenced_resources (trial, &needed, true);
	}
    }

  note_delay_statistics (slots_filled, 0);
#endif
}
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   Return a suitable return rtx if the chain ultimately leads to a
   return instruction.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.  */

static rtx
follow_jumps (rtx label)
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  if (ANY_RETURN_P (label))
    return label;

  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& JUMP_P (insn)
	&& JUMP_LABEL (insn) != NULL_RTX
	&& ((any_uncondjump_p (insn) && onlyjump_p (insn))
	    || ANY_RETURN_P (PATTERN (insn)))
	&& (next = NEXT_INSN (insn))
	&& BARRIER_P (next));
       depth++)
    {
      rtx this_label = JUMP_LABEL (insn);
      rtx tem;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (this_label == label)
	return label;
      if (ANY_RETURN_P (this_label))
	return this_label;
      tem = next_active_insn (this_label);
      if (tem
	  && (GET_CODE (PATTERN (tem)) == ADDR_VEC
	      || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = this_label;
    }
  if (depth == 10)
    return label;
  return value;
}
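/* A small hypothetical example of the chain following above (labels made
   up for exposition):

	jump L1   ...   L1: jump L2   ...   L2: real_insn

   follow_jumps (L1) returns L2, so a branch to L1 can be redirected
   straight to L2.  A chain deeper than 10 jumps, a cycle, or a jump table
   (ADDR_VEC) stops the walk and the original label is kept.  */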
/* Try to find insns to place in delay slots.

   INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
   or is an unconditional branch if CONDITION is const_true_rtx.
   *PSLOTS_FILLED is updated with the number of slots that we have filled.

   THREAD is a flow-of-control, either the insns to be executed if the
   branch is true or if the branch is false, THREAD_IF_TRUE says which.

   OPPOSITE_THREAD is the thread in the opposite direction.  It is used
   to see if any potential delay slot insns set things needed there.

   LIKELY is nonzero if it is extremely likely that the branch will be
   taken and THREAD_IF_TRUE is set.  This is used for the branch at the
   end of a loop back up to the top.

   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
   thread.  I.e., it is the fallthrough code of our jump or the target of the
   jump when we are the only jump going there.

   If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
   case, we can only take insns from the head of the thread for our delay
   slot.  We then adjust the jump to point after the insns we have taken.  */

static rtx
fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
			rtx opposite_thread, int likely, int thread_if_true,
			int own_thread, int slots_to_fill,
			int *pslots_filled, rtx delay_list)
{
  rtx new_thread;
  struct resources opposite_needed, set, needed;
  rtx trial;
  int lose = 0;
  int must_annul = 0;
  int flags;

  /* Validate our arguments.  */
  gcc_assert (condition != const_true_rtx || thread_if_true);
  gcc_assert (own_thread || thread_if_true);

  flags = get_jump_flags (insn, JUMP_LABEL (insn));
  /* If our thread is the end of subroutine, we can't get any delay
     insns from that.  */
  if (thread == NULL_RTX || ANY_RETURN_P (thread))
    return delay_list;

  /* If this is an unconditional branch, nothing is needed at the
     opposite thread.  Otherwise, compute what is needed there.  */
  if (condition == const_true_rtx)
    CLEAR_RESOURCE (&opposite_needed);
  else
    mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);

  /* If the insn at THREAD can be split, do it here to avoid having to
     update THREAD and NEW_THREAD if it is done in the loop below.  Also
     initialize NEW_THREAD.  */

  new_thread = thread = try_split (PATTERN (thread), thread, 0);

  /* Scan insns at THREAD.  We are looking for an insn that can be removed
     from THREAD (it neither sets nor references resources that were set
     ahead of it and it doesn't set anything needed by the insns ahead of
     it) and that either can be placed in an annulling insn or aren't
     needed at OPPOSITE_THREAD.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If we do not own this thread, we must stop as soon as we find
     something that we can't put in a delay slot, since all we can do
     is branch into THREAD at a later point.  Therefore, labels stop
     the search if this is not the `true' thread.  */

  for (trial = thread;
       ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
       trial = next_nonnote_insn (trial))
    {
      rtx pat, old_trial;
      /* If we have passed a label, we no longer own this thread.  */
      if (LABEL_P (trial))
	{
	  own_thread = 0;
	  continue;
	}

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
	 don't separate or copy insns that set and use CC0.  */
      if (! insn_references_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &needed, true)
#ifdef HAVE_cc0
	  && ! (reg_mentioned_p (cc0_rtx, pat)
		&& (! own_thread || ! sets_cc0_p (pat)))
#endif
	  && ! can_throw_internal (trial))
	{
	  rtx prior_insn;

	  /* If TRIAL is redundant with some insn before INSN, we don't
	     actually need to add it to the delay list; we can merely pretend
	     we did.  */
	  if ((prior_insn = redundant_insn (trial, insn, delay_list)))
	    {
	      fix_reg_dead_note (prior_insn, insn);
	      if (own_thread)
		{
		  update_block (trial, thread);
		  if (trial == thread)
		    {
		      thread = next_active_insn (thread);
		      if (new_thread == trial)
			new_thread = thread;
		    }

		  delete_related_insns (trial);
		}
	      else
		{
		  update_reg_unused_notes (prior_insn, trial);
		  new_thread = next_active_insn (trial);
		}

	      continue;
	    }
	  /* There are two ways we can win:  If TRIAL doesn't set anything
	     needed at the opposite thread and can't trap, or if it can
	     go into an annulled delay slot.  */
	  if (!must_annul
	      && (condition == const_true_rtx
		  || (! insn_sets_resource_p (trial, &opposite_needed, true)
		      && ! may_trap_or_fault_p (pat)
		      && ! RTX_FRAME_RELATED_P (trial))))
	    {
	      old_trial = trial;
	      trial = try_split (pat, trial, 0);
	      if (new_thread == old_trial)
		new_thread = trial;
	      if (thread == old_trial)
		thread = trial;
	      pat = PATTERN (trial);
	      if (eligible_for_delay (insn, *pslots_filled, trial, flags))
		goto winner;
	    }
	  else if (0
#ifdef ANNUL_IFTRUE_SLOTS
		   || ! thread_if_true
#endif
#ifdef ANNUL_IFFALSE_SLOTS
		   || thread_if_true
#endif
		   )
	    {
	      old_trial = trial;
	      trial = try_split (pat, trial, 0);
	      if (new_thread == old_trial)
		new_thread = trial;
	      if (thread == old_trial)
		thread = trial;
	      pat = PATTERN (trial);
	      if ((must_annul || delay_list == NULL) && (thread_if_true
		   ? check_annul_list_true_false (0, delay_list)
		     && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
		   : check_annul_list_true_false (1, delay_list)
		     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
		{
		  rtx temp;

		  must_annul = 1;
		winner:
#ifdef HAVE_cc0
		  if (reg_mentioned_p (cc0_rtx, pat))
		    link_cc0_insns (trial);
#endif

		  /* If we own this thread, delete the insn.  If this is the
		     destination of a branch, show that a basic block status
		     may have been updated.  In any case, mark the new
		     starting point of this thread.  */
		  if (own_thread)
		    {
		      rtx note;

		      update_block (trial, thread);
		      if (trial == thread)
			{
			  thread = next_active_insn (thread);
			  if (new_thread == trial)
			    new_thread = thread;
			}

		      /* We are moving this insn, not deleting it.  We must
			 temporarily increment the use count on any referenced
			 label lest it be deleted by delete_related_insns.  */
		      for (note = REG_NOTES (trial);
			   note != NULL_RTX;
			   note = XEXP (note, 1))
			if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
			    || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
			  {
			    /* REG_LABEL_OPERAND could be
			       NOTE_INSN_DELETED_LABEL too.  */
			    if (LABEL_P (XEXP (note, 0)))
			      LABEL_NUSES (XEXP (note, 0))++;
			    else
			      gcc_assert (REG_NOTE_KIND (note)
					  == REG_LABEL_OPERAND);
			  }
		      if (jump_to_label_p (trial))
			LABEL_NUSES (JUMP_LABEL (trial))++;

		      delete_related_insns (trial);

		      for (note = REG_NOTES (trial);
			   note != NULL_RTX;
			   note = XEXP (note, 1))
			if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
			    || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
			  {
			    /* REG_LABEL_OPERAND could be
			       NOTE_INSN_DELETED_LABEL too.  */
			    if (LABEL_P (XEXP (note, 0)))
			      LABEL_NUSES (XEXP (note, 0))--;
			    else
			      gcc_assert (REG_NOTE_KIND (note)
					  == REG_LABEL_OPERAND);
			  }
		      if (jump_to_label_p (trial))
			LABEL_NUSES (JUMP_LABEL (trial))--;
		    }
		  else
		    new_thread = next_active_insn (trial);

		  temp = own_thread ? trial : copy_rtx (trial);
		  if (thread_if_true)
		    INSN_FROM_TARGET_P (temp) = 1;

		  delay_list = add_to_delay_list (temp, delay_list);

		  if (slots_to_fill == ++(*pslots_filled))
		    {
		      /* Even though we have filled all the slots, we
			 may be branching to a location that has a
			 redundant insn.  Skip any if so.  */
		      while (new_thread && ! own_thread
			     && ! insn_sets_resource_p (new_thread, &set, true)
			     && ! insn_sets_resource_p (new_thread, &needed,
							true)
			     && ! insn_references_resource_p (new_thread,
							      &set, true)
			     && (prior_insn
				 = redundant_insn (new_thread, insn,
						   delay_list)))
			{
			  /* We know we do not own the thread, so no need
			     to call update_block and delete_insn.  */
			  fix_reg_dead_note (prior_insn, insn);
			  update_reg_unused_notes (prior_insn, new_thread);
			  new_thread = next_active_insn (new_thread);
			}
		      break;
		    }

		  continue;
		}
	    }
	}
      /* This insn can't go into a delay slot.  */
      lose = 1;
      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, true);

      /* Ensure we don't put insns between the setting of cc and the comparison
	 by moving a setting of cc into an earlier delay slot since these insns
	 could clobber the condition code.  */
      set.cc = 1;

      /* If this insn is a register-register copy and the next insn has
	 a use of our destination, change it to use our source.  That way,
	 it will become a candidate for our delay slot the next time
	 through this loop.  This case occurs commonly in loops that
	 scan a list.

	 We could check for more complex cases than those tested below,
	 but it doesn't seem worth it.  It might also be a good idea to try
	 to swap the two insns.  That might do better.

	 We can't do this if the next insn modifies our destination, because
	 that would make the replacement into the insn invalid.  We also can't
	 do this if it modifies our source, because it might be an earlyclobber
	 operand.  This latter test also prevents updating the contents of
	 a PRE_INC.  We also can't do this if there's overlap of source and
	 destination.  Overlap may happen for larger-than-register-size modes.  */

      if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
	  && REG_P (SET_SRC (pat))
	  && REG_P (SET_DEST (pat))
	  && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
	{
	  rtx next = next_nonnote_insn (trial);

	  if (next && NONJUMP_INSN_P (next)
	      && GET_CODE (PATTERN (next)) != USE
	      && ! reg_set_p (SET_DEST (pat), next)
	      && ! reg_set_p (SET_SRC (pat), next)
	      && reg_referenced_p (SET_DEST (pat), PATTERN (next))
	      && ! modified_in_p (SET_DEST (pat), next))
	    validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
	}
    }
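  /* Illustration of the copy propagation attempted just above (a
     hypothetical sketch; the register numbers are made up):

	trial:	r2 = r1
	next:	r3 = r2 + 4	==>	next:	r3 = r1 + 4

     After the replacement, NEXT no longer reads TRIAL's destination, so
     NEXT may become a delay-slot candidate on a later iteration.  */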
  /* If we stopped on a branch insn that has delay slots, see if we can
     steal some of the insns in those slots.  */
  if (trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      /* If this is the `true' thread, we will want to follow the jump,
	 so we can only do this if we have taken everything up to here.  */
      if (thread_if_true && trial == new_thread)
	{
	  delay_list
	    = steal_delay_list_from_target (insn, condition, PATTERN (trial),
					    delay_list, &set, &needed,
					    &opposite_needed, slots_to_fill,
					    pslots_filled, &must_annul,
					    &new_thread);
	  /* If we owned the thread and are told that it branched
	     elsewhere, make sure we own the thread at the new location.  */
	  if (own_thread && trial != new_thread)
	    own_thread = own_thread_p (new_thread, new_thread, 0);
	}
      else if (! thread_if_true)
	delay_list
	  = steal_delay_list_from_fallthrough (insn, condition,
					       PATTERN (trial),
					       delay_list, &set, &needed,
					       &opposite_needed, slots_to_fill,
					       pslots_filled, &must_annul);
    }
  /* If we haven't found anything for this delay slot and it is very
     likely that the branch will be taken, see if the insn at our target
     increments or decrements a register with an increment that does not
     depend on the destination register.  If so, try to place the opposite
     arithmetic insn after the jump insn and put the arithmetic insn in the
     delay slot.  If we can't do this, return.  */
  if (delay_list == 0 && likely
      && new_thread && !ANY_RETURN_P (new_thread)
      && NONJUMP_INSN_P (new_thread)
      && !RTX_FRAME_RELATED_P (new_thread)
      && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
      && asm_noperands (PATTERN (new_thread)) < 0)
    {
      rtx pat = PATTERN (new_thread);
      rtx dest;
      rtx src;

      trial = new_thread;
      pat = PATTERN (trial);

      if (!NONJUMP_INSN_P (trial)
	  || GET_CODE (pat) != SET
	  || ! eligible_for_delay (insn, 0, trial, flags)
	  || can_throw_internal (trial))
	return 0;

      dest = SET_DEST (pat), src = SET_SRC (pat);
      if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
	  && rtx_equal_p (XEXP (src, 0), dest)
	  && (!FLOAT_MODE_P (GET_MODE (src))
	      || flag_unsafe_math_optimizations)
	  && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
	  && ! side_effects_p (pat))
	{
	  rtx other = XEXP (src, 1);
	  rtx new_arith;
	  rtx ninsn;

	  /* If this is a constant adjustment, use the same code with
	     the negated constant.  Otherwise, reverse the sense of the
	     arithmetic.  */
	  if (CONST_INT_P (other))
	    new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
					negate_rtx (GET_MODE (src), other));
	  else
	    new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
					GET_MODE (src), dest, other);

	  ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
				   insn);

	  if (recog_memoized (ninsn) < 0
	      || (extract_insn (ninsn), ! constrain_operands (1)))
	    {
	      delete_related_insns (ninsn);
	      return 0;
	    }

	  if (own_thread)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		{
		  thread = next_active_insn (thread);
		  if (new_thread == trial)
		    new_thread = thread;
		}
	      delete_related_insns (trial);
	    }
	  else
	    new_thread = next_active_insn (trial);

	  ninsn = own_thread ? trial : copy_rtx (trial);
	  if (thread_if_true)
	    INSN_FROM_TARGET_P (ninsn) = 1;

	  delay_list = add_to_delay_list (ninsn, NULL_RTX);
	  (*pslots_filled)++;
	}
    }
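  /* Hypothetical example of the transformation above for a likely branch
     (insns and register numbers made up for exposition; new_thread is the
     branch target):

	insn:	beq  L1			insn:	beq  L1	 { r4 = r4 + 1 }
	...			==>		r4 = r4 - 1
     L1:	r4 = r4 + 1			...

     The increment moves into the delay slot (executed whether or not the
     branch is taken) and the compensating decrement lands on the
     fall-through path, so r4 ends up correct on both paths.  */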
  if (delay_list && must_annul)
    INSN_ANNULLED_BRANCH_P (insn) = 1;

  /* If we are to branch into the middle of this thread, find an appropriate
     label or make a new one if none, and redirect INSN to it.  If we hit the
     end of the function, use the end-of-function label.  */
  if (new_thread != thread)
    {
      rtx label;

      gcc_assert (thread_if_true);

      if (new_thread && simplejump_or_return_p (new_thread)
	  && redirect_with_delay_list_safe_p (insn,
					      JUMP_LABEL (new_thread),
					      delay_list))
	new_thread = follow_jumps (JUMP_LABEL (new_thread));

      if (ANY_RETURN_P (new_thread))
	label = find_end_label (new_thread);
      else if (LABEL_P (new_thread))
	label = new_thread;
      else
	label = get_label_before (new_thread);

      if (label)
	reorg_redirect_jump (insn, label);
    }

  return delay_list;
}
/* Make another attempt to find insns to place in delay slots.

   We previously looked for insns located in front of the delay insn
   and, for non-jump delay insns, located behind the delay insn.

   Here only try to schedule jump insns and try to move insns from either
   the target or the following insns into the delay slot.  If annulling is
   supported, we will be likely to do this.  Otherwise, we can do this only
   if safe.  */

static void
fill_eager_delay_slots (void)
{
  rtx insn;
  int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      rtx condition;
      rtx target_label, insn_at_target, fallthrough_insn;
      rtx delay_list = 0;
      int own_target;
      int own_fallthrough;
      int prediction, slots_to_fill, slots_filled;

      insn = unfilled_slots_base[i];
      if (insn == 0
	  || INSN_DELETED_P (insn)
	  || !JUMP_P (insn)
	  || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
	continue;

      slots_to_fill = num_delay_slots (insn);
      /* Some machine descriptions have defined instructions to have
	 delay slots only in certain circumstances which may depend on
	 nearby insns (which change due to reorg's actions).

	 For example, the PA port normally has delay slots for unconditional
	 jumps.

	 However, the PA port claims such jumps do not have a delay slot
	 if they are immediate successors of certain CALL_INSNs.  This
	 allows the port to favor filling the delay slot of the call with
	 the unconditional jump.  */
      if (slots_to_fill == 0)
	continue;

      slots_filled = 0;
      target_label = JUMP_LABEL (insn);
      condition = get_branch_condition (insn, target_label);

      if (condition == 0)
	continue;

      /* Get the next active fallthrough and target insns and see if we own
	 them.  Then see whether the branch is likely true.  We don't need
	 to do a lot of this for unconditional branches.  */

      insn_at_target = first_active_target_insn (target_label);
      own_target = own_thread_p (target_label, target_label, 0);

      if (condition == const_true_rtx)
	{
	  own_fallthrough = 0;
	  fallthrough_insn = 0;
	  prediction = 2;
	}
      else
	{
	  fallthrough_insn = next_active_insn (insn);
	  own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
	  prediction = mostly_true_jump (insn, condition);
	}

      /* If this insn is expected to branch, first try to get insns from our
	 target, then our fallthrough insns.  If it is not expected to branch,
	 try the other order.  */

      if (prediction > 0)
	{
	  delay_list
	    = fill_slots_from_thread (insn, condition, insn_at_target,
				      fallthrough_insn, prediction == 2, 1,
				      own_target,
				      slots_to_fill, &slots_filled, delay_list);

	  if (delay_list == 0 && own_fallthrough)
	    {
	      /* Even though we didn't find anything for delay slots,
		 we might have found a redundant insn which we deleted
		 from the thread that was filled.  So we have to recompute
		 the next insn at the target.  */
	      target_label = JUMP_LABEL (insn);
	      insn_at_target = first_active_target_insn (target_label);

	      delay_list
		= fill_slots_from_thread (insn, condition, fallthrough_insn,
					  insn_at_target, 0, 0,
					  own_fallthrough,
					  slots_to_fill, &slots_filled,
					  delay_list);
	    }
	}
      else
	{
	  if (own_fallthrough)
	    delay_list
	      = fill_slots_from_thread (insn, condition, fallthrough_insn,
					insn_at_target, 0, 0,
					own_fallthrough,
					slots_to_fill, &slots_filled,
					delay_list);

	  if (delay_list == 0)
	    delay_list
	      = fill_slots_from_thread (insn, condition, insn_at_target,
					next_active_insn (insn), 0, 1,
					own_target,
					slots_to_fill, &slots_filled,
					delay_list);
	}

      if (delay_list)
	unfilled_slots_base[i]
	  = emit_delay_sequence (insn, delay_list, slots_filled);

      if (slots_to_fill == slots_filled)
	unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 1);
    }
}
static void delete_computation (rtx insn);

/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (NONJUMP_INSN_P (our_prev)
		    || CALL_P (our_prev));
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (CALL_P (our_prev)
	  && (! RTL_CONST_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  We can be run during
	 and after reorg, so SEQUENCE rtl can legitimately show
	 up here.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && NONJUMP_INSN_P (XEXP (pat, 0)))
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	continue;

      if (reg_set_p (reg, pat))
	{
	  if (side_effects_p (pat) && !CALL_P (our_prev))
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && REG_P (SET_DEST (pat)))
	    {
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno = END_REGNO (SET_DEST (pat));
	      int regno = REGNO (reg);
	      int endregno = END_REGNO (reg);

	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  add_reg_note (our_prev, REG_UNUSED, reg);

		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}

/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.

   Look at all our REG_DEAD notes.  If a previous insn does nothing other
   than set a register that dies in this insn, we can delete that insn
   as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (rtx insn)
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && NONJUMP_INSN_P (prev)
	  && sets_cc0_p (PATTERN (prev)))
	{
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    add_reg_note (prev, REG_UNUSED, cc0_rtx);
	}
    }
#endif

  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || !REG_P (XEXP (note, 0)))
	continue;

      delete_prior_computation (note, insn);
    }

  delete_related_insns (insn);
}
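/* Hypothetical example of the recursive deletion above (insns and
   register numbers made up for exposition): when

	insn_1:	r2 = r3 * r4
	insn_2:	r5 = r2 + 1	(REG_DEAD (reg 2))

   insn_2 is deleted, its REG_DEAD note shows that insn_1's result is used
   nowhere else, so insn_1 is deleted as well, and the walk continues with
   insn_1's own REG_DEAD notes.  */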
/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

static void
delete_jump (rtx insn)
{
  rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == PC)
    delete_computation (insn);
}

/* Return the label before the first active insn after X, or zero if the
   backward scan reaches SCAN_LIMIT or the start of the insn chain first.  */

static rtx
label_before_next_insn (rtx x, rtx scan_limit)
{
  rtx insn = next_active_insn (x);
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == scan_limit || insn == NULL_RTX)
	return NULL_RTX;
      if (LABEL_P (insn))
	break;
    }
  return insn;
}
/* Once we have tried two ways to fill a delay slot, make a pass over the
   code to try to improve the results and to do such things as more jump
   threading.  */

static void
relax_delay_slots (rtx first)
{
  rtx insn, next, pat;
  rtx trial, delay_insn, target_label;

  /* Look at every JUMP_INSN and see if we can improve it.  */
  for (insn = first; insn; insn = next)
    {
      rtx other;

      next = next_active_insn (insn);

      /* If this is a jump insn, see if it now jumps to a jump, jumps to
	 the next insn, or jumps to a label that is not the last of a
	 group of consecutive labels.  */
      if (JUMP_P (insn)
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
	{
	  target_label
	    = skip_consecutive_labels (follow_jumps (target_label));
	  if (ANY_RETURN_P (target_label))
	    target_label = find_end_label (target_label);

	  if (target_label && next_active_insn (target_label) == next
	      && ! condjump_in_parallel_p (insn))
	    {
	      delete_jump (insn);
	      continue;
	    }

	  if (target_label && target_label != JUMP_LABEL (insn))
	    reorg_redirect_jump (insn, target_label);
	  /* See if this jump conditionally branches around an unconditional
	     jump.  If so, invert this jump and point it to the target of the
	     second jump.  */
	  if (next && simplejump_or_return_p (next)
	      && any_condjump_p (insn)
	      && target_label
	      && next_active_insn (target_label) == next_active_insn (next)
	      && no_labels_between_p (insn, next))
	    {
	      rtx label = JUMP_LABEL (next);

	      /* Be careful how we do this to avoid deleting code or
		 labels that are momentarily dead.  See similar optimization
		 in jump.c.

		 We also need to ensure we properly handle the case when
		 invert_jump fails.  */

	      ++LABEL_NUSES (target_label);
	      if (!ANY_RETURN_P (label))
		++LABEL_NUSES (label);

	      if (invert_jump (insn, label, 1))
		{
		  delete_related_insns (next);
		  next = insn;
		}

	      if (!ANY_RETURN_P (label))
		--LABEL_NUSES (label);

	      if (--LABEL_NUSES (target_label) == 0)
		delete_related_insns (target_label);

	      continue;
	    }
	}
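      /* Hypothetical shape of the inversion above (labels made up for
	 exposition):

	    beq  L1			bne  L2
	    jump L2		==>
	 L1:			     L1:

	 The conditional branch is inverted to target the unconditional
	 jump's destination, and the now-unreachable jump is deleted.  */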
      /* If this is an unconditional jump and the previous insn is a
	 conditional jump, try reversing the condition of the previous
	 insn and swapping our targets.  The next pass might be able to
	 fill the slots.

	 Don't do this if we expect the conditional branch to be true, because
	 we would then be making the more common case longer.  */

      if (simplejump_or_return_p (insn)
	  && (other = prev_active_insn (insn)) != 0
	  && any_condjump_p (other)
	  && no_labels_between_p (other, insn)
	  && 0 > mostly_true_jump (other,
				   get_branch_condition (other,
							 JUMP_LABEL (other))))
	{
	  rtx other_target = JUMP_LABEL (other);
	  target_label = JUMP_LABEL (insn);

	  if (invert_jump (other, target_label, 0))
	    reorg_redirect_jump (insn, other_target);
	}
      /* Now look only at cases where we have filled a delay slot.  */
      if (!NONJUMP_INSN_P (insn)
	  || GET_CODE (PATTERN (insn)) != SEQUENCE)
	continue;

      pat = PATTERN (insn);
      delay_insn = XVECEXP (pat, 0, 0);

      /* See if the first insn in the delay slot is redundant with some
	 previous insn.  Remove it from the delay slot if so; then set up
	 to reprocess this insn.  */
      if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
	{
	  delete_from_delay_slot (XVECEXP (pat, 0, 1));
	  next = prev_active_insn (next);
	  continue;
	}
      /* See if we have a RETURN insn with a filled delay slot followed
	 by a RETURN insn with an unfilled delay slot.  If so, we can delete
	 the first RETURN (but not its delay insn).  This gives the same
	 effect in fewer instructions.

	 Only do so if optimizing for size since this results in slower, but
	 smaller code.  */
      if (optimize_function_for_size_p (cfun)
	  && ANY_RETURN_P (PATTERN (delay_insn))
	  && next
	  && JUMP_P (next)
	  && PATTERN (next) == PATTERN (delay_insn))
	{
	  rtx after;
	  int i;

	  /* Delete the RETURN and just execute the delay list insns.

	     We do this by deleting the INSN containing the SEQUENCE, then
	     re-emitting the insns separately, and then deleting the RETURN.
	     This allows the count of the jump target to be properly
	     decremented.

	     Note that we need to change the INSN_UID of the re-emitted insns
	     since it is used to hash the insns for mark_target_live_regs and
	     the re-emitted insns will no longer be wrapped up in a SEQUENCE.

	     Clear the from target bit, since these insns are no longer
	     in delay slots.  */
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

	  trial = PREV_INSN (insn);
	  delete_related_insns (insn);
	  gcc_assert (GET_CODE (pat) == SEQUENCE);
	  add_insn_after (delay_insn, trial, NULL);
	  after = delay_insn;
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
	  delete_scheduled_jump (delay_insn);
	  continue;
	}
      /* Now look only at the cases where we have a filled JUMP_INSN.  */
      if (!JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
	  || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
		|| condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
	continue;

      target_label = JUMP_LABEL (delay_insn);
      if (target_label && ANY_RETURN_P (target_label))
	continue;

      /* If this jump goes to another unconditional jump, thread it, but
	 don't convert a jump into a RETURN here.  */
      trial = skip_consecutive_labels (follow_jumps (target_label));
      if (ANY_RETURN_P (trial))
	trial = find_end_label (trial);

      if (trial && trial != target_label
	  && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
	{
	  reorg_redirect_jump (delay_insn, trial);
	  target_label = trial;
	}
      /* If the first insn at TARGET_LABEL is redundant with a previous
	 insn, redirect the jump to the following insn and process again.
	 We use next_real_insn instead of next_active_insn so we
	 don't skip USE-markers, or we'll end up with incorrect
	 liveness info.  */
      trial = next_real_insn (target_label);
      if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
	  && redundant_insn (trial, insn, 0)
	  && ! can_throw_internal (trial))
	{
	  /* Figure out where to emit the special USE insn so we don't
	     later incorrectly compute register live/death info.  */
	  rtx tmp = next_active_insn (trial);
	  if (tmp == 0)
	    tmp = find_end_label (simple_return_rtx);

	  if (tmp)
	    {
	      /* Insert the special USE insn and update dataflow info.  */
	      update_block (trial, tmp);

	      /* Now emit a label before the special USE insn, and
		 redirect our jump to the new label.  */
	      target_label = get_label_before (PREV_INSN (tmp));
	      reorg_redirect_jump (delay_insn, target_label);
	      next = insn;
	      continue;
	    }
	}
      /* Similarly, if it is an unconditional jump with one insn in its
	 delay list and that insn is redundant, thread the jump.  */
      if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
	  && XVECLEN (PATTERN (trial), 0) == 2
	  && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
	  && simplejump_or_return_p (XVECEXP (PATTERN (trial), 0, 0))
	  && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
	{
	  target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
	  if (ANY_RETURN_P (target_label))
	    target_label = find_end_label (target_label);

	  if (target_label
	      && redirect_with_delay_slots_safe_p (delay_insn, target_label,
						   insn))
	    {
	      reorg_redirect_jump (delay_insn, target_label);
	      next = insn;
	      continue;
	    }
	}
      /* See if we have a simple (conditional) jump that is useless.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && ! condjump_in_parallel_p (delay_insn)
	  && prev_active_insn (target_label) == insn
	  && ! BARRIER_P (prev_nonnote_insn (target_label))
#ifdef HAVE_cc0
	  /* If the last insn in the delay slot sets CC0 for some insn,
	     various code assumes that it is in a delay slot.  We could
	     put it back where it belonged and delete the register notes,
	     but it doesn't seem worthwhile in this uncommon case.  */
	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
			      REG_CC_USER, NULL_RTX)
#endif
	  )
	{
	  rtx after;
	  int i;

	  /* All this insn does is execute its delay list and jump to the
	     following insn.  So delete the jump and just execute the delay
	     list insns.

	     We do this by deleting the INSN containing the SEQUENCE, then
	     re-emitting the insns separately, and then deleting the jump.
	     This allows the count of the jump target to be properly
	     decremented.

	     Note that we need to change the INSN_UID of the re-emitted insns
	     since it is used to hash the insns for mark_target_live_regs and
	     the re-emitted insns will no longer be wrapped up in a SEQUENCE.

	     Clear the from target bit, since these insns are no longer
	     in delay slots.  */
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

	  trial = PREV_INSN (insn);
	  delete_related_insns (insn);
	  gcc_assert (GET_CODE (pat) == SEQUENCE);
	  add_insn_after (delay_insn, trial, NULL);
	  after = delay_insn;
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
	  delete_scheduled_jump (delay_insn);
	  continue;
	}
      /* See if this is an unconditional jump around a single insn which is
	 identical to the one in its delay slot.  In this case, we can just
	 delete the branch and the insn in its delay slot.  */
      if (next && NONJUMP_INSN_P (next)
	  && label_before_next_insn (next, insn) == target_label
	  && simplejump_p (insn)
	  && XVECLEN (pat, 0) == 2
	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
	{
	  delete_related_insns (insn);
	  continue;
	}
      /* See if this jump (with its delay slots) conditionally branches
	 around an unconditional jump (without delay slots).  If so, invert
	 this jump and point it to the target of the second jump.  We cannot
	 do this for annulled jumps, though.  Again, don't convert a jump to
	 a RETURN here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && any_condjump_p (delay_insn)
	  && next && simplejump_or_return_p (next)
	  && next_active_insn (target_label) == next_active_insn (next)
	  && no_labels_between_p (insn, next))
	{
	  rtx label = JUMP_LABEL (next);
	  rtx old_label = JUMP_LABEL (delay_insn);

	  if (ANY_RETURN_P (label))
	    label = find_end_label (label);

	  /* find_end_label can generate a new label.  Check this first.  */
	  if (label
	      && no_labels_between_p (insn, next)
	      && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
	    {
	      /* Be careful how we do this to avoid deleting code or labels
		 that are momentarily dead.  See similar optimization in
		 jump.c  */
	      if (old_label)
		++LABEL_NUSES (old_label);

	      if (invert_jump (delay_insn, label, 1))
		{
		  int i;

		  /* Must update the INSN_FROM_TARGET_P bits now that
		     the branch is reversed, so that mark_target_live_regs
		     will handle the delay slot insn correctly.  */
		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
		    {
		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
		    }

		  delete_related_insns (next);
		  next = insn;
		}

	      if (old_label && --LABEL_NUSES (old_label) == 0)
		delete_related_insns (old_label);
	      continue;
	    }
	}
      /* If we own the thread opposite the way this insn branches, see if we
	 can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	  && own_thread_p (NEXT_INSN (insn), 0, 1))
	try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	       && own_thread_p (target_label, target_label, 0))
	try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
	 NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */

static void
make_return_insns (rtx first)
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = function_return_label;
  rtx real_simple_return_label = function_simple_return_label;
  int slots, i;

#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* If a previous pass filled delay slots in the epilogue, things get a
     bit more complicated, as those filler insns would generally (without
     data flow analysis) have to be executed after any existing branch
     delay slot filler insns.  It is also unknown whether such a
     transformation would actually be profitable.  Note that the existing
     code only cares for branches with (some) filled delay slots.  */
  if (crtl->epilogue_delay_list != NULL)
    return;
#endif

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
      {
	rtx t = get_label_before (insn);
	if (PATTERN (insn) == ret_rtx)
	  real_return_label = t;
	else
	  real_simple_return_label = t;
	break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  if (real_return_label)
    LABEL_NUSES (real_return_label)++;
  if (real_simple_return_label)
    LABEL_NUSES (real_simple_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;
      rtx kind, real_label;

      /* Only look at filled JUMP_INSNs that go to the end of function
	 label.  */
      if (!NONJUMP_INSN_P (insn)
	  || GET_CODE (PATTERN (insn)) != SEQUENCE
	  || !jump_to_label_p (XVECEXP (PATTERN (insn), 0, 0)))
	continue;

      if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) == function_return_label)
	{
	  kind = ret_rtx;
	  real_label = real_return_label;
	}
      else if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0))
	       == function_simple_return_label)
	{
	  kind = simple_return_rtx;
	  real_label = real_simple_return_label;
	}
      else
	continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to the best
	 RETURN and go on to the next insn.  */
      if (!reorg_redirect_jump (jump_insn, kind))
	{
	  /* Make sure redirecting the jump will not invalidate the delay
	     slot insns.  */
	  if (redirect_with_delay_slots_safe_p (jump_insn, real_label, insn))
	    reorg_redirect_jump (jump_insn, real_label);
	  continue;
	}

      /* See if this RETURN can accept the insns currently in its delay slot.
	 It can if it has more or an equal number of slots and the contents
	 of each is valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
	{
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_false (jump_insn, i - 1,
					       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_true (jump_insn, i - 1,
					      XVECEXP (pat, 0, i), flags) :
#endif
		   eligible_for_delay (jump_insn, i - 1,
				       XVECEXP (pat, 0, i), flags)))
	      break;
	}
      else
	i = 0;

      if (i == XVECLEN (pat, 0))
	continue;

      /* We have to do something with this insn.  If it is an unconditional
	 RETURN, delete the SEQUENCE and output the individual insns,
	 followed by the RETURN.  Then set things up so we try to find
	 insns for its delay slots, if it needs some.  */
      if (ANY_RETURN_P (PATTERN (jump_insn)))
	{
	  rtx prev = PREV_INSN (insn);

	  delete_related_insns (insn);
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
	  emit_barrier_after (insn);

	  if (slots)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
      else
	/* It is probably more efficient to keep this with its current
	   delay slot as a branch to a RETURN.  */
	reorg_redirect_jump (jump_insn, real_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (real_return_label != NULL_RTX && --LABEL_NUSES (real_return_label) == 0)
    delete_related_insns (real_return_label);
  if (real_simple_return_label != NULL_RTX
      && --LABEL_NUSES (real_simple_return_label) == 0)
    delete_related_insns (real_simple_return_label);

  fill_simple_delay_slots (1);
  fill_simple_delay_slots (0);
}
/* Try to find insns to place in delay slots.  */

static void
dbr_schedule (rtx first)
{
  rtx insn, next, epilogue_insn = 0;
  int i;
  bool need_return_insns;

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return;
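
  /* NUM_FIXED_BLOCKS counts only the always-present entry and exit
     blocks, so equality here means the function body is empty.  */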
  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
        max_uid = INSN_UID (insn);
      if (NOTE_P (insn)
          && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
        epilogue_insn = insn;
    }

  uid_to_ruid = XNEWVEC (int, max_uid + 1);
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
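
  /* The "ruid" is simply the insn's ordinal position in the current insn
     chain.  UIDs are handed out in creation order and may be arbitrarily
     shuffled by earlier passes, so this map lets the rest of reorg
     compare two insns' positions with a plain integer comparison.  */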
  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
    }
  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (JUMP_TABLE_DATA_P (insn))
        continue;

      if (num_delay_slots (insn) > 0)
        obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (JUMP_P (insn)
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && !ANY_RETURN_P (JUMP_LABEL (insn))
          && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
              != JUMP_LABEL (insn)))
        redirect_jump (insn, target, 1);
    }
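
  /* E.g. if .L1 is immediately followed by .L2, a conditional jump to
     .L1 is retargeted to .L2 here, so later label comparisons in this
     pass need not chase chains of adjacent labels.  */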
  init_resource_info (epilogue_insn);

  /* Show we haven't computed an end-of-function label yet.  */
  function_return_label = function_simple_return_label = NULL_RTX;

  /* Initialize the statistics for this function.  */
  memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
  memset (num_filled_delays, 0, sizeof num_filled_delays);
  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }
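
  /* A second iteration pays off because relax_delay_slots may delete or
     redirect jumps, which can turn a slot that was unfillable on the
     first round into a fillable one on the next.  */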
  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (function_return_label && --LABEL_NUSES (function_return_label) == 0)
    delete_related_insns (function_return_label);
  if (function_simple_return_label
      && --LABEL_NUSES (function_simple_return_label) == 0)
    delete_related_insns (function_simple_return_label);
  need_return_insns = false;
#ifdef HAVE_return
  need_return_insns |= HAVE_return && function_return_label != 0;
#endif
#ifdef HAVE_simple_return
  need_return_insns |= HAVE_simple_return && function_simple_return_label != 0;
#endif
  if (need_return_insns)
    make_return_insns (first);
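
  /* A surviving function_return_label means some filled jump still
     targets the end-of-function label created during relaxation;
     make_return_insns then rewrites such jumps as real RETURNs where
     their delay-slot contents permit.  */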
  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
          && INSN_P (XEXP (PATTERN (insn), 0)))
        next = delete_related_insns (insn);
    }

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
  if (dump_file)
    {
      int i, j, need_comma;
      int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
      int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];
      for (reorg_pass_number = 0;
           reorg_pass_number < MAX_REORG_PASSES;
           reorg_pass_number++)
        {
          fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
          for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
            {
              need_comma = 0;
              fprintf (dump_file, ";; Reorg function #%d\n", i);

              fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
                       num_insns_needing_delays[i][reorg_pass_number]);

              for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
                if (num_filled_delays[i][j][reorg_pass_number])
                  {
                    if (need_comma)
                      fprintf (dump_file, ", ");
                    need_comma = 1;
                    fprintf (dump_file, "%d got %d delays",
                             num_filled_delays[i][j][reorg_pass_number], j);
                  }
              fprintf (dump_file, "\n");
            }
        }
      memset (total_delay_slots, 0, sizeof total_delay_slots);
      memset (total_annul_slots, 0, sizeof total_annul_slots);
      for (insn = first; insn; insn = NEXT_INSN (insn))
        {
          if (! INSN_DELETED_P (insn)
              && NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) != USE
              && GET_CODE (PATTERN (insn)) != CLOBBER)
            {
              if (GET_CODE (PATTERN (insn)) == SEQUENCE)
                {
                  rtx control;
                  j = XVECLEN (PATTERN (insn), 0) - 1;
                  if (j > MAX_DELAY_HISTOGRAM)
                    j = MAX_DELAY_HISTOGRAM;
                  control = XVECEXP (PATTERN (insn), 0, 0);
                  if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
                    total_annul_slots[j]++;
                  else
                    total_delay_slots[j]++;
                }
              else if (num_delay_slots (insn) > 0)
                total_delay_slots[0]++;
            }
        }
      fprintf (dump_file, ";; Reorg totals: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
        if (total_delay_slots[j])
          {
            if (need_comma)
              fprintf (dump_file, ", ");
            need_comma = 1;
            fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
          }
      fprintf (dump_file, "\n");
#if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
      fprintf (dump_file, ";; Reorg annuls: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
        if (total_annul_slots[j])
          {
            if (need_comma)
              fprintf (dump_file, ", ");
            need_comma = 1;
            fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
          }
      fprintf (dump_file, "\n");
#endif
      fprintf (dump_file, "\n");
    }
  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up to this point the destinations of
     jumps can still be moved around and changed; past this point they
     cannot.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (NONJUMP_INSN_P (insn))
        {
          rtx pat = PATTERN (insn);

          if (GET_CODE (pat) == SEQUENCE)
            insn = XVECEXP (pat, 0, 0);
        }
      if (!JUMP_P (insn))
        continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      add_reg_note (insn, REG_BR_PRED, GEN_INT (pred_flags));
    }
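
  /* The REG_BR_PRED note records the flags computed by get_jump_flags
     (e.g. whether the branch goes forward or backward), which a target's
     output routines may translate into static prediction hints in the
     branch encoding.  */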
  free_resource_info ();
  free (uid_to_ruid);
#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* The SPARC assembler, for instance, emits a warning when debug info
     is output into a delay slot.  */
  {
    rtx link;

    for (link = crtl->epilogue_delay_list;
         link;
         link = XEXP (link, 1))
      INSN_LOCATOR (XEXP (link, 0)) = 0;
  }
#endif

  crtl->dbr_scheduled_p = true;
}
#endif /* DELAY_SLOTS */
static bool
gate_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  /* At -O0 dataflow info isn't updated after RA.  */
  return optimize > 0 && flag_delayed_branch && !crtl->dbr_scheduled_p;
#else
  return false;
#endif
}
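
/* flag_delayed_branch mirrors the -fdelayed-branch command-line option;
   the dbr_scheduled_p check also keeps the pass from running twice on
   the same function.  */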
/* Run delay slot optimization.  */
static unsigned int
rest_of_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  dbr_schedule (get_insns ());
#endif
  return 0;
}
struct rtl_opt_pass pass_delay_slots =
{
 {
  RTL_PASS,
  "dbr",                                /* name */
  gate_handle_delay_slots,              /* gate */
  rest_of_handle_delay_slots,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DBR_SCHED,                         /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};
/* Machine dependent reorg pass.  */
static bool
gate_handle_machine_reorg (void)
{
  return targetm.machine_dependent_reorg != 0;
}

static unsigned int
rest_of_handle_machine_reorg (void)
{
  targetm.machine_dependent_reorg ();
  return 0;
}
struct rtl_opt_pass pass_machine_reorg =
{
 {
  RTL_PASS,
  "mach",                               /* name */
  gate_handle_machine_reorg,            /* gate */
  rest_of_handle_machine_reorg,         /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_MACH_DEP,                          /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};