/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992-2015 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').
   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS has a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.
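
   As an illustrative sketch (not taken from any particular port), on a
   machine with a single delay slot an independent insn from before the
   branch can be moved into the slot:

	add  r1,r2,r3		      beq  r4,L1
	beq  r4,L1	      ==>     add  r1,r2,r3   ; in the slot
	nop  (slot)

   The add executes whether or not the branch is taken, so the slot no
   longer costs a cycle.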
   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.
   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops.  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.
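
   As an illustrative sketch of (2) (again, no particular port): if the
   branch below is predicted taken, an insn may be copied from the
   branch target into the slot and annulled on the fall-through path:

	beq,a  r4,L1
	ld     r5,0(r6)	      ; copy of the insn at L1, annulled
	...		      ; when the branch is not taken
     L1:
	ld     r5,0(r6)

   The copy costs code size and pays off only when the guess that the
   branch is taken is right; the branch is then usually redirected past
   the copied insn.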
   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.
   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).  */
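
/* A sketch of the CC0 note linking described above (shapes abbreviated,
   not verbatim RTL dumps): if insn I sets CC0 and jump insn J uses it,
   and delay slot filling separates them, then I receives a REG_CC_USER
   note pointing at J and J receives a REG_CC_SETTER note pointing at I;
   this is what link_cc0_insns below establishes via add_reg_note.  */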
#include "coretypes.h"
#include "diagnostic-core.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "conditions.h"
#include "insn-attr.h"
#include "resource.h"
#include "tree-pass.h"
#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* First, some functions that were used before GCC got a control flow graph.
   These functions are now only used here in reorg.c, and have therefore
   been moved here to avoid inadvertent misuse elsewhere in the compiler.  */

/* Return the last label to mark the same position as LABEL.  Return LABEL
   itself if it is null or any return rtx.  */

static rtx
skip_consecutive_labels (rtx label_or_return)
{
  rtx_insn *insn;

  if (label_or_return && ANY_RETURN_P (label_or_return))
    return label_or_return;

  rtx_insn *label = as_a <rtx_insn *> (label_or_return);

  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}

/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

static void
link_cc0_insns (rtx_insn *insn)
{
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx_insn **) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx_insn **) obstack_next_free (&unfilled_slots_obstack))
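
/* A minimal usage sketch, mirroring how the macros are used later in
   this file (`process' is a hypothetical stand-in for the real work):

     obstack_ptr_grow (&unfilled_slots_obstack, insn);
     ...
     int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
     for (int i = 0; i < num_unfilled_slots; i++)
       process (unfilled_slots_base[i]);

   Re-reading unfilled_slots_base at each use matters because growing
   the obstack may move its contents.  */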

/* Points to the label before the end of the function, or before a
   return insn.  */
static rtx_code_label *function_return_label;
/* Likewise for a simple_return.  */
static rtx_code_label *function_simple_return_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;

static int stop_search_p (rtx_insn *, int);
static int resource_conflicts_p (struct resources *, struct resources *);
static int insn_references_resource_p (rtx, struct resources *, bool);
static int insn_sets_resource_p (rtx, struct resources *, bool);
static rtx_code_label *find_end_label (rtx);
static rtx_insn *emit_delay_sequence (rtx_insn *, rtx_insn_list *, int);
static rtx_insn_list *add_to_delay_list (rtx_insn *, rtx_insn_list *);
static rtx_insn *delete_from_delay_slot (rtx_insn *);
static void delete_scheduled_jump (rtx_insn *);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx_insn_list *optimize_skip (rtx_jump_insn *);
#endif
static int get_jump_flags (const rtx_insn *, rtx);
static int mostly_true_jump (rtx);
static rtx get_branch_condition (const rtx_insn *, rtx);
static int condition_dominates_p (rtx, const rtx_insn *);
static int redirect_with_delay_slots_safe_p (rtx_insn *, rtx, rtx);
static int redirect_with_delay_list_safe_p (rtx_insn *, rtx, rtx_insn_list *);
static int check_annul_list_true_false (int, rtx);
static rtx_insn_list *steal_delay_list_from_target (rtx_insn *, rtx,
						    rtx_sequence *,
						    rtx_insn_list *,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *,
						    rtx *);
static rtx_insn_list *steal_delay_list_from_fallthrough (rtx_insn *, rtx,
							 rtx_sequence *,
							 rtx_insn_list *,
							 struct resources *,
							 struct resources *,
							 struct resources *,
							 int, int *, int *);
static void try_merge_delay_insns (rtx_insn *, rtx_insn *);
static rtx redundant_insn (rtx, rtx_insn *, rtx);
static int own_thread_p (rtx, rtx, int);
static void update_block (rtx_insn *, rtx);
static int reorg_redirect_jump (rtx_jump_insn *, rtx);
static void update_reg_dead_notes (rtx_insn *, rtx_insn *);
static void fix_reg_dead_note (rtx, rtx);
static void update_reg_unused_notes (rtx, rtx);
static void fill_simple_delay_slots (int);
static rtx_insn_list *fill_slots_from_thread (rtx_jump_insn *, rtx, rtx, rtx,
					      int, int, int, int,
					      int *, rtx_insn_list *);
static void fill_eager_delay_slots (void);
static void relax_delay_slots (rtx_insn *);
static void make_return_insns (rtx_insn *);

/* A wrapper around next_active_insn which takes care to return ret_rtx
   unchanged.  */

static rtx
first_active_target_insn (rtx insn)
{
  if (ANY_RETURN_P (insn))
    return insn;
  return next_active_insn (as_a <rtx_insn *> (insn));
}

/* Return true iff INSN is a simplejump, or any kind of return insn.  */

static bool
simplejump_or_return_p (rtx insn)
{
  return (JUMP_P (insn)
	  && (simplejump_p (as_a <rtx_insn *> (insn))
	      || ANY_RETURN_P (PATTERN (insn))));
}

/* Return TRUE if this insn should stop the search for insn to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (rtx_insn *insn, int labels_p)
{
  if (insn == 0)
    return 1;

  /* If the insn can throw an exception that is caught within the function,
     it may effectively perform a jump from the viewpoint of the function.
     Therefore act like for a jump.  */
  if (can_throw_internal (insn))
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      gcc_unreachable ();
    }
}

/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (struct resources *res1, struct resources *res2)
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || res1->volatil || res2->volatil)
    return 1;

  return hard_reg_set_intersect_p (res1->regs, res2->regs);
}

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (rtx insn, struct resources *res,
			    bool include_delayed_effects)
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (rtx insn, struct resources *res,
		      bool include_delayed_effects)
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0,
		      (include_delayed_effects
		       ? MARK_SRC_DEST_CALL
		       : MARK_SRC_DEST));
  return resource_conflicts_p (&insn_sets, res);
}

/* Find a label at the end of the function or before a RETURN.  If there
   is none, try to make one.  If that fails, returns 0.

   The property of such a label is that it is placed just before the
   epilogue or a bare RETURN insn, so that another bare RETURN can be
   turned into a jump to the label unconditionally.  In particular, the
   label cannot be placed before a RETURN insn with a filled delay slot.

   ??? There may be a problem with the current implementation.  Suppose
   we start with a bare RETURN insn and call find_end_label.  It may set
   function_return_label just before the RETURN.  Suppose the machinery
   is able to fill the delay slot of the RETURN insn afterwards.  Then
   function_return_label is no longer valid according to the property
   described above and find_end_label will still return it unmodified.
   Note that this is probably mitigated by the following observation:
   once function_return_label is made, it is very likely the target of
   a jump, so filling the delay slot of the RETURN will be much more
   difficult.

   KIND is either simple_return_rtx or ret_rtx, indicating which type of
   return we're looking for.  */
static rtx_code_label *
find_end_label (rtx kind)
{
  rtx_insn *insn;
  rtx_code_label **plabel;

  if (kind == ret_rtx)
    plabel = &function_return_label;
  else
    {
      gcc_assert (kind == simple_return_rtx);
      plabel = &function_simple_return_label;
    }

  /* If we found one previously, return it.  */
  if (*plabel)
    return *plabel;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (NOTE_P (insn)
	 || (NONJUMP_INSN_P (insn)
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     function_return_label.  */
  if (BARRIER_P (insn)
      && JUMP_P (PREV_INSN (insn))
      && PATTERN (PREV_INSN (insn)) == kind)
    {
      rtx_insn *temp = PREV_INSN (PREV_INSN (insn));
      rtx_code_label *label = gen_label_rtx ();
      LABEL_NUSES (label) = 0;

      /* Put the label before any USE insns that may precede the RETURN
	 insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (label, temp);
      *plabel = label;
    }

  else if (LABEL_P (insn))
    *plabel = as_a <rtx_code_label *> (insn);
  else
    {
      rtx_code_label *label = gen_label_rtx ();
      LABEL_NUSES (label) = 0;
      /* If the basic block reorder pass moves the return insn to
	 some other place try to locate it again and put our
	 function_return_label there.  */
      while (insn && ! (JUMP_P (insn) && (PATTERN (insn) == kind)))
	insn = PREV_INSN (insn);
      if (insn)
	{
	  insn = PREV_INSN (insn);

	  /* Put the label before any USE insns that may precede the
	     RETURN insn.  */
	  while (GET_CODE (insn) == USE)
	    insn = PREV_INSN (insn);

	  emit_label_after (label, insn);
	}
      else
	{
	  if (targetm.have_epilogue () && ! targetm.have_return ())
	    /* The RETURN insn has its delay slot filled so we cannot
	       emit the label just before it.  Since we already have
	       an epilogue and cannot emit a new RETURN, we cannot
	       emit the label at all.  */
	    return NULL;

	  /* Otherwise, make a new label and emit a RETURN and BARRIER,
	     if needed.  */
	  emit_label (label);
	  if (targetm.have_return ())
	    {
	      /* The return we make may have delay slots too.  */
	      rtx_insn *pat = targetm.gen_return ();
	      rtx_insn *insn = emit_jump_insn (pat);
	      set_return_jump_label (insn);
	      emit_barrier ();
	      if (num_delay_slots (insn) > 0)
		obstack_ptr_grow (&unfilled_slots_obstack, insn);
	    }
	}
      *plabel = label;
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (*plabel);

  return *plabel;
}

/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Returns the insn containing the SEQUENCE that replaces INSN.  */
static rtx_insn *
emit_delay_sequence (rtx_insn *insn, rtx_insn_list *list, int length)
{
  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx_insn *seq_insn = make_insn_raw (seq);

  /* If DELAY_INSN has a location, use it for SEQ_INSN.  If DELAY_INSN does
     not have a location, but one of the delayed insns does, we pick up a
     location from there later.  */
  INSN_LOCATION (seq_insn) = INSN_LOCATION (insn);

  /* Unlink INSN from the insn chain, so that we can put it into
     the SEQUENCE.  Remember where we want to emit SEQUENCE in AFTER.  */
  rtx_insn *after = PREV_INSN (insn);
  remove_insn (insn);
  SET_NEXT_INSN (insn) = SET_PREV_INSN (insn) = NULL;

  /* Build our SEQUENCE and rebuild the insn chain.  */
  int i = 1;
  start_sequence ();
  XVECEXP (seq, 0, 0) = emit_insn (insn);
  for (rtx_insn_list *li = list; li; li = li->next (), i++)
    {
      rtx_insn *tem = li->insn ();
      rtx note, next;

      /* Show that this copy of the insn isn't deleted.  */
      tem->set_undeleted ();

      /* Unlink insn from its original place, and re-emit it into
	 the sequence.  */
      SET_NEXT_INSN (tem) = SET_PREV_INSN (tem) = NULL;
      XVECEXP (seq, 0, i) = emit_insn (tem);

      /* The SPARC assembler, for instance, emits a warning when debug info
	 is output into the delay slot.  */
      if (INSN_LOCATION (tem) && !INSN_LOCATION (seq_insn))
	INSN_LOCATION (seq_insn) = INSN_LOCATION (tem);
      INSN_LOCATION (tem) = 0;

      for (note = REG_NOTES (tem); note; note = next)
	{
	  next = XEXP (note, 1);
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_DEAD:
	      /* Remove any REG_DEAD notes because we can't rely on them now
		 that the insn has been moved.  */
	      remove_note (tem, note);
	      break;

	    case REG_LABEL_OPERAND:
	    case REG_LABEL_TARGET:
	      /* Keep the label reference count up to date.  */
	      if (LABEL_P (XEXP (note, 0)))
		LABEL_NUSES (XEXP (note, 0)) ++;
	      break;

	    default:
	      break;
	    }
	}
    }
  end_sequence ();
  gcc_assert (i == length + 1);

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  add_insn_after (seq_insn, after, NULL);

  return seq_insn;
}
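
/* For illustration (a sketch, not a verbatim RTL dump): after
   emit_delay_sequence, a branch with one filled slot becomes a single
   insn whose pattern is

     (sequence [
	(jump_insn ...)		;; the insn that required the slot
	(insn ...)		;; the delay slot insn
     ])

   which `final' later recognizes through FINAL_SEQUENCE.  */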

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx_insn_list *
add_to_delay_list (rtx_insn *insn, rtx_insn_list *delay_list)
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, delay_list->next ());

  return delay_list;
}

/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx_insn *
delete_from_delay_slot (rtx_insn *insn)
{
  rtx_insn *trial, *seq_insn, *prev;
  rtx_sequence *seq;
  rtx_insn_list *delay_list = 0;
  int i;
  int had_barrier = 0;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = as_a <rtx_sequence *> (PATTERN (seq_insn));

  if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
    had_barrier = 1;

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (seq->len () > 2)
    for (i = 1; i < seq->len (); i++)
      if (seq->insn (i) != insn)
	delay_list = add_to_delay_list (seq->insn (i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = seq->insn (0);
  delete_related_insns (seq_insn);
  add_insn_after (trial, prev, NULL);

  /* If there was a barrier after the old SEQUENCE, remit it.  */
  if (had_barrier)
    emit_barrier_after (trial);

  /* If there are any delay insns, remit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else if (JUMP_P (trial))
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}

/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (rtx_insn *insn)
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (as_a <rtx_insn *> (XEXP (note, 0)));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx_insn *trial = previous_insn (insn);
	  if (NOTE_P (trial))
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, NULL_RTX))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_related_insns (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }

  delete_related_insns (insn);
}

/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (int slots_filled, int index)
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}

#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       into the delay slot.

	 (orig)		 (skip)		(otherwise)
	 Bcc.n L1	Bcc',a L1	Bcc,a L1'
	 insn		insn		insn2
      L1:	      L1:	      L1:
	 insn2		insn2		insn2
					insn2
      L1':

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

	 (orig)		 (skip)		(otherwise)
	 Bcc.n L1	Bcc',a L2	Bcc,a L1'
	 insn		insn		insn2
      L1:	      L1:	      L1:
	 Bra L2		Bra L2		Bra L2
					insn2
      L2:	      L1':

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx_insn_list *
optimize_skip (rtx_jump_insn *insn)
{
  rtx_insn *trial = next_nonnote_insn (insn);
  rtx_insn *next_trial = next_active_insn (trial);
  rtx_insn_list *delay_list = 0;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || !NONJUMP_INSN_P (trial)
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags))
      || can_throw_internal (trial))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if (next_trial == next_active_insn (JUMP_LABEL (insn))
      || (next_trial != 0
	  && simplejump_or_return_p (next_trial)
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn), 1))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_related_insns (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && simplejump_or_return_p (next_trial))
	{
	  rtx target_label = JUMP_LABEL (next_trial);
	  if (ANY_RETURN_P (target_label))
	    target_label = find_end_label (target_label);

	  if (target_label)
	    {
	      /* Recompute the flags based on TARGET_LABEL since threading
		 the jump to TARGET_LABEL may change the direction of the
		 jump (which may change the circumstances in which the
		 delay slot is nullified).  */
	      flags = get_jump_flags (insn, target_label);
	      if (eligible_for_annul_true (insn, 0, trial, flags))
		reorg_redirect_jump (insn, target_label);
	    }
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif

/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Unconditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (const rtx_insn *insn, rtx label)
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is a return, then there is no way to determine the branch
     direction.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && !ANY_RETURN_P (label)
      && INSN_UID (insn) <= max_uid
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
      ? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  return flags;
}

/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.  */

static int
mostly_true_jump (rtx jump_insn)
{
  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
  if (note)
    {
      int prob = XINT (note, 0);

      if (prob >= REG_BR_PROB_BASE * 9 / 10)
	return 2;
      else if (prob >= REG_BR_PROB_BASE / 2)
	return 1;
      else if (prob >= REG_BR_PROB_BASE / 10)
	return 0;
      else
	return -1;
    }

  /* If there is no note, assume branches are not taken.
     This should be rare.  */
  return 0;
}
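
/* A worked example of the thresholds above, assuming REG_BR_PROB_BASE
   is 10000 (its usual value): a REG_BR_PROB note of 9500 yields 2
   (>= 9000), 6000 yields 1 (>= 5000), 2000 yields 0 (>= 1000), and
   500 yields -1.  */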

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (const rtx_insn *insn, rtx target)
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (ANY_RETURN_P (pat) && pat == target)
    return const_true_rtx;

  if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && LABEL_REF_LABEL (src) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && XEXP (src, 2) == pc_rtx
	   && ((GET_CODE (XEXP (src, 1)) == LABEL_REF
		&& LABEL_REF_LABEL (XEXP (src, 1)) == target)
	       || (ANY_RETURN_P (XEXP (src, 1)) && XEXP (src, 1) == target)))
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && XEXP (src, 1) == pc_rtx
	   && ((GET_CODE (XEXP (src, 2)) == LABEL_REF
		&& LABEL_REF_LABEL (XEXP (src, 2)) == target)
	       || (ANY_RETURN_P (XEXP (src, 2)) && XEXP (src, 2) == target)))
    {
      enum rtx_code rev;
      rev = reversed_comparison_code (XEXP (src, 0), insn);
      if (rev != UNKNOWN)
	return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
			       XEXP (XEXP (src, 0), 0),
			       XEXP (XEXP (src, 0), 1));
    }

  return 0;
}

/* Return nonzero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (rtx condition, const rtx_insn *insn)
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}

/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (rtx_insn *jump, rtx newlabel, rtx seq)
{
  int flags, i;
  rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (seq));

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < pat->len (); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (pat->insn (i)))
	   ? eligible_for_annul_false (jump, i - 1, pat->insn (i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1, pat->insn (i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, pat->insn (i), flags)))
      break;

  return (i == pat->len ());
}

/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (rtx_insn *jump, rtx newlabel,
				 rtx_insn_list *delay_list)
{
  int flags, i;
  rtx_insn_list *li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = li->next (), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (li->insn ()))
	   ? eligible_for_annul_false (jump, i, li->insn (), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, li->insn (), flags) :
#endif
	   eligible_for_delay (jump, i, li->insn (), flags)))
      break;

  return (li == NULL);
}

/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (int annul_true_p, rtx delay_list)
{
  rtx temp;

  if (delay_list)
    for (temp = delay_list; temp; temp = XEXP (temp, 1))
      {
	rtx trial = XEXP (temp, 0);

	if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	    || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	  return 0;
      }

  return 1;
}

/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a nonzero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value nonzero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */
static rtx_insn_list *
steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
			      rtx_insn_list *delay_list, struct resources *sets,
			      struct resources *needed,
			      struct resources *other_needed,
			      int slots_to_fill, int *pslots_filled,
			      int *pannul_p, rtx *pnew_thread)
{
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx_insn_list *new_delay_list = 0;
  int must_annul = *pannul_p;
  bool used_annul = false;
  int i;
  struct resources cc_set;
  bool *redundant;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We can not steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We can not do this because we can
     not change the direction of the jump because the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (rtx_insn_list *temp = delay_list; temp; temp = temp->next ())
    {
      rtx_insn *trial = temp->insn ();

      mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
      if (insn_references_resource_p (seq->insn (0), &cc_set, false))
	return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, seq->insn (0))
      || ! single_set (seq->insn (0)))
    return delay_list;

  /* On some targets, branches with delay slots can have a limited
     displacement.  Give the back end a chance to tell us we can't do
     this.  */
  if (! targetm.can_follow_jump (insn, seq->insn (0)))
    return delay_list;

  redundant = XALLOCAVEC (bool, XVECLEN (seq, 0));
  for (i = 1; i < seq->len (); i++)
    {
      rtx_insn *trial = seq->insn (i);
      int flags;

      if (insn_references_resource_p (trial, sets, false)
	  || insn_sets_resource_p (trial, needed, false)
	  || insn_sets_resource_p (trial, sets, false)
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || (HAVE_cc0 && find_reg_note (trial, REG_CC_USER, NULL_RTX))
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (seq->insn (0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      redundant[i] = redundant_insn (trial, insn, new_delay_list);
      if (redundant[i])
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (seq->insn (0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, false)
		   && ! may_trap_or_fault_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	    && (must_annul = 1,
		check_annul_list_true_false (0, delay_list)
		&& check_annul_list_true_false (0, new_delay_list)
		&& eligible_for_annul_false (insn, total_slots_filled,
					     trial, flags)))
	{
	  if (must_annul)
	    used_annul = true;
	  rtx_insn *temp = copy_delay_slot_insn (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Record the effect of the instructions that were redundant and which
     we therefore decided not to copy.  */
  for (i = 1; i < seq->len (); i++)
    if (redundant[i])
      update_block (seq->insn (i), insn);

  /* Show the place to which we will be branching.  */
  *pnew_thread = first_active_target_insn (JUMP_LABEL (seq->insn (0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (rtx_insn_list *temp = new_delay_list; temp; temp = temp->next ())
    delay_list = add_to_delay_list (temp->insn (), delay_list);

  return delay_list;
}

/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx_insn_list *
steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
				   rtx_sequence *seq,
				   rtx_insn_list *delay_list,
				   struct resources *sets,
				   struct resources *needed,
				   struct resources *other_needed,
				   int slots_to_fill, int *pslots_filled,
				   int *pannul_p)
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  bool used_annul = false;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_or_return_p (seq->insn (0)))
    return delay_list;

  for (i = 1; i < seq->len (); i++)
    {
      rtx_insn *trial = seq->insn (i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, false)
	  || insn_sets_resource_p (trial, needed, false)
	  || insn_sets_resource_p (trial, sets, false)
	  || (HAVE_cc0 && sets_cc0_p (PATTERN (trial))))

	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  update_block (trial, insn);
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, false)
		   && ! may_trap_or_fault_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  if (must_annul)
	    used_annul = true;
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}

/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */
static void
try_merge_delay_insns (rtx_insn *insn, rtx_insn *thread)
{
  rtx_insn *trial, *next_trial;
  rtx_insn *delay_insn = as_a <rtx_insn *> (XVECEXP (PATTERN (insn), 0, 0));
  int annul_p = JUMP_P (delay_insn) && INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed, modified;
  rtx_insn_list *merged_insns = 0;
  int i, j;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1 ; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed,
				   true);
  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (NONJUMP_INSN_P (trial)
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
	  /* We can't share an insn that sets cc0.  */
	  && (!HAVE_cc0 || ! sets_cc0_p (pat))
	  && ! insn_references_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &set, true)
	  && ! insn_sets_resource_p (trial, &needed, true)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_related_insns (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	}

      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, true);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && !(JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
	   && INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0))))
    {
      rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (trial));
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (filled_insn, &needed, true);
      for (i = 1; i < pat->len (); i++)
	{
	  rtx_insn *dtrial = pat->insn (i);

	  CLEAR_RESOURCE (&modified);
	  /* Account for resources set by the insn following NEXT_TO_MATCH
	     inside INSN's delay list.  */
	  for (j = 1; slot_number + j < num_slots; j++)
	    mark_set_resources (XVECEXP (PATTERN (insn), 0, slot_number + j),
				&modified, 0, MARK_SRC_DEST_CALL);
	  /* Account for resources set by the insn before DTRIAL and inside
	     TRIAL's delay list.  */
	  for (j = 1; j < i; j++)
	    mark_set_resources (XVECEXP (pat, 0, j),
				&modified, 0, MARK_SRC_DEST_CALL);
	  if (! insn_references_resource_p (dtrial, &set, true)
	      && ! insn_sets_resource_p (dtrial, &set, true)
	      && ! insn_sets_resource_p (dtrial, &needed, true)
	      && (!HAVE_cc0 || ! sets_cc0_p (PATTERN (dtrial)))
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      /* Check that DTRIAL and NEXT_TO_MATCH do not reference a
		 resource modified between them (only dtrial is checked because
		 next_to_match and dtrial shall be equal in order to hit
		 this point).  */
	      && ! insn_references_resource_p (dtrial, &modified, true)
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  rtx_insn *new_rtx;

		  update_block (dtrial, thread);
		  new_rtx = delete_from_delay_slot (dtrial);
		  if (thread->deleted ())
		    thread = new_rtx;
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
						  merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	  else
	    {
	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
	      mark_referenced_resources (dtrial, &needed, true);
	    }
	}
    }
  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not do so any more.  In that case delete
     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each
     insn in the delay list so that we know that it isn't only being used at
     the target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = merged_insns->next ())
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      rtx_insn *new_rtx;

	      update_block (merged_insns->insn (), thread);
	      new_rtx = delete_from_delay_slot (merged_insns->insn ());
	      if (thread->deleted ())
		thread = new_rtx;
	    }
	  else
	    {
	      update_block (merged_insns->insn (), thread);
	      delete_related_insns (merged_insns->insn ());
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}

/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
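
/* For instance (illustrative only), in a series of branches to a common
   label:

	b     L1	; delay slot holds a copy of "add r1,r2,r3"
	...
	b     L1	; "add r1,r2,r3" is a candidate for this slot
	...
     L1: add  r1,r2,r3

   the candidate is redundant with the copy already in the earlier delay
   slot, provided nothing in between disturbs r1, r2 or r3.  */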
static rtx
redundant_insn (rtx insn, rtx_insn *target, rtx delay_list)
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx_insn *trial;
  rtx pat;
  struct resources needed, set;
  int i;
  unsigned insns_to_search;

  /* If INSN has any REG_UNUSED notes, it can't match anything since we
     are allowed to not actually assign to such a register.  */
  if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
    return 0;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target),
	 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && insns_to_search > 0;
       trial = PREV_INSN (trial))
    {
      /* (use (insn))s can come immediately after a barrier if the
	 label that used to precede them has been deleted as dead.
	 See delete_related_insns.  */
      if (LABEL_P (trial) || BARRIER_P (trial))
	return 0;

      if (!INSN_P (trial))
	continue;
      --insns_to_search;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
	{
	  /* Stop for a CALL and its delay slots because it is difficult to
	     track its resource needs correctly.  */
	  if (CALL_P (seq->element (0)))
	    return 0;

	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
	     slots because it is difficult to track its resource needs
	     correctly.  */

	  if (INSN_SETS_ARE_DELAYED (seq->insn (0)))
	    return 0;

	  if (INSN_REFERENCES_ARE_DELAYED (seq->insn (0)))
	    return 0;

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = seq->len () - 1; i > 0; i--)
	    if (GET_CODE (seq->element (i)) == GET_CODE (insn)
		&& rtx_equal_p (PATTERN (seq->element (i)), ipat)
		&& ! find_reg_note (seq->element (i), REG_UNUSED, NULL_RTX))
	      break;

	  /* If found a match, exit this loop early.  */
	  if (i > 0)
	    break;
	}

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
	       && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
	break;
    }

  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;
  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
  mark_referenced_resources (insn, &needed, true);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
      || (HAVE_cc0 && reg_mentioned_p (cc0_rtx, ipat))
      /* The insn requiring the delay may not set anything needed or set by
	 INSN.  */
      || insn_sets_resource_p (target_main, &needed, true)
      || insn_sets_resource_p (target_main, &set, true))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);

  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, true))
	return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed,
				true))
	return 0;
  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something insn uses or sets.  */

  for (trial = PREV_INSN (target),
	 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
       trial && !LABEL_P (trial) && insns_to_search > 0;
       trial = PREV_INSN (trial))
    {
      if (!INSN_P (trial))
	continue;
      --insns_to_search;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
	{
	  bool annul_p = false;
	  rtx_insn *control = seq->insn (0);

	  /* If this is a CALL_INSN and its delay slots, it is hard to track
	     the resource needs properly, so give up.  */
	  if (CALL_P (control))
	    return 0;

	  /* If this is an INSN or JUMP_INSN with delayed effects, it
	     is hard to track the resource needs properly, so give up.  */

	  if (INSN_SETS_ARE_DELAYED (control))
	    return 0;

	  if (INSN_REFERENCES_ARE_DELAYED (control))
	    return 0;

	  if (JUMP_P (control))
	    annul_p = INSN_ANNULLED_BRANCH_P (control);

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = seq->len () - 1; i > 0; i--)
	    {
	      rtx candidate = seq->element (i);

	      /* If an insn will be annulled if the branch is false, it isn't
		 considered as a possible duplicate insn.  */
	      if (rtx_equal_p (PATTERN (candidate), ipat)
		  && ! (annul_p && INSN_FROM_TARGET_P (candidate)))
		{
		  /* Show that this insn will be used in the sequel.  */
		  INSN_FROM_TARGET_P (candidate) = 0;
		  return candidate;
		}

	      /* Unless this is an annulled insn from the target of a branch,
		 we must stop if it sets anything needed or set by INSN.  */
	      if ((!annul_p || !INSN_FROM_TARGET_P (candidate))
		  && insn_sets_resource_p (candidate, &needed, true))
		return 0;
	    }

	  /* If the insn requiring the delay slot conflicts with INSN, we
	     must stop.  */
	  if (insn_sets_resource_p (control, &needed, true))
	    return 0;
	}
      else
	{
	  /* See if TRIAL is the same as INSN.  */
	  pat = PATTERN (trial);
	  if (rtx_equal_p (pat, ipat))
	    return trial;

	  /* Can't go any further if TRIAL conflicts with INSN.  */
	  if (insn_sets_resource_p (trial, &needed, true))
	    return 0;
	}
    }

  return 0;
}

/* Return 1 if THREAD can only be executed in one way.  If LABEL is nonzero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is nonzero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */

static int
own_thread_p (rtx thread, rtx label, int allow_fallthrough)
{
  rtx_insn *active_insn;
  rtx_insn *insn;

  /* We don't own the function end.  */
  if (thread == 0 || ANY_RETURN_P (thread))
    return 0;

  /* We have a non-NULL insn.  */
  rtx_insn *thread_insn = as_a <rtx_insn *> (thread);

  /* Get the first active insn, or THREAD_INSN, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread_insn));

  for (insn = thread_insn; insn != active_insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn)
	&& (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread_insn);
       insn == 0 || !BARRIER_P (insn);
       insn = prev_nonnote_insn (insn))
    if (insn == 0
	|| LABEL_P (insn)
	|| (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}

/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (rtx_insn *insn, rtx where)
{
  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  incr_ticks_for_insn (insn);
}

/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (rtx_jump_insn *jump, rtx nlabel)
{
  incr_ticks_for_insn (jump);
  return redirect_jump (jump, nlabel, 1);
}

/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */

static void
update_reg_dead_notes (rtx_insn *insn, rtx_insn *delayed_insn)
{
  rtx link, next;
  rtx_insn *p;

  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || !REG_P (XEXP (link, 0)))
	  continue;

	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
	  {
	    /* Move the REG_DEAD note from P to INSN.  */
	    remove_note (p, link);
	    XEXP (link, 1) = REG_NOTES (insn);
	    REG_NOTES (insn) = link;
	  }
      }
}

/* Called when an insn redundant with start_insn is deleted.  If there
   is a REG_DEAD note for the target of start_insn between start_insn
   and stop_insn, then the REG_DEAD note needs to be deleted since the
   value no longer dies there.

   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
   confused into thinking the register is dead.  */

static void
fix_reg_dead_note (rtx start_insn, rtx stop_insn)
{
  rtx link, next;
  rtx_insn *p;

  for (p = next_nonnote_insn (start_insn); p != stop_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || !REG_P (XEXP (link, 0)))
	  continue;

	if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
	  {
	    remove_note (p, link);
	    return;
	  }
      }
}
/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.

   This handles the case of udivmodXi4 instructions which optimize their
   output depending on whether any REG_UNUSED notes are present.  We must
   make sure that INSN calculates as many results as REDUNDANT_INSN does.  */

static void
update_reg_unused_notes (rtx insn, rtx redundant_insn)
{
  rtx link, next;

  for (link = REG_NOTES (insn); link; link = next)
    {
      next = XEXP (link, 1);

      if (REG_NOTE_KIND (link) != REG_UNUSED
          || !REG_P (XEXP (link, 0)))
        continue;

      if (! find_regno_note (redundant_insn, REG_UNUSED,
                             REGNO (XEXP (link, 0))))
        remove_note (insn, link);
    }
}
static vec <rtx> sibling_labels;

/* Return the label before INSN, or put a new label there.  If SIBLING is
   non-zero, it is another label associated with the new label (if any),
   typically the former target of the jump that will be redirected to
   the new label.  */

static rtx_insn *
get_label_before (rtx_insn *insn, rtx sibling)
{
  rtx_insn *label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || !LABEL_P (label))
    {
      rtx_insn *prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
      if (sibling)
        {
          sibling_labels.safe_push (label);
          sibling_labels.safe_push (sibling);
        }
    }
  return label;
}
/* Scan a function looking for insns that need a delay slot and find insns to
   put into the delay slot.

   NON_JUMPS_P is nonzero if we are only to try to fill non-jump insns (such
   as calls).  We do these first since we don't want jump insns (that are
   easier to fill) to get the only insns that could be used for non-jump insns.
   When it is zero, only try to fill JUMP_INSNs.

   When slots are filled in this manner, the insns (including the
   delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
   it is possible to tell whether a delay slot has really been filled
   or not.  `final' knows how to deal with this, by communicating
   through FINAL_SEQUENCE.  */
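
/* Schematically (illustrative, not verbatim dump output), a branch with
   one filled delay slot becomes

	(insn (sequence [
	    (jump_insn ... (set (pc) (label_ref L)) ...)
	    (insn ...)			; the delay-slot insn
	]))

   and `final' emits the two together via FINAL_SEQUENCE.  */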
static void
fill_simple_delay_slots (int non_jumps_p)
{
  rtx_insn *insn, *trial, *next_trial;
  rtx pat;
  int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
  struct resources needed, set;
  int slots_to_fill, slots_filled;
  rtx_insn_list *delay_list;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      int flags;
      /* Get the next insn to fill.  If it has already had any slots assigned,
         we can't do anything with it.  Maybe we'll improve this later.  */

      insn = unfilled_slots_base[i];
      if (insn == 0
          || insn->deleted ()
          || (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
          || (JUMP_P (insn) && non_jumps_p)
          || (!JUMP_P (insn) && ! non_jumps_p))
        continue;

      /* It may have been that this insn used to need delay slots, but
         now doesn't; ignore in that case.  This can happen, for example,
         on the HP PA RISC, where the number of delay slots depends on
         what insns are nearby.  */
      slots_to_fill = num_delay_slots (insn);

      /* Some machine descriptions have defined instructions to have
         delay slots only in certain circumstances which may depend on
         nearby insns (which change due to reorg's actions).

         For example, the PA port normally has delay slots for unconditional
         jumps.

         However, the PA port claims such jumps do not have a delay slot
         if they are immediate successors of certain CALL_INSNs.  This
         allows the port to favor filling the delay slot of the call with
         the unconditional jump.  */
      if (slots_to_fill == 0)
        continue;

      /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
         says how many.  After initialization, first try optimizing

         call _foo              call _foo
         nop                    add %o7,.-L1,%o7
         b,a L1
         ...

         If this case applies, the delay slot of the call is filled with
         the unconditional jump.  This is done first to avoid having the
         delay slot of the call filled in the backward scan.  Also, since
         the unconditional jump is likely to also have a delay slot, that
         insn must exist when it is subsequently scanned.

         This is tried on each insn with delay slots as some machines
         have insns which perform calls, but are not represented as
         CALL_INSNs.  */

      slots_filled = 0;
      delay_list = 0;

      if (JUMP_P (insn))
        flags = get_jump_flags (insn, JUMP_LABEL (insn));
      else
        flags = get_jump_flags (insn, NULL_RTX);

      if ((trial = next_active_insn (insn))
          && JUMP_P (trial)
          && simplejump_p (trial)
          && eligible_for_delay (insn, slots_filled, trial, flags)
          && no_labels_between_p (insn, trial)
          && ! can_throw_internal (trial))
        {
          rtx_insn **tmp;
          slots_filled++;
          delay_list = add_to_delay_list (trial, delay_list);

          /* TRIAL may have had its delay slot filled, then unfilled.  When
             the delay slot is unfilled, TRIAL is placed back on the unfilled
             slots obstack.  Unfortunately, it is placed on the end of the
             obstack, not in its original location.  Therefore, we must search
             from entry i + 1 to the end of the unfilled slots obstack to
             try and find TRIAL.  */
          tmp = &unfilled_slots_base[i + 1];
          while (*tmp != trial && tmp != unfilled_slots_next)
            tmp++;

          /* Remove the unconditional jump from consideration for delay slot
             filling and unthread it.  */
          if (*tmp == trial)
            *tmp = 0;
          {
            rtx_insn *next = NEXT_INSN (trial);
            rtx_insn *prev = PREV_INSN (trial);
            if (prev)
              SET_NEXT_INSN (prev) = next;
            if (next)
              SET_PREV_INSN (next) = prev;
          }
        }

      /* Now, scan backwards from the insn to search for a potential
         delay-slot candidate.  Stop searching when a label or jump is hit.

         For each candidate, if it is to go into the delay slot (moved
         forward in execution sequence), it must not need or set any resources
         that were set by later insns and must not set any resources that
         are needed for those insns.

         The delay slot insn itself sets resources unless it is a call
         (in which case the called routine, not the insn itself, is doing
         the setting).  */

      if (slots_filled < slots_to_fill)
        {
          /* If the flags register is dead after the insn, then we want to be
             able to accept a candidate that clobbers it.  For this purpose,
             we need to filter the flags register during life analysis, so
             that it doesn't create RAW and WAW dependencies, while still
             creating the necessary WAR dependencies.  */
          bool filter_flags
            = (slots_to_fill == 1
               && targetm.flags_regnum != INVALID_REGNUM
               && find_regno_note (insn, REG_DEAD, targetm.flags_regnum));
          struct resources fset;
          CLEAR_RESOURCE (&needed);
          CLEAR_RESOURCE (&set);
          mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
          if (filter_flags)
            {
              CLEAR_RESOURCE (&fset);
              mark_set_resources (insn, &fset, 0, MARK_SRC_DEST);
            }
          mark_referenced_resources (insn, &needed, false);

          for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
               trial = next_trial)
            {
              next_trial = prev_nonnote_insn (trial);

              /* This must be an INSN or CALL_INSN.  */
              pat = PATTERN (trial);

              /* Stand-alone USE and CLOBBER are just for flow.  */
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
                continue;

              /* Check for resource conflict first, to avoid unnecessary
                 splitting.  */
              if (! insn_references_resource_p (trial, &set, true)
                  && ! insn_sets_resource_p (trial,
                                             filter_flags ? &fset : &set,
                                             true)
                  && ! insn_sets_resource_p (trial, &needed, true)
                  /* Can't separate set of cc0 from its use.  */
                  && (!HAVE_cc0 || ! (reg_mentioned_p (cc0_rtx, pat)
                                      && ! sets_cc0_p (pat)))
                  && ! can_throw_internal (trial))
                {
                  trial = try_split (pat, trial, 1);
                  next_trial = prev_nonnote_insn (trial);
                  if (eligible_for_delay (insn, slots_filled, trial, flags))
                    {
                      /* In this case, we are searching backward, so if we
                         find insns to put on the delay list, we want
                         to put them at the head, rather than the
                         tail, of the list.  */

                      update_reg_dead_notes (trial, insn);
                      delay_list = gen_rtx_INSN_LIST (VOIDmode,
                                                      trial, delay_list);
                      update_block (trial, trial);
                      delete_related_insns (trial);
                      if (slots_to_fill == ++slots_filled)
                        break;
                      continue;
                    }
                }

              mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
              if (filter_flags)
                {
                  mark_set_resources (trial, &fset, 0, MARK_SRC_DEST_CALL);

                  /* If the flags register is set, then it doesn't create RAW
                     dependencies any longer and it also doesn't create WAW
                     dependencies since it's dead after the original insn.  */
                  if (TEST_HARD_REG_BIT (fset.regs, targetm.flags_regnum))
                    {
                      CLEAR_HARD_REG_BIT (needed.regs, targetm.flags_regnum);
                      CLEAR_HARD_REG_BIT (fset.regs, targetm.flags_regnum);
                    }
                }
              mark_referenced_resources (trial, &needed, true);
            }
        }

      /* If all needed slots haven't been filled, we come here.  */

      /* Try to optimize the case of jumping around a single insn.  */
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
      if (slots_filled != slots_to_fill
          && delay_list == 0
          && JUMP_P (insn)
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && !ANY_RETURN_P (JUMP_LABEL (insn)))
        {
          delay_list = optimize_skip (as_a <rtx_jump_insn *> (insn));
          if (delay_list)
            slots_filled += 1;
        }
#endif

      /* Try to get insns from beyond the insn needing the delay slot.
         These insns can neither set nor reference resources set in insns being
         skipped, cannot set resources in the insn being skipped, and, if this
         is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
         call might not return).

         There used to be code which continued past the target label if
         we saw all uses of the target label.  This code did not work,
         because it failed to account for some instructions which were
         both annulled and marked as from the target.  This can happen as a
         result of optimize_skip.  Since this code was redundant with
         fill_eager_delay_slots anyways, it was just deleted.  */

      if (slots_filled != slots_to_fill
          /* If this instruction could throw an exception which is
             caught in the same function, then it's not safe to fill
             the delay slot with an instruction from beyond this
             point.  For example, consider:

               int i;
               try {
                 f ();
                 i = 3;
               } catch (...) {}

             Even though `i' is a local variable, we must be sure not
             to put `i = 3' in the delay slot if `f' might throw an
             exception.

             Presumably, we should also check to see if we could get
             back to this function via `setjmp'.  */
          && ! can_throw_internal (insn)
          && !JUMP_P (insn))
        {
          int maybe_never = 0;
          rtx pat, trial_delay;

          CLEAR_RESOURCE (&needed);
          CLEAR_RESOURCE (&set);
          mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
          mark_referenced_resources (insn, &needed, true);

          if (CALL_P (insn))
            maybe_never = 1;

          for (trial = next_nonnote_insn (insn); !stop_search_p (trial, 1);
               trial = next_trial)
            {
              next_trial = next_nonnote_insn (trial);

              /* This must be an INSN or CALL_INSN.  */
              pat = PATTERN (trial);

              /* Stand-alone USE and CLOBBER are just for flow.  */
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
                continue;

              /* If this already has filled delay slots, get the insn needing
                 the delay slots.  */
              if (GET_CODE (pat) == SEQUENCE)
                trial_delay = XVECEXP (pat, 0, 0);
              else
                trial_delay = trial;

              /* Stop our search when seeing a jump.  */
              if (JUMP_P (trial_delay))
                break;

              /* See if we have a resource problem before we try to split.  */
              if (GET_CODE (pat) != SEQUENCE
                  && ! insn_references_resource_p (trial, &set, true)
                  && ! insn_sets_resource_p (trial, &set, true)
                  && ! insn_sets_resource_p (trial, &needed, true)
                  && (!HAVE_cc0 || ! (reg_mentioned_p (cc0_rtx, pat)
                                      && ! sets_cc0_p (pat)))
                  && ! (maybe_never && may_trap_or_fault_p (pat))
                  && (trial = try_split (pat, trial, 0))
                  && eligible_for_delay (insn, slots_filled, trial, flags)
                  && ! can_throw_internal (trial))
                {
                  next_trial = next_nonnote_insn (trial);
                  delay_list = add_to_delay_list (trial, delay_list);
                  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, pat))
                    link_cc0_insns (trial);

                  delete_related_insns (trial);
                  if (slots_to_fill == ++slots_filled)
                    break;
                  continue;
                }

              mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
              mark_referenced_resources (trial, &needed, true);

              /* Ensure we don't put insns between the setting of cc and the
                 comparison by moving a setting of cc into an earlier delay
                 slot since these insns could clobber the condition code.  */
              set.cc = 1;

              /* If this is a call, we might not get here.  */
              if (CALL_P (trial_delay))
                maybe_never = 1;
            }

          /* If there are slots left to fill and our search was stopped by an
             unconditional branch, try the insn at the branch target.  We can
             redirect the branch if it works.

             Don't do this if the insn at the branch target is a branch.  */
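
          /* Sketch of this transformation (illustrative only):

		call _bar	; INSN, needs one more slot
		...
		b   L1		; TRIAL, stopped the forward search
	     ...
	     L1:add %o2,1,%o2	; NEXT_TRIAL

             The add is copied into the call's delay slot and the branch
             is redirected to a label placed just after the add.  */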
          if (slots_to_fill != slots_filled
              && trial
              && jump_to_label_p (trial)
              && simplejump_p (trial)
              && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
              && ! (NONJUMP_INSN_P (next_trial)
                    && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
              && !JUMP_P (next_trial)
              && ! insn_references_resource_p (next_trial, &set, true)
              && ! insn_sets_resource_p (next_trial, &set, true)
              && ! insn_sets_resource_p (next_trial, &needed, true)
              && (!HAVE_cc0 || ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial)))
              && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
              && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
              && eligible_for_delay (insn, slots_filled, next_trial, flags)
              && ! can_throw_internal (trial))
            {
              /* See comment in relax_delay_slots about necessity of using
                 next_real_insn here.  */
              rtx_insn *new_label = next_real_insn (next_trial);

              if (new_label != 0)
                new_label = get_label_before (new_label, JUMP_LABEL (trial));
              else
                new_label = find_end_label (simple_return_rtx);

              if (new_label)
                {
                  delay_list
                    = add_to_delay_list (copy_delay_slot_insn (next_trial),
                                         delay_list);
                  slots_filled++;
                  reorg_redirect_jump (as_a <rtx_jump_insn *> (trial),
                                       new_label);
                }
            }
        }

      /* If this is an unconditional jump, then try to get insns from the
         target of the jump.  */
      rtx_jump_insn *jump_insn;
      if ((jump_insn = dyn_cast <rtx_jump_insn *> (insn))
          && simplejump_p (jump_insn)
          && slots_filled != slots_to_fill)
        delay_list
          = fill_slots_from_thread (jump_insn, const_true_rtx,
                                    next_active_insn (JUMP_LABEL (insn)),
                                    NULL, 1, 1,
                                    own_thread_p (JUMP_LABEL (insn),
                                                  JUMP_LABEL (insn), 0),
                                    slots_to_fill, &slots_filled,
                                    delay_list);

      if (delay_list)
        unfilled_slots_base[i]
          = emit_delay_sequence (insn, delay_list, slots_filled);

      if (slots_to_fill == slots_filled)
        unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 0);
    }
}
/* Follow any unconditional jump at LABEL, for the purpose of redirecting JUMP;
   return the ultimate label reached by any such chain of jumps.
   Return a suitable return rtx if the chain ultimately leads to a
   return instruction.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.
   If the returned label is obtained by following a crossing jump,
   set *CROSSING to true, otherwise set it to false.  */

static rtx
follow_jumps (rtx label, rtx_insn *jump, bool *crossing)
{
  rtx_insn *insn;
  rtx_insn *next;
  int depth;

  *crossing = false;
  if (ANY_RETURN_P (label))
    return label;

  rtx_insn *value = as_a <rtx_insn *> (label);

  for (depth = 0;
       (depth < 10
        && (insn = next_active_insn (value)) != 0
        && JUMP_P (insn)
        && JUMP_LABEL (insn) != NULL_RTX
        && ((any_uncondjump_p (insn) && onlyjump_p (insn))
            || ANY_RETURN_P (PATTERN (insn)))
        && (next = NEXT_INSN (insn))
        && BARRIER_P (next));
       depth++)
    {
      rtx this_label_or_return = JUMP_LABEL (insn);

      /* If we have found a cycle, make the insn jump to itself.  */
      if (this_label_or_return == label)
        return label;

      /* Cannot follow returns and cannot look through tablejumps.  */
      if (ANY_RETURN_P (this_label_or_return))
        return this_label_or_return;

      rtx_insn *this_label = as_a <rtx_insn *> (this_label_or_return);
      if (NEXT_INSN (this_label)
          && JUMP_TABLE_DATA_P (NEXT_INSN (this_label)))
        break;

      if (!targetm.can_follow_jump (jump, insn))
        break;
      if (!*crossing)
        *crossing = CROSSING_JUMP_P (jump);
      value = this_label;
    }

  if (depth == 10)
    return label;

  return value;
}
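
/* Illustrative chain (assuming targetm.can_follow_jump allows each hop):

     L1:	b  L2
     L2:	b  L3
     L3:	<first active insn>

   follow_jumps (L1, JUMP, &crossing) returns L3.  A cycle, a RETURN, a
   tablejump, or a chain deeper than 10 stops the walk.  */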
/* Try to find insns to place in delay slots.

   INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
   or is an unconditional branch if CONDITION is const_true_rtx.
   *PSLOTS_FILLED is updated with the number of slots that we have filled.

   THREAD is a flow-of-control, either the insns to be executed if the
   branch is true or if the branch is false, THREAD_IF_TRUE says which.

   OPPOSITE_THREAD is the thread in the opposite direction.  It is used
   to see if any potential delay slot insns set things needed there.

   LIKELY is nonzero if it is extremely likely that the branch will be
   taken and THREAD_IF_TRUE is set.  This is used for the branch at the
   end of a loop back up to the top.

   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
   thread.  I.e., it is the fallthrough code of our jump or the target of the
   jump when we are the only jump going there.

   If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
   case, we can only take insns from the head of the thread for our delay
   slot.  We then adjust the jump to point after the insns we have taken.  */
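
/* Sketch of the non-owned case (illustrative only):

	beq  L1				beq  L1'   ; slot: add %o2,1,%o2
	...			=>	...
     L1:add  %o2,1,%o2		     L1:add  %o2,1,%o2
	sub  %o3,1,%o3		     L1':sub %o3,1,%o3

   Only the head insn of the shared thread is copied; INSN is then
   redirected to the new label L1' after it.  */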
static rtx_insn_list *
fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
                        rtx thread_or_return, rtx opposite_thread, int likely,
                        int thread_if_true, int own_thread, int slots_to_fill,
                        int *pslots_filled, rtx_insn_list *delay_list)
{
  rtx new_thread;
  struct resources opposite_needed, set, needed;
  rtx_insn *trial;
  int lose = 0;
  int must_annul = 0;
  int flags;

  /* Validate our arguments.  */
  gcc_assert (condition != const_true_rtx || thread_if_true);
  gcc_assert (own_thread || thread_if_true);

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* If our thread is the end of subroutine, we can't get any delay
     insns from that.  */
  if (thread_or_return == NULL_RTX || ANY_RETURN_P (thread_or_return))
    return delay_list;

  rtx_insn *thread = as_a <rtx_insn *> (thread_or_return);

  /* If this is an unconditional branch, nothing is needed at the
     opposite thread.  Otherwise, compute what is needed there.  */
  if (condition == const_true_rtx)
    CLEAR_RESOURCE (&opposite_needed);
  else
    mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);

  /* If the insn at THREAD can be split, do it here to avoid having to
     update THREAD and NEW_THREAD if it is done in the loop below.  Also
     initialize NEW_THREAD.  */

  new_thread = thread = try_split (PATTERN (thread), thread, 0);

  /* Scan insns at THREAD.  We are looking for an insn that can be removed
     from THREAD (it neither sets nor references resources that were set
     ahead of it and it doesn't set anything needed by the insns ahead of
     it) and that either can be placed in an annulling insn or isn't
     needed at OPPOSITE_THREAD.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If we do not own this thread, we must stop as soon as we find
     something that we can't put in a delay slot, since all we can do
     is branch into THREAD at a later point.  Therefore, labels stop
     the search if this is not the `true' thread.  */

  for (trial = thread;
       ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
       trial = next_nonnote_insn (trial))
    {
      rtx pat, old_trial;
      rtx prior_insn;

      /* If we have passed a label, we no longer own this thread.  */
      if (LABEL_P (trial))
        {
          own_thread = 0;
          continue;
        }

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
         don't separate or copy insns that set and use CC0.  */
      if (! insn_references_resource_p (trial, &set, true)
          && ! insn_sets_resource_p (trial, &set, true)
          && ! insn_sets_resource_p (trial, &needed, true)
          && (!HAVE_cc0 || (! (reg_mentioned_p (cc0_rtx, pat)
                               && (! own_thread || ! sets_cc0_p (pat)))))
          && ! can_throw_internal (trial))
        {
          /* If TRIAL is redundant with some insn before INSN, we don't
             actually need to add it to the delay list; we can merely pretend
             we did.  */
          if ((prior_insn = redundant_insn (trial, insn, delay_list)))
            {
              fix_reg_dead_note (prior_insn, insn);
              if (own_thread)
                {
                  update_block (trial, thread);
                  if (trial == thread)
                    {
                      thread = next_active_insn (thread);
                      if (new_thread == trial)
                        new_thread = thread;
                    }

                  delete_related_insns (trial);
                }
              else
                {
                  update_reg_unused_notes (prior_insn, trial);
                  new_thread = next_active_insn (trial);
                }

              continue;
            }

          /* There are two ways we can win:  If TRIAL doesn't set anything
             needed at the opposite thread and can't trap, or if it can
             go into an annulled delay slot.  But we want neither to copy
             nor to speculate frame-related insns.  */
          if (!must_annul
              && ((condition == const_true_rtx
                   && (own_thread || !RTX_FRAME_RELATED_P (trial)))
                  || (! insn_sets_resource_p (trial, &opposite_needed, true)
                      && ! may_trap_or_fault_p (pat)
                      && ! RTX_FRAME_RELATED_P (trial))))
            {
              old_trial = trial;
              trial = try_split (pat, trial, 0);
              if (new_thread == old_trial)
                new_thread = trial;
              if (thread == old_trial)
                thread = trial;
              pat = PATTERN (trial);
              if (eligible_for_delay (insn, *pslots_filled, trial, flags))
                goto winner;
            }
          else if (0
#ifdef ANNUL_IFTRUE_SLOTS
                   || ! thread_if_true
#endif
#ifdef ANNUL_IFFALSE_SLOTS
                   || thread_if_true
#endif
                   )
            {
              old_trial = trial;
              trial = try_split (pat, trial, 0);
              if (new_thread == old_trial)
                new_thread = trial;
              if (thread == old_trial)
                thread = trial;
              pat = PATTERN (trial);
              if ((must_annul || delay_list == NULL) && (thread_if_true
                  ? check_annul_list_true_false (0, delay_list)
                    && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
                  : check_annul_list_true_false (1, delay_list)
                    && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
                {
                  rtx_insn *temp;

                  must_annul = 1;
                winner:

                  if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, pat))
                    link_cc0_insns (trial);

                  /* If we own this thread, delete the insn.  If this is the
                     destination of a branch, show that a basic block status
                     may have been updated.  In any case, mark the new
                     starting point of this thread.  */
                  if (own_thread)
                    {
                      rtx note;

                      update_block (trial, thread);
                      if (trial == thread)
                        {
                          thread = next_active_insn (thread);
                          if (new_thread == trial)
                            new_thread = thread;
                        }

                      /* We are moving this insn, not deleting it.  We must
                         temporarily increment the use count on any referenced
                         label lest it be deleted by delete_related_insns.  */
                      for (note = REG_NOTES (trial);
                           note != NULL_RTX;
                           note = XEXP (note, 1))
                        if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
                            || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
                          {
                            /* REG_LABEL_OPERAND could be
                               NOTE_INSN_DELETED_LABEL too.  */
                            if (LABEL_P (XEXP (note, 0)))
                              LABEL_NUSES (XEXP (note, 0))++;
                            else
                              gcc_assert (REG_NOTE_KIND (note)
                                          == REG_LABEL_OPERAND);
                          }
                      if (jump_to_label_p (trial))
                        LABEL_NUSES (JUMP_LABEL (trial))++;

                      delete_related_insns (trial);

                      for (note = REG_NOTES (trial);
                           note != NULL_RTX;
                           note = XEXP (note, 1))
                        if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
                            || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
                          {
                            /* REG_LABEL_OPERAND could be
                               NOTE_INSN_DELETED_LABEL too.  */
                            if (LABEL_P (XEXP (note, 0)))
                              LABEL_NUSES (XEXP (note, 0))--;
                            else
                              gcc_assert (REG_NOTE_KIND (note)
                                          == REG_LABEL_OPERAND);
                          }
                      if (jump_to_label_p (trial))
                        LABEL_NUSES (JUMP_LABEL (trial))--;
                    }
                  else
                    new_thread = next_active_insn (trial);

                  temp = own_thread ? trial : copy_delay_slot_insn (trial);
                  if (thread_if_true)
                    INSN_FROM_TARGET_P (temp) = 1;

                  delay_list = add_to_delay_list (temp, delay_list);

                  if (slots_to_fill == ++(*pslots_filled))
                    {
                      /* Even though we have filled all the slots, we
                         may be branching to a location that has a
                         redundant insn.  Skip any if so.  */
                      while (new_thread && ! own_thread
                             && ! insn_sets_resource_p (new_thread, &set, true)
                             && ! insn_sets_resource_p (new_thread, &needed,
                                                        true)
                             && ! insn_references_resource_p (new_thread,
                                                              &set, true)
                             && (prior_insn
                                 = redundant_insn (new_thread, insn,
                                                   delay_list)))
                        {
                          /* We know we do not own the thread, so no need
                             to call update_block and delete_insn.  */
                          fix_reg_dead_note (prior_insn, insn);
                          update_reg_unused_notes (prior_insn, new_thread);
                          new_thread = next_active_insn (new_thread);
                        }
                      break;
                    }

                  continue;
                }
            }
        }

      /* This insn can't go into a delay slot.  */
      lose = 1;
      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, true);

      /* Ensure we don't put insns between the setting of cc and the comparison
         by moving a setting of cc into an earlier delay slot since these insns
         could clobber the condition code.  */
      set.cc = 1;

      /* If this insn is a register-register copy and the next insn has
         a use of our destination, change it to use our source.  That way,
         it will become a candidate for our delay slot the next time
         through this loop.  This case occurs commonly in loops that
         scan a list.

         We could check for more complex cases than those tested below,
         but it doesn't seem worth it.  It might also be a good idea to try
         to swap the two insns.  That might do better.

         We can't do this if the next insn modifies our destination, because
         that would make the replacement into the insn invalid.  We also can't
         do this if it modifies our source, because it might be an earlyclobber
         operand.  This latter test also prevents updating the contents of
         a PRE_INC.  We also can't do this if there's overlap of source and
         destination.  Overlap may happen for larger-than-register-size modes.  */
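
      /* For example (schematic RTL, hypothetical register numbers):

		(set (reg 2) (reg 1))			; TRIAL, a reg-reg copy
		(set (reg 3) (plus (reg 2) (const_int 4)))

         Rewriting the second insn to use (reg 1) removes its dependence on
         the copy, so it can be considered for the delay slot on a later
         iteration.  */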
      if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
          && REG_P (SET_SRC (pat))
          && REG_P (SET_DEST (pat))
          && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
        {
          rtx_insn *next = next_nonnote_insn (trial);

          if (next && NONJUMP_INSN_P (next)
              && GET_CODE (PATTERN (next)) != USE
              && ! reg_set_p (SET_DEST (pat), next)
              && ! reg_set_p (SET_SRC (pat), next)
              && reg_referenced_p (SET_DEST (pat), PATTERN (next))
              && ! modified_in_p (SET_DEST (pat), next))
            validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
        }
    }

  /* If we stopped on a branch insn that has delay slots, see if we can
     steal some of the insns in those slots.  */
  if (trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (trial));
      /* If this is the `true' thread, we will want to follow the jump,
         so we can only do this if we have taken everything up to here.  */
      if (thread_if_true && trial == new_thread)
        {
          delay_list
            = steal_delay_list_from_target (insn, condition, sequence,
                                            delay_list, &set, &needed,
                                            &opposite_needed, slots_to_fill,
                                            pslots_filled, &must_annul,
                                            &new_thread);
          /* If we owned the thread and are told that it branched
             elsewhere, make sure we own the thread at the new location.  */
          if (own_thread && trial != new_thread)
            own_thread = own_thread_p (new_thread, new_thread, 0);
        }
      else if (! thread_if_true)
        delay_list
          = steal_delay_list_from_fallthrough (insn, condition,
                                               sequence,
                                               delay_list, &set, &needed,
                                               &opposite_needed, slots_to_fill,
                                               pslots_filled, &must_annul);
    }

  /* If we haven't found anything for this delay slot and it is very
     likely that the branch will be taken, see if the insn at our target
     increments or decrements a register with an increment that does not
     depend on the destination register.  If so, try to place the opposite
     arithmetic insn after the jump insn and put the arithmetic insn in the
     delay slot.  If we can't do this, return.  */
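
  /* Schematic example (illustrative only): for a likely loop-back branch
     whose target is

     L1:	(set (reg 9) (plus (reg 9) (const_int 4)))

     we emit (set (reg 9) (plus (reg 9) (const_int -4))) after the branch
     and put the original increment in the delay slot; the two cancel on
     the fall-through path.  */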
  if (delay_list == 0 && likely
      && new_thread && !ANY_RETURN_P (new_thread)
      && NONJUMP_INSN_P (new_thread)
      && !RTX_FRAME_RELATED_P (new_thread)
      && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
      && asm_noperands (PATTERN (new_thread)) < 0)
    {
      rtx pat = PATTERN (new_thread);
      rtx dest;
      rtx src;

      /* We know "new_thread" is an insn due to NONJUMP_INSN_P (new_thread)
         above.  */
      trial = as_a <rtx_insn *> (new_thread);
      pat = PATTERN (trial);

      if (!NONJUMP_INSN_P (trial)
          || GET_CODE (pat) != SET
          || ! eligible_for_delay (insn, 0, trial, flags)
          || can_throw_internal (trial))
        return 0;

      dest = SET_DEST (pat), src = SET_SRC (pat);
      if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
          && rtx_equal_p (XEXP (src, 0), dest)
          && (!FLOAT_MODE_P (GET_MODE (src))
              || flag_unsafe_math_optimizations)
          && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
          && ! side_effects_p (pat))
        {
          rtx other = XEXP (src, 1);
          rtx new_arith;
          rtx_insn *ninsn;

          /* If this is a constant adjustment, use the same code with
             the negated constant.  Otherwise, reverse the sense of the
             arithmetic.  */
          if (CONST_INT_P (other))
            new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
                                        negate_rtx (GET_MODE (src), other));
          else
            new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
                                        GET_MODE (src), dest, other);

          ninsn = emit_insn_after (gen_rtx_SET (dest, new_arith), insn);

          if (recog_memoized (ninsn) < 0
              || (extract_insn (ninsn),
                  !constrain_operands (1, get_preferred_alternatives (ninsn))))
            {
              delete_related_insns (ninsn);
              return 0;
            }

          if (own_thread)
            {
              update_block (trial, thread);
              if (trial == thread)
                {
                  thread = next_active_insn (thread);
                  if (new_thread == trial)
                    new_thread = thread;
                }
              delete_related_insns (trial);
            }
          else
            new_thread = next_active_insn (trial);

          ninsn = own_thread ? trial : copy_delay_slot_insn (trial);
          if (thread_if_true)
            INSN_FROM_TARGET_P (ninsn) = 1;

          delay_list = add_to_delay_list (ninsn, NULL);
          (*pslots_filled)++;
        }
    }

  if (delay_list && must_annul)
    INSN_ANNULLED_BRANCH_P (insn) = 1;

  /* If we are to branch into the middle of this thread, find an appropriate
     label or make a new one if none, and redirect INSN to it.  If we hit the
     end of the function, use the end-of-function label.  */
  if (new_thread != thread)
    {
      rtx label;
      bool crossing = false;

      gcc_assert (thread_if_true);

      if (new_thread && simplejump_or_return_p (new_thread)
          && redirect_with_delay_list_safe_p (insn,
                                              JUMP_LABEL (new_thread),
                                              delay_list))
        new_thread = follow_jumps (JUMP_LABEL (new_thread), insn,
                                   &crossing);

      if (ANY_RETURN_P (new_thread))
        label = find_end_label (new_thread);
      else if (LABEL_P (new_thread))
        label = new_thread;
      else
        label = get_label_before (as_a <rtx_insn *> (new_thread),
                                  JUMP_LABEL (insn));

      if (label)
        {
          reorg_redirect_jump (insn, label);
          if (crossing)
            CROSSING_JUMP_P (insn) = 1;
        }
    }

  return delay_list;
}
/* Make another attempt to find insns to place in delay slots.

   We previously looked for insns located in front of the delay insn
   and, for non-jump delay insns, located behind the delay insn.

   Here only try to schedule jump insns and try to move insns from either
   the target or the following insns into the delay slot.  If annulling is
   supported, we will be likely to do this.  Otherwise, we can do this only
   if safe.  */

static void
fill_eager_delay_slots (void)
{
  rtx_insn *insn;
  int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      rtx condition;
      rtx target_label, insn_at_target;
      rtx_insn *fallthrough_insn;
      rtx_insn_list *delay_list = 0;
      rtx_jump_insn *jump_insn;
      int own_target;
      int own_fallthrough;
      int prediction, slots_to_fill, slots_filled;

      insn = unfilled_slots_base[i];
      if (insn == 0
          || insn->deleted ()
          || ! (jump_insn = dyn_cast <rtx_jump_insn *> (insn))
          || ! (condjump_p (jump_insn) || condjump_in_parallel_p (jump_insn)))
        continue;

      slots_to_fill = num_delay_slots (jump_insn);
      /* Some machine descriptions have defined instructions to have
         delay slots only in certain circumstances which may depend on
         nearby insns (which change due to reorg's actions).

         For example, the PA port normally has delay slots for unconditional
         jumps.

         However, the PA port claims such jumps do not have a delay slot
         if they are immediate successors of certain CALL_INSNs.  This
         allows the port to favor filling the delay slot of the call with
         the unconditional jump.  */
      if (slots_to_fill == 0)
        continue;

      slots_filled = 0;
      target_label = JUMP_LABEL (jump_insn);
      condition = get_branch_condition (jump_insn, target_label);

      if (condition == 0)
        continue;

      /* Get the next active fallthrough and target insns and see if we own
         them.  Then see whether the branch is likely true.  We don't need
         to do a lot of this for unconditional branches.  */

      insn_at_target = first_active_target_insn (target_label);
      own_target = own_thread_p (target_label, target_label, 0);

      if (condition == const_true_rtx)
        {
          own_fallthrough = 0;
          fallthrough_insn = 0;
          prediction = 2;
        }
      else
        {
          fallthrough_insn = next_active_insn (jump_insn);
          own_fallthrough = own_thread_p (NEXT_INSN (jump_insn), NULL_RTX, 1);
          prediction = mostly_true_jump (jump_insn);
        }

      /* If this insn is expected to branch, first try to get insns from our
         target, then our fallthrough insns.  If it is not expected to branch,
         try the other order.  */

      if (prediction > 0)
        {
          delay_list
            = fill_slots_from_thread (jump_insn, condition, insn_at_target,
                                      fallthrough_insn, prediction == 2, 1,
                                      own_target,
                                      slots_to_fill, &slots_filled, delay_list);

          if (delay_list == 0 && own_fallthrough)
            {
              /* Even though we didn't find anything for delay slots,
                 we might have found a redundant insn which we deleted
                 from the thread that was filled.  So we have to recompute
                 the next insn at the target.  */
              target_label = JUMP_LABEL (jump_insn);
              insn_at_target = first_active_target_insn (target_label);

              delay_list
                = fill_slots_from_thread (jump_insn, condition,
                                          fallthrough_insn,
                                          insn_at_target, 0, 0,
                                          own_fallthrough,
                                          slots_to_fill, &slots_filled,
                                          delay_list);
            }
        }
      else
        {
          if (own_fallthrough)
            delay_list
              = fill_slots_from_thread (jump_insn, condition, fallthrough_insn,
                                        insn_at_target, 0, 0,
                                        own_fallthrough,
                                        slots_to_fill, &slots_filled,
                                        delay_list);

          if (delay_list == 0)
            delay_list
              = fill_slots_from_thread (jump_insn, condition, insn_at_target,
                                        next_active_insn (insn), 0, 1,
                                        own_target,
                                        slots_to_fill, &slots_filled,
                                        delay_list);
        }

      if (delay_list)
        unfilled_slots_base[i]
          = emit_delay_sequence (jump_insn, delay_list, slots_filled);

      if (slots_to_fill == slots_filled)
        unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 1);
    }
}
static void delete_computation (rtx insn);

/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx_insn *our_prev;
  rtx reg = XEXP (note, 0);

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (NONJUMP_INSN_P (our_prev)
                    || CALL_P (our_prev));
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
         or the callee pops the arguments, then give up.  */
      if (CALL_P (our_prev)
          && (! RTL_CONST_CALL_P (our_prev)
              || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
        break;

      /* If we reach a SEQUENCE, it is too complex to try to
         do anything with it, so give up.  We can be run during
         and after reorg, so SEQUENCE rtl can legitimately show
         up here.  */
      if (GET_CODE (pat) == SEQUENCE)
        break;

      if (GET_CODE (pat) == USE
          && NONJUMP_INSN_P (XEXP (pat, 0)))
        /* reorg creates USEs that look like this.  We leave them
           alone because reorg needs them for its own purposes.  */
        continue;

      if (reg_set_p (reg, pat))
        {
          if (side_effects_p (pat) && !CALL_P (our_prev))
            break;

          if (GET_CODE (pat) == PARALLEL)
            {
              /* If we find a SET of something else, we can't
                 delete the insn.  */

              int i;

              for (i = 0; i < XVECLEN (pat, 0); i++)
                {
                  rtx part = XVECEXP (pat, 0, i);

                  if (GET_CODE (part) == SET
                      && SET_DEST (part) != reg)
                    break;
                }

              if (i == XVECLEN (pat, 0))
                delete_computation (our_prev);
            }
          else if (GET_CODE (pat) == SET
                   && REG_P (SET_DEST (pat)))
            {
              int dest_regno = REGNO (SET_DEST (pat));
              int dest_endregno = END_REGNO (SET_DEST (pat));
              int regno = REGNO (reg);
              int endregno = END_REGNO (reg);

              if (dest_regno >= regno
                  && dest_endregno <= endregno)
                delete_computation (our_prev);

              /* We may have a multi-word hard register and some, but not
                 all, of the words of the register are needed in subsequent
                 insns.  Write REG_UNUSED notes for those parts that were not
                 needed.  */
              else if (dest_regno <= regno
                       && dest_endregno >= endregno)
                {
                  int i;

                  add_reg_note (our_prev, REG_UNUSED, reg);

                  for (i = dest_regno; i < dest_endregno; i++)
                    if (! find_regno_note (our_prev, REG_UNUSED, i))
                      break;

                  if (i == dest_endregno)
                    delete_computation (our_prev);
                }
            }

          break;
        }

      /* If PAT references the register that dies here, it is an
         additional use.  Hence any prior SET isn't dead.  However, this
         insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
        {
          XEXP (note, 1) = REG_NOTES (our_prev);
          REG_NOTES (our_prev) = note;
          break;
        }
    }
}
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.

   Look at all our REG_DEAD notes.  If a previous insn does nothing other
   than set a register that dies in this insn, we can delete that insn
   as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (rtx insn)
{
  rtx note, next;

  if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx_insn *prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
         CC's are always set explicitly
         and always immediately before the jump that
         will use them.  So if the previous insn
         exists to set the CC's, delete it
         (unless it performs auto-increments, etc.).  */
      if (prev && NONJUMP_INSN_P (prev)
          && sets_cc0_p (PATTERN (prev)))
        {
          if (sets_cc0_p (PATTERN (prev)) > 0
              && ! side_effects_p (PATTERN (prev)))
            delete_computation (prev);
          else
            /* Otherwise, show that cc0 won't be used.  */
            add_reg_note (prev, REG_UNUSED, cc0_rtx);
        }
    }

  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
          /* Verify that the REG_NOTE is legitimate.  */
          || !REG_P (XEXP (note, 0)))
        continue;

      delete_prior_computation (note, insn);
    }

  delete_related_insns (insn);
}
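
/* Illustrative sketch (hypothetical registers): when deleting

	(insn 1 (set (reg 10) (plus (reg 11) (const_int 1))))
	(jump_insn 2 ... (reg 10) ...)	; carries a REG_DEAD note for r10

   delete_computation on insn 2 follows the REG_DEAD note back to insn 1
   and deletes it as well, since its only consumer is going away.  */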
/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

static void
delete_jump (rtx_insn *insn)
{
  rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == PC)
    delete_computation (insn);
}
static rtx_insn *
label_before_next_insn (rtx x, rtx scan_limit)
{
  rtx_insn *insn = next_active_insn (x);
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == scan_limit || insn == NULL_RTX)
        return NULL;
      if (LABEL_P (insn))
        break;
    }
  return insn;
}
/* Return TRUE if there is a NOTE_INSN_SWITCH_TEXT_SECTIONS note in between
   BEG and END.  */

static bool
switch_text_sections_between_p (const rtx_insn *beg, const rtx_insn *end)
{
  const rtx_insn *p;
  for (p = beg; p != end; p = NEXT_INSN (p))
    if (NOTE_P (p) && NOTE_KIND (p) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
      return true;
  return false;
}
/* Once we have tried two ways to fill a delay slot, make a pass over the
   code to try to improve the results and to do such things as more jump
   threading.  */

static void
relax_delay_slots (rtx_insn *first)
{
  rtx_insn *insn, *next;
  rtx_sequence *pat;
  rtx trial;
  rtx_insn *delay_insn;
  rtx target_label;

  /* Look at every JUMP_INSN and see if we can improve it.  */
  for (insn = first; insn; insn = next)
    {
      rtx_insn *other;
      bool crossing;

      next = next_active_insn (insn);

      /* If this is a jump insn, see if it now jumps to a jump, jumps to
         the next insn, or jumps to a label that is not the last of a
         group of consecutive labels.  */
      if (is_a <rtx_jump_insn *> (insn)
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
        {
          rtx_jump_insn *jump_insn = as_a <rtx_jump_insn *> (insn);
          target_label
            = skip_consecutive_labels (follow_jumps (target_label, jump_insn,
                                                     &crossing));
          if (ANY_RETURN_P (target_label))
            target_label = find_end_label (target_label);

          if (target_label && next_active_insn (target_label) == next
              && ! condjump_in_parallel_p (jump_insn)
              && ! (next && switch_text_sections_between_p (jump_insn, next)))
            {
              delete_jump (jump_insn);
              continue;
            }

          if (target_label && target_label != JUMP_LABEL (jump_insn))
            {
              reorg_redirect_jump (jump_insn, target_label);
              if (crossing)
                CROSSING_JUMP_P (jump_insn) = 1;
            }

          /* See if this jump conditionally branches around an unconditional
             jump.  If so, invert this jump and point it to the target of the
             second jump.  Check if it's possible on the target.  */
          if (next && simplejump_or_return_p (next)
              && any_condjump_p (jump_insn)
              && target_label
              && next_active_insn (target_label) == next_active_insn (next)
              && no_labels_between_p (jump_insn, next)
              && targetm.can_follow_jump (jump_insn, next))
            {
              rtx label = JUMP_LABEL (next);

              /* Be careful how we do this to avoid deleting code or
                 labels that are momentarily dead.  See similar optimization
                 in jump.c.

                 We also need to ensure we properly handle the case when
                 invert_jump fails.  */

              ++LABEL_NUSES (target_label);
              if (!ANY_RETURN_P (label))
                ++LABEL_NUSES (label);

              if (invert_jump (jump_insn, label, 1))
                {
                  delete_related_insns (next);
                  next = jump_insn;
                }

              if (!ANY_RETURN_P (label))
                --LABEL_NUSES (label);

              if (--LABEL_NUSES (target_label) == 0)
                delete_related_insns (target_label);

              continue;
            }
        }

      /* If this is an unconditional jump and the previous insn is a
         conditional jump, try reversing the condition of the previous
         insn and swapping our targets.  The next pass might be able to
         fill the slots.

         Don't do this if we expect the conditional branch to be true, because
         we would then be making the more common case longer.  */

      if (simplejump_or_return_p (insn)
          && (other = prev_active_insn (insn)) != 0
          && any_condjump_p (other)
          && no_labels_between_p (other, insn)
          && 0 > mostly_true_jump (other))
        {
          rtx other_target = JUMP_LABEL (other);
          target_label = JUMP_LABEL (insn);

          if (invert_jump (as_a <rtx_jump_insn *> (other), target_label, 0))
            reorg_redirect_jump (as_a <rtx_jump_insn *> (insn), other_target);
        }

      /* Now look only at cases where we have a filled delay slot.  */
      if (!NONJUMP_INSN_P (insn) || GET_CODE (PATTERN (insn)) != SEQUENCE)
        continue;

      pat = as_a <rtx_sequence *> (PATTERN (insn));
      delay_insn = pat->insn (0);

      /* See if the first insn in the delay slot is redundant with some
         previous insn.  Remove it from the delay slot if so; then set up
         to reprocess this insn.  */
      if (redundant_insn (pat->insn (1), delay_insn, 0))
        {
          update_block (pat->insn (1), insn);
          delete_from_delay_slot (pat->insn (1));
          next = prev_active_insn (next);
          continue;
        }

      /* See if we have a RETURN insn with a filled delay slot followed
         by a RETURN insn with an unfilled delay slot.  If so, we can delete
         the first RETURN (but not its delay insn).  This gives the same
         effect in fewer instructions.

         Only do so if optimizing for size since this results in slower, but
         smaller code.  */
      if (optimize_function_for_size_p (cfun)
          && ANY_RETURN_P (PATTERN (delay_insn))
          && next
          && JUMP_P (next)
          && PATTERN (next) == PATTERN (delay_insn))
        {
          rtx_insn *after;
          int i;

          /* Delete the RETURN and just execute the delay list insns.

             We do this by deleting the INSN containing the SEQUENCE, then
             re-emitting the insns separately, and then deleting the RETURN.
             This allows the count of the jump target to be properly
             decremented.

             Note that we need to change the INSN_UID of the re-emitted insns
             since it is used to hash the insns for mark_target_live_regs and
             the re-emitted insns will no longer be wrapped up in a SEQUENCE.

             Clear the from target bit, since these insns are no longer
             in delay slots.  */
          for (i = 0; i < XVECLEN (pat, 0); i++)
            INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

          trial = PREV_INSN (insn);
          delete_related_insns (insn);
          gcc_assert (GET_CODE (pat) == SEQUENCE);
          add_insn_after (delay_insn, trial, NULL);
          after = delay_insn;
          for (i = 1; i < pat->len (); i++)
            after = emit_copy_of_insn_after (pat->insn (i), after);
          delete_scheduled_jump (delay_insn);
          continue;
        }

      /* Now look only at the cases where we have a filled JUMP_INSN.  */
      rtx_jump_insn *delay_jump_insn =
        dyn_cast <rtx_jump_insn *> (delay_insn);
      if (! delay_jump_insn || !(condjump_p (delay_jump_insn)
          || condjump_in_parallel_p (delay_jump_insn)))
        continue;

      target_label = JUMP_LABEL (delay_jump_insn);
      if (target_label && ANY_RETURN_P (target_label))
        continue;

      /* If this jump goes to another unconditional jump, thread it, but
         don't convert a jump into a RETURN here.  */
      trial = skip_consecutive_labels (follow_jumps (target_label,
                                                     delay_jump_insn,
                                                     &crossing));
      if (ANY_RETURN_P (trial))
        trial = find_end_label (trial);

      if (trial && trial != target_label
          && redirect_with_delay_slots_safe_p (delay_jump_insn, trial, insn))
        {
          reorg_redirect_jump (delay_jump_insn, trial);
          target_label = trial;
          if (crossing)
            CROSSING_JUMP_P (insn) = 1;
        }

      /* If the first insn at TARGET_LABEL is redundant with a previous
         insn, redirect the jump to the following insn and process again.
         We use next_real_insn instead of next_active_insn so we
         don't skip USE-markers, or we'll end up with incorrect
         liveness info.  */
      trial = next_real_insn (target_label);
      if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
          && redundant_insn (trial, insn, 0)
          && ! can_throw_internal (trial))
        {
          /* Figure out where to emit the special USE insn so we don't
             later incorrectly compute register live/death info.  */
          rtx_insn *tmp = next_active_insn (trial);
          if (tmp == 0)
            tmp = find_end_label (simple_return_rtx);

          if (tmp)
            {
              /* Insert the special USE insn and update dataflow info.
                 We know "trial" is an insn here as it is the output of
                 next_real_insn () above.  */
              update_block (as_a <rtx_insn *> (trial), tmp);

              /* Now emit a label before the special USE insn, and
                 redirect our jump to the new label.  */
              target_label = get_label_before (PREV_INSN (tmp), target_label);
              reorg_redirect_jump (delay_jump_insn, target_label);
              next = insn;
              continue;
            }
        }

      /* Similarly, if it is an unconditional jump with one insn in its
         delay list and that insn is redundant, thread the jump.  */
      rtx_sequence *trial_seq =
        trial ? dyn_cast <rtx_sequence *> (PATTERN (trial)) : NULL;
      if (trial_seq
          && trial_seq->len () == 2
          && JUMP_P (trial_seq->insn (0))
          && simplejump_or_return_p (trial_seq->insn (0))
          && redundant_insn (trial_seq->insn (1), insn, 0))
        {
          target_label = JUMP_LABEL (trial_seq->insn (0));
          if (ANY_RETURN_P (target_label))
            target_label = find_end_label (target_label);

          if (target_label
              && redirect_with_delay_slots_safe_p (delay_jump_insn,
                                                   target_label, insn))
            {
              update_block (trial_seq->insn (1), insn);
              reorg_redirect_jump (delay_jump_insn, target_label);
              next = insn;
              continue;
            }
        }

      /* See if we have a simple (conditional) jump that is useless.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_jump_insn)
          && ! condjump_in_parallel_p (delay_jump_insn)
          && prev_active_insn (target_label) == insn
          && ! BARRIER_P (prev_nonnote_insn (target_label))
          /* If the last insn in the delay slot sets CC0 for some insn,
             various code assumes that it is in a delay slot.  We could
             put it back where it belonged and delete the register notes,
             but it doesn't seem worthwhile in this uncommon case.  */
          && (!HAVE_cc0
              || ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
                                  REG_CC_USER, NULL_RTX)))
        {
          rtx_insn *after;
          int i;

          /* All this insn does is execute its delay list and jump to the
             following insn.  So delete the jump and just execute the delay
             list insns.

             We do this by deleting the INSN containing the SEQUENCE, then
             re-emitting the insns separately, and then deleting the jump.
             This allows the count of the jump target to be properly
             decremented.

             Note that we need to change the INSN_UID of the re-emitted insns
             since it is used to hash the insns for mark_target_live_regs and
             the re-emitted insns will no longer be wrapped up in a SEQUENCE.

             Clear the from target bit, since these insns are no longer
             in delay slots.  */
          for (i = 0; i < XVECLEN (pat, 0); i++)
            INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

          trial = PREV_INSN (insn);
          delete_related_insns (insn);
          gcc_assert (GET_CODE (pat) == SEQUENCE);
          add_insn_after (delay_jump_insn, trial, NULL);
          after = delay_jump_insn;
          for (i = 1; i < pat->len (); i++)
            after = emit_copy_of_insn_after (pat->insn (i), after);
          delete_scheduled_jump (delay_jump_insn);
          continue;
        }

      /* See if this is an unconditional jump around a single insn which is
         identical to the one in its delay slot.  In this case, we can just
         delete the branch and the insn in its delay slot.  */
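
      /* Sketch (illustrative):

		b    L1		; delay slot holds "add %o2,1,%o2"
		add  %o2,1,%o2	; NEXT, identical to the slotted insn
	     L1:

         Taking the branch (with its slot) and falling through are
         equivalent, so the branch sequence can simply be deleted.  */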
      if (next && NONJUMP_INSN_P (next)
          && label_before_next_insn (next, insn) == target_label
          && simplejump_p (insn)
          && XVECLEN (pat, 0) == 2
          && rtx_equal_p (PATTERN (next), PATTERN (pat->insn (1))))
        {
          delete_related_insns (insn);
          continue;
        }

      /* See if this jump (with its delay slots) conditionally branches
         around an unconditional jump (without delay slots).  If so, invert
         this jump and point it to the target of the second jump.  We cannot
         do this for annulled jumps, though.  Again, don't convert a jump to
         a RETURN here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_jump_insn)
          && any_condjump_p (delay_jump_insn)
          && next && simplejump_or_return_p (next)
          && next_active_insn (target_label) == next_active_insn (next)
          && no_labels_between_p (insn, next))
        {
          rtx label = JUMP_LABEL (next);
          rtx old_label = JUMP_LABEL (delay_jump_insn);

          if (ANY_RETURN_P (label))
            label = find_end_label (label);

          /* find_end_label can generate a new label.  Check this first.  */
          if (label
              && no_labels_between_p (insn, next)
              && redirect_with_delay_slots_safe_p (delay_jump_insn,
                                                   label, insn))
            {
              /* Be careful how we do this to avoid deleting code or labels
                 that are momentarily dead.  See similar optimization in
                 jump.c  */
              if (old_label)
                ++LABEL_NUSES (old_label);

              if (invert_jump (delay_jump_insn, label, 1))
                {
                  int i;

                  /* Must update the INSN_FROM_TARGET_P bits now that
                     the branch is reversed, so that mark_target_live_regs
                     will handle the delay slot insn correctly.  */
                  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                    {
                      rtx slot = XVECEXP (PATTERN (insn), 0, i);
                      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
                    }

                  delete_related_insns (next);
                  next = insn;
                }

              if (old_label && --LABEL_NUSES (old_label) == 0)
                delete_related_insns (old_label);
              continue;
            }
        }

      /* If we own the thread opposite the way this insn branches, see if we
         can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (pat->insn (1))
          && own_thread_p (NEXT_INSN (insn), 0, 1))
        try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (pat->insn (1))
               && own_thread_p (target_label, target_label, 0))
        try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
         NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */

static void
make_return_insns (rtx_insn *first)
{
  rtx_insn *insn;
  rtx_jump_insn *jump_insn;
  rtx real_return_label = function_return_label;
  rtx real_simple_return_label = function_simple_return_label;
  int slots, i;

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
      {
        rtx t = get_label_before (insn, NULL_RTX);
        if (PATTERN (insn) == ret_rtx)
          real_return_label = t;
        else
          real_simple_return_label = t;
        break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  if (real_return_label)
    LABEL_NUSES (real_return_label)++;
  if (real_simple_return_label)
    LABEL_NUSES (real_simple_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;
      rtx kind, real_label;

      /* Only look at filled JUMP_INSNs that go to the end of function
         label.  */
      if (!NONJUMP_INSN_P (insn))
        continue;

      if (GET_CODE (PATTERN (insn)) != SEQUENCE)
        continue;

      rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (insn));

      if (!jump_to_label_p (pat->insn (0)))
        continue;

      if (JUMP_LABEL (pat->insn (0)) == function_return_label)
        {
          kind = ret_rtx;
          real_label = real_return_label;
        }
      else if (JUMP_LABEL (pat->insn (0)) == function_simple_return_label)
        {
          kind = simple_return_rtx;
          real_label = real_simple_return_label;
        }
      else
        continue;

      jump_insn = as_a <rtx_jump_insn *> (pat->insn (0));

      /* If we can't make the jump into a RETURN, try to redirect it to the
         best RETURN and go on to the next insn.  */
      if (!reorg_redirect_jump (jump_insn, kind))
        {
          /* Make sure redirecting the jump will not invalidate the delay
             slot insns.  */
          if (redirect_with_delay_slots_safe_p (jump_insn, real_label, insn))
            reorg_redirect_jump (jump_insn, real_label);
          continue;
        }

      /* See if this RETURN can accept the insns currently in its delay slot.
         It can if it has more or an equal number of slots and the contents
         of each is valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
        {
          for (i = 1; i < XVECLEN (pat, 0); i++)
            if (! (
#ifdef ANNUL_IFFALSE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && INSN_FROM_TARGET_P (pat->insn (i)))
                   ? eligible_for_annul_false (jump_insn, i - 1,
                                               pat->insn (i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && ! INSN_FROM_TARGET_P (pat->insn (i)))
                   ? eligible_for_annul_true (jump_insn, i - 1,
                                              pat->insn (i), flags) :
#endif
                   eligible_for_delay (jump_insn, i - 1,
                                       pat->insn (i), flags)))
              break;
        }
      else
        i = 0;

      if (i == XVECLEN (pat, 0))
        continue;

      /* We have to do something with this insn.  If it is an unconditional
         RETURN, delete the SEQUENCE and output the individual insns,
         followed by the RETURN.  Then set things up so we try to find
         insns for its delay slots, if it needs some.  */
      if (ANY_RETURN_P (PATTERN (jump_insn)))
        {
          rtx_insn *prev = PREV_INSN (insn);

          delete_related_insns (insn);
          for (i = 1; i < XVECLEN (pat, 0); i++)
            prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

          insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
          emit_barrier_after (insn);

          if (slots)
            obstack_ptr_grow (&unfilled_slots_obstack, insn);
        }
      else
        /* It is probably more efficient to keep this with its current
           delay slot as a branch to a RETURN.  */
        reorg_redirect_jump (jump_insn, real_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (real_return_label != NULL_RTX && --LABEL_NUSES (real_return_label) == 0)
    delete_related_insns (real_return_label);
  if (real_simple_return_label != NULL_RTX
      && --LABEL_NUSES (real_simple_return_label) == 0)
    delete_related_insns (real_simple_return_label);

  fill_simple_delay_slots (1);
  fill_simple_delay_slots (0);
}
3720 /* Try to find insns to place in delay slots. */
3723 dbr_schedule (rtx_insn
*first
)
3725 rtx_insn
*insn
, *next
, *epilogue_insn
= 0;
3727 bool need_return_insns
;
3729 /* If the current function has no insns other than the prologue and
3730 epilogue, then do not try to fill any delay slots. */
3731 if (n_basic_blocks_for_fn (cfun
) == NUM_FIXED_BLOCKS
)
3734 /* Find the highest INSN_UID and allocate and initialize our map from
3735 INSN_UID's to position in code. */
3736 for (max_uid
= 0, insn
= first
; insn
; insn
= NEXT_INSN (insn
))
3738 if (INSN_UID (insn
) > max_uid
)
3739 max_uid
= INSN_UID (insn
);
3741 && NOTE_KIND (insn
) == NOTE_INSN_EPILOGUE_BEG
)
3742 epilogue_insn
= insn
;
3745 uid_to_ruid
= XNEWVEC (int, max_uid
+ 1);
3746 for (i
= 0, insn
= first
; insn
; i
++, insn
= NEXT_INSN (insn
))
3747 uid_to_ruid
[INSN_UID (insn
)] = i
;

  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
    }
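
  /* The obstack doubles as a growing array of insn pointers: candidates
     are appended with obstack_ptr_grow and scanned through the
     unfilled_slots_base / unfilled_slots_next macros defined earlier in
     this file.  */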

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (JUMP_TABLE_DATA_P (insn))
	continue;

      if (JUMP_P (insn))
	INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (JUMP_P (insn)
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && !ANY_RETURN_P (JUMP_LABEL (insn))
	  && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
	      != JUMP_LABEL (insn)))
	redirect_jump (as_a <rtx_jump_insn *> (insn), target, 1);
    }
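
  /* Set up the register/resource liveness tables (see resource.c) that
     the slot-filling heuristics consult; EPILOGUE_INSN marks where the
     epilogue begins.  */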
  init_resource_info (epilogue_insn);

  /* Show we haven't computed an end-of-function label yet.  */
  function_return_label = function_simple_return_label = NULL;

  /* Initialize the statistics for this function.  */
  memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
  memset (num_filled_delays, 0, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (function_return_label && --LABEL_NUSES (function_return_label) == 0)
    delete_related_insns (function_return_label);
  if (function_simple_return_label
      && --LABEL_NUSES (function_simple_return_label) == 0)
    delete_related_insns (function_simple_return_label);

  need_return_insns = false;
  need_return_insns |= targetm.have_return () && function_return_label != 0;
  need_return_insns |= (targetm.have_simple_return ()
			&& function_simple_return_label != 0);
  if (need_return_insns)
    make_return_insns (first);

  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
	  && INSN_P (XEXP (PATTERN (insn), 0)))
	next = delete_related_insns (insn);
    }

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
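
  /* A plausible explanation: the obstack_free above released storage back
     to UNFILLED_FIRSTOBJ, so growing a fresh zero-length object here
     re-establishes a valid base for the next function to free from.
     This is only a guess, in keeping with the comment above.  */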

  if (dump_file)
    {
      int i, j, need_comma;
      int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
      int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (dump_file, ";; Reorg function #%d\n", i);

	      fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (dump_file, ", ");
		    need_comma = 1;
		    fprintf (dump_file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (dump_file, "\n");
	    }
	}
      memset (total_delay_slots, 0, sizeof total_delay_slots);
      memset (total_annul_slots, 0, sizeof total_annul_slots);
      for (insn = first; insn; insn = NEXT_INSN (insn))
	{
	  if (! insn->deleted ()
	      && NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) != USE
	      && GET_CODE (PATTERN (insn)) != CLOBBER)
	    {
	      if (GET_CODE (PATTERN (insn)) == SEQUENCE)
		{
		  rtx control;
		  j = XVECLEN (PATTERN (insn), 0) - 1;
		  if (j > MAX_DELAY_HISTOGRAM)
		    j = MAX_DELAY_HISTOGRAM;
		  control = XVECEXP (PATTERN (insn), 0, 0);
		  if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
		    total_annul_slots[j]++;
		  else
		    total_delay_slots[j]++;
		}
	      else if (num_delay_slots (insn) > 0)
		total_delay_slots[0]++;
	    }
	}

      fprintf (dump_file, ";; Reorg totals: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
	{
	  if (total_delay_slots[j])
	    {
	      if (need_comma)
		fprintf (dump_file, ", ");
	      need_comma = 1;
	      fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
	    }
	}
      fprintf (dump_file, "\n");
#if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
      fprintf (dump_file, ";; Reorg annuls: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
	{
	  if (total_annul_slots[j])
	    {
	      if (need_comma)
		fprintf (dump_file, ", ");
	      need_comma = 1;
	      fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
	    }
	}
      fprintf (dump_file, "\n");
#endif

      fprintf (dump_file, "\n");
    }

  if (!sibling_labels.is_empty ())
    {
      update_alignments (sibling_labels);
      sibling_labels.release ();
    }

  free_resource_info ();
  free (uid_to_ruid);
  crtl->dbr_scheduled_p = true;
}
#endif /* DELAY_SLOTS */

/* Run delay slot optimization.  */

static unsigned int
rest_of_handle_delay_slots (void)
{
#ifdef DELAY_SLOTS
  dbr_schedule (get_insns ());
#endif
  return 0;
}

namespace {

const pass_data pass_data_delay_slots =
{
  RTL_PASS, /* type */
  "dbr", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DBR_SCHED, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_delay_slots : public rtl_opt_pass
{
public:
  pass_delay_slots (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_delay_slots, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_delay_slots ();
    }

}; // class pass_delay_slots

bool
pass_delay_slots::gate (function *)
{
#ifdef DELAY_SLOTS
  /* At -O0 dataflow info isn't updated after RA.  */
  return optimize > 0 && flag_delayed_branch && !crtl->dbr_scheduled_p;
#else
  return false;
#endif
}

} // anon namespace

rtl_opt_pass *
make_pass_delay_slots (gcc::context *ctxt)
{
  return new pass_delay_slots (ctxt);
}

/* Machine dependent reorg pass.  */

namespace {

const pass_data pass_data_machine_reorg =
{
  RTL_PASS, /* type */
  "mach", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_MACH_DEP, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_machine_reorg : public rtl_opt_pass
{
public:
  pass_machine_reorg (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_machine_reorg, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return targetm.machine_dependent_reorg != 0;
    }

  virtual unsigned int execute (function *)
    {
      targetm.machine_dependent_reorg ();
      return 0;
    }

}; // class pass_machine_reorg

} // anon namespace

rtl_opt_pass *
make_pass_machine_reorg (gcc::context *ctxt)
{
  return new pass_machine_reorg (ctxt);
}