gcc/reorg.c (official-gcc.git)
/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
   2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').
   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS has a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that, failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.
   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.
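
   As a concrete illustration (a schematic MIPS-style sketch, not drawn
   from any particular machine description), an unfilled branch

	addu	$3,$3,1
	beq	$4,$0,L2
	nop			; wasted delay slot

   can have the independent addition hoisted into its delay slot:

	beq	$4,$0,L2
	addu	$3,$3,1		; executed whether or not we branch

   Since the addition does not feed the branch condition, it executes on
   both paths exactly as before, and the penalty cycle now does useful
   work.
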
   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops.  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.
   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets CC0 and the insn that uses
   it.  The insns are allowed to be separated by placing an insn that
   sets CC0 (but not an insn that uses CC0; we could do this, but it
   doesn't seem worthwhile) in a delay slot.  In that case, we point
   each insn at the other with REG_CC_USER and REG_CC_SETTER notes.
   Note that these restrictions affect very few machines because most
   RISC machines with delay slots will not use CC0 (the RT is the only
   known exception at this point).
   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"
#include "except.h"
#include "params.h"
#include "timevar.h"
#include "target.h"
#include "tree-pass.h"

#ifdef DELAY_SLOTS

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))

/* Points to the label before the end of the function, or before a
   return insn.  */
static rtx function_return_label;
/* Likewise for a simple_return.  */
static rtx function_simple_return_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;

static int stop_search_p (rtx, int);
static int resource_conflicts_p (struct resources *, struct resources *);
static int insn_references_resource_p (rtx, struct resources *, bool);
static int insn_sets_resource_p (rtx, struct resources *, bool);
static rtx find_end_label (rtx);
static rtx emit_delay_sequence (rtx, rtx, int);
static rtx add_to_delay_list (rtx, rtx);
static rtx delete_from_delay_slot (rtx);
static void delete_scheduled_jump (rtx);
static void note_delay_statistics (int, int);
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip (rtx);
#endif
static int get_jump_flags (rtx, rtx);
static int rare_destination (rtx);
static int mostly_true_jump (rtx, rtx);
static rtx get_branch_condition (rtx, rtx);
static int condition_dominates_p (rtx, rtx);
static int redirect_with_delay_slots_safe_p (rtx, rtx, rtx);
static int redirect_with_delay_list_safe_p (rtx, rtx, rtx);
static int check_annul_list_true_false (int, rtx);
static rtx steal_delay_list_from_target (rtx, rtx, rtx, rtx,
                                         struct resources *,
                                         struct resources *,
                                         struct resources *,
                                         int, int *, int *, rtx *);
static rtx steal_delay_list_from_fallthrough (rtx, rtx, rtx, rtx,
                                              struct resources *,
                                              struct resources *,
                                              struct resources *,
                                              int, int *, int *);
static void try_merge_delay_insns (rtx, rtx);
static rtx redundant_insn (rtx, rtx, rtx);
static int own_thread_p (rtx, rtx, int);
static void update_block (rtx, rtx);
static int reorg_redirect_jump (rtx, rtx);
static void update_reg_dead_notes (rtx, rtx);
static void fix_reg_dead_note (rtx, rtx);
static void update_reg_unused_notes (rtx, rtx);
static void fill_simple_delay_slots (int);
static rtx fill_slots_from_thread (rtx, rtx, rtx, rtx,
                                   int, int, int, int,
                                   int *, rtx);
static void fill_eager_delay_slots (void);
static void relax_delay_slots (rtx);
static void make_return_insns (rtx);

/* A wrapper around next_active_insn which takes care to return ret_rtx
   unchanged.  */

static rtx
first_active_target_insn (rtx insn)
{
  if (ANY_RETURN_P (insn))
    return insn;
  return next_active_insn (insn);
}

/* Return true iff INSN is a simplejump, or any kind of return insn.  */

static bool
simplejump_or_return_p (rtx insn)
{
  return (JUMP_P (insn)
          && (simplejump_p (insn) || ANY_RETURN_P (PATTERN (insn))));
}

/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (rtx insn, int labels_p)
{
  if (insn == 0)
    return 1;

  /* If the insn can throw an exception that is caught within the function,
     it may effectively perform a jump from the viewpoint of the function.
     Therefore act like for a jump.  */
  if (can_throw_internal (insn))
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
         We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
              || GET_CODE (PATTERN (insn)) == ASM_INPUT
              || asm_noperands (PATTERN (insn)) >= 0);

    default:
      gcc_unreachable ();
    }
}

/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (struct resources *res1, struct resources *res2)
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
        return 1;
    return 0;
  }
#endif
}

/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return TRUE if
   the called routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (rtx insn, struct resources *res,
                            bool include_delayed_effects)
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}

/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (rtx insn, struct resources *res,
                      bool include_delayed_effects)
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0,
                      (include_delayed_effects
                       ? MARK_SRC_DEST_CALL
                       : MARK_SRC_DEST));
  return resource_conflicts_p (&insn_sets, res);
}

/* Find a label at the end of the function or before a RETURN.  If there
   is none, try to make one.  If that fails, returns 0.

   The property of such a label is that it is placed just before the
   epilogue or a bare RETURN insn, so that another bare RETURN can be
   turned into a jump to the label unconditionally.  In particular, the
   label cannot be placed before a RETURN insn with a filled delay slot.

   ??? There may be a problem with the current implementation.  Suppose
   we start with a bare RETURN insn and call find_end_label.  It may set
   function_return_label just before the RETURN.  Suppose the machinery
   is able to fill the delay slot of the RETURN insn afterwards.  Then
   function_return_label is no longer valid according to the property
   described above and find_end_label will still return it unmodified.
   Note that this is probably mitigated by the following observation:
   once function_return_label is made, it is very likely the target of
   a jump, so filling the delay slot of the RETURN will be much more
   difficult.

   KIND is either simple_return_rtx or ret_rtx, indicating which type of
   return we're looking for.  */

static rtx
find_end_label (rtx kind)
{
  rtx insn;
  rtx *plabel;

  if (kind == ret_rtx)
    plabel = &function_return_label;
  else
    {
      gcc_assert (kind == simple_return_rtx);
      plabel = &function_simple_return_label;
    }

  /* If we found one previously, return it.  */
  if (*plabel)
    return *plabel;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (NOTE_P (insn)
         || (NONJUMP_INSN_P (insn)
             && (GET_CODE (PATTERN (insn)) == USE
                 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     function_return_label.  */
  if (BARRIER_P (insn)
      && JUMP_P (PREV_INSN (insn))
      && PATTERN (PREV_INSN (insn)) == kind)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      rtx label = gen_label_rtx ();
      LABEL_NUSES (label) = 0;

      /* Put the label before any USE insns that may precede the RETURN
         insn.  */
      while (GET_CODE (temp) == USE)
        temp = PREV_INSN (temp);

      emit_label_after (label, temp);
      *plabel = label;
    }
  else if (LABEL_P (insn))
    *plabel = insn;
  else
    {
      rtx label = gen_label_rtx ();
      LABEL_NUSES (label) = 0;
      /* If the basic block reorder pass moves the return insn to
         some other place try to locate it again and put our
         function_return_label there.  */
      while (insn && ! (JUMP_P (insn) && (PATTERN (insn) == kind)))
        insn = PREV_INSN (insn);
      if (insn)
        {
          insn = PREV_INSN (insn);

          /* Put the label before any USE insns that may precede the
             RETURN insn.  */
          while (GET_CODE (insn) == USE)
            insn = PREV_INSN (insn);

          emit_label_after (label, insn);
        }
      else
        {
#ifdef HAVE_epilogue
          if (HAVE_epilogue
#ifdef HAVE_return
              && ! HAVE_return
#endif
              )
            /* The RETURN insn has its delay slot filled so we cannot
               emit the label just before it.  Since we already have
               an epilogue and cannot emit a new RETURN, we cannot
               emit the label at all.  */
            return NULL_RTX;
#endif /* HAVE_epilogue */

          /* Otherwise, make a new label and emit a RETURN and BARRIER,
             if needed.  */
          emit_label (label);
#ifdef HAVE_return
          /* We don't bother trying to create a return insn if the
             epilogue has filled delay-slots; we would have to try and
             move the delay-slot fillers to the delay-slots for the new
             return insn or in front of the new return insn.  */
          if (crtl->epilogue_delay_list == NULL
              && HAVE_return)
            {
              /* The return we make may have delay slots too.  */
              rtx insn = gen_return ();
              insn = emit_jump_insn (insn);
              set_return_jump_label (insn);
              emit_barrier ();
              if (num_delay_slots (insn) > 0)
                obstack_ptr_grow (&unfilled_slots_obstack, insn);
            }
#endif
        }
      *plabel = label;
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (*plabel);

  return *plabel;
}

/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
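
/* For concreteness, a jump with one filled delay slot ends up looking
   roughly like this (a schematic sketch; UIDs, modes and notes omitted):

	(insn (sequence [
	    (jump_insn (set (pc) (label_ref L1)))	;; the delay insn
	    (insn (set (reg 4) ...))			;; slot 1
	]))

   with the NEXT_INSN/PREV_INSN chain threaded through the inner insns
   as described above, so generic insn walkers still visit them.  */
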
static rtx
emit_delay_sequence (rtx insn, rtx list, int length)
{
  int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && BARRIER_P (NEXT_INSN (insn)))
    {
      delete_related_insns (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn != last)
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn != first)
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  INSN_LOCATOR (seq_insn) = INSN_LOCATOR (delay_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note, next;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* The SPARC assembler, for instance, emits a warning when debug info
         is output into the delay slot.  */
      if (INSN_LOCATOR (tem) && !INSN_LOCATOR (seq_insn))
        INSN_LOCATOR (seq_insn) = INSN_LOCATOR (tem);
      INSN_LOCATOR (tem) = 0;

      for (note = REG_NOTES (tem); note; note = next)
        {
          next = XEXP (note, 1);
          switch (REG_NOTE_KIND (note))
            {
            case REG_DEAD:
              /* Remove any REG_DEAD notes because we can't rely on them now
                 that the insn has been moved.  */
              remove_note (tem, note);
              break;

            case REG_LABEL_OPERAND:
            case REG_LABEL_TARGET:
              /* Keep the label reference count up to date.  */
              if (LABEL_P (XEXP (note, 0)))
                LABEL_NUSES (XEXP (note, 0)) ++;
              break;

            default:
              break;
            }
        }
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && NONJUMP_INSN_P (PREV_INSN (seq_insn))
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
                        XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && NONJUMP_INSN_P (NEXT_INSN (seq_insn))
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  gcc_assert (i == length + 1);

  return seq_insn;
}

/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */
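
/* Schematically, the resulting delay list is a plain INSN_LIST chain;
   after adding i1 and then i2 it looks like

	(insn_list:VOIDmode i1 (insn_list:VOIDmode i2 (nil)))

   i.e. insns execute in head-to-tail order.  */
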
static rtx
add_to_delay_list (rtx insn, rtx delay_list)
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}

/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (rtx insn)
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;
  int had_barrier = 0;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
    had_barrier = 1;

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
        delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_related_insns (seq_insn);
  add_insn_after (trial, prev, NULL);

  /* If there was a barrier after the old SEQUENCE, re-emit it.  */
  if (had_barrier)
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else if (JUMP_P (trial))
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}

/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (rtx insn)
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
         insn is in the delay list of some other insn.  So delete it from
         the delay list it was in.  */
      if (note)
        {
          if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
              && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
            delete_from_delay_slot (XEXP (note, 0));
        }
      else
        {
          /* The insn setting CC0 is our previous insn, but it may be in
             a delay slot.  It will be the last insn in the delay slot, if
             it is.  */
          rtx trial = previous_insn (insn);
          if (NOTE_P (trial))
            trial = prev_nonnote_insn (trial);
          if (sets_cc0_p (PATTERN (trial)) != 1
              || FIND_REG_INC_NOTE (trial, NULL_RTX))
            return;
          if (PREV_INSN (NEXT_INSN (trial)) == trial)
            delete_related_insns (trial);
          else
            delete_from_delay_slot (trial);
        }
    }
#endif

  delete_related_insns (insn);
}

/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (int slots_filled, int index)
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}

#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

	(orig)		(skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
	insn3		insn3	      L1':
					insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

	Bcc.n L1	Bcc',a L2
	insn		insn
      L1:	      L1:
	Bra L2		Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */

static rtx
optimize_skip (rtx insn)
{
  rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || !NONJUMP_INSN_P (trial)
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
          && ! eligible_for_annul_true (insn, 0, trial, flags))
      || can_throw_internal (trial))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && crtl->epilogue_delay_list != 0))
      || (next_trial != 0
          && simplejump_or_return_p (next_trial)
          && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
        {
          if (invert_jump (insn, JUMP_LABEL (insn), 1))
            INSN_FROM_TARGET_P (trial) = 1;
          else if (! eligible_for_annul_true (insn, 0, trial, flags))
            return 0;
        }

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_related_insns (trial);

      /* Also, if we are targeting an unconditional
         branch, thread our jump to the target of that branch.  Don't
         change this into a RETURN here, because it may not accept what
         we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && simplejump_or_return_p (next_trial))
        {
          rtx target_label = JUMP_LABEL (next_trial);
          if (ANY_RETURN_P (target_label))
            target_label = find_end_label (target_label);

          if (target_label)
            {
              /* Recompute the flags based on TARGET_LABEL since threading
                 the jump to TARGET_LABEL may change the direction of the
                 jump (which may change the circumstances in which the
                 delay slot is nullified).  */
              flags = get_jump_flags (insn, target_label);
              if (eligible_for_annul_true (insn, 0, trial, flags))
                reorg_redirect_jump (insn, target_label);
            }
        }

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif

/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Unconditional branches return no direction information and
   are predicted as very likely taken.  */
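
/* For example (schematically): a conditional branch whose target lies
   later in the insn stream and that is predicted taken would yield
   ATTR_FLAG_forward | ATTR_FLAG_likely.  */
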
static int
get_jump_flags (rtx insn, rtx label)
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is a return, then there is no way to determine the branch
     direction.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && !ANY_RETURN_P (label)
      && INSN_UID (insn) <= max_uid
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
        ? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If INSN is a conditional branch, call mostly_true_jump to determine
     the branch prediction.

     Unconditional branches are predicted as very likely taken.  */
  if (JUMP_P (insn)
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
        {
        case 2:
          flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
          break;
        case 1:
          flags |= ATTR_FLAG_likely;
          break;
        case 0:
          flags |= ATTR_FLAG_unlikely;
          break;
        case -1:
          flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
          break;

        default:
          gcc_unreachable ();
        }
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}

/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if INSN will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (rtx insn)
{
  int jump_count = 0;
  rtx next;

  for (; insn && !ANY_RETURN_P (insn); insn = next)
    {
      if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
          return 0;
        case BARRIER:
          /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
             don't scan past JUMP_INSNs, so any barrier we find here must
             have been after a CALL_INSN and hence mean the call doesn't
             return.  */
          return 2;
        case JUMP_INSN:
          if (ANY_RETURN_P (PATTERN (insn)))
            return 1;
          else if (simplejump_p (insn)
                   && jump_count++ < 10)
            next = JUMP_LABEL (insn);
          else
            return 0;

        default:
          break;
        }
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}

/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if nonzero, is the condition that JUMP_INSN is testing.  */
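
/* With the usual REG_BR_PROB_BASE of 10000, the thresholds used below
   work out as: a REG_BR_PROB note of 9000 or more yields 2, 5000-8999
   yields 1, 1000-4999 yields 0, and anything lower yields -1.  */
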
static int
mostly_true_jump (rtx jump_insn, rtx condition)
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx note;
  int rare_dest, rare_fallthrough;

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  note = find_reg_note (jump_insn, REG_BR_PROB, 0);
  if (note)
    {
      int prob = INTVAL (XEXP (note, 0));

      if (prob >= REG_BR_PROB_BASE * 9 / 10)
        return 2;
      else if (prob >= REG_BR_PROB_BASE / 2)
        return 1;
      else if (prob >= REG_BR_PROB_BASE / 10)
        return 0;
      else
        return -1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */
  rare_dest = rare_destination (target_label);
  rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* Predict that backward branches are usually taken, forward branches
     usually not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (ANY_RETURN_P (target_label) || INSN_UID (jump_insn) > max_uid
          || INSN_UID (target_label) > max_uid
          || (uid_to_ruid[INSN_UID (jump_insn)]
              > uid_to_ruid[INSN_UID (target_label)]));
}

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */
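
/* E.g. for a conditional branch in the canonical form

	(set (pc) (if_then_else (eq (reg 17) (const_int 0))
				(label_ref TARGET)
				(pc)))

   this returns (eq (reg 17) (const_int 0)); if TARGET instead sits in
   the else arm, the reversed comparison is returned (the register
   number is chosen only for illustration).  */
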
static rtx
get_branch_condition (rtx insn, rtx target)
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (ANY_RETURN_P (pat))
    return pat == target ? const_true_rtx : 0;

  if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
           && XEXP (src, 2) == pc_rtx
           && GET_CODE (XEXP (src, 1)) == LABEL_REF
           && XEXP (XEXP (src, 1), 0) == target)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
           && XEXP (src, 1) == pc_rtx
           && GET_CODE (XEXP (src, 2)) == LABEL_REF
           && XEXP (XEXP (src, 2), 0) == target)
    {
      enum rtx_code rev;
      rev = reversed_comparison_code (XEXP (src, 0), insn);
      if (rev != UNKNOWN)
        return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
                               XEXP (XEXP (src, 0), 0),
                               XEXP (XEXP (src, 0), 1));
    }

  return 0;
}

/* Return nonzero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */
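
/* For instance, (eq x 0) dominates (le x 0): whenever the former is
   true, so is the latter (see comparison_dominates_p).  */
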
static int
condition_dominates_p (rtx condition, rtx insn)
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}

/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (rtx jump, rtx newlabel, rtx seq)
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
           ? eligible_for_annul_false (jump, i - 1,
                                       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
           ? eligible_for_annul_true (jump, i - 1,
                                      XVECEXP (pat, 0, i), flags) :
#endif
           eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}

/* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (rtx jump, rtx newlabel, rtx delay_list)
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return nonzero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && INSN_FROM_TARGET_P (XEXP (li, 0)))
           ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
           (INSN_ANNULLED_BRANCH_P (jump)
            && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
           ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
           eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}

/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (int annul_true_p, rtx delay_list)
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
        {
          rtx trial = XEXP (temp, 0);

          if ((annul_true_p && INSN_FROM_TARGET_P (trial))
              || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
            return 0;
        }
    }

  return 1;
}

/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a nonzero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value nonzero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

static rtx
steal_delay_list_from_target (rtx insn, rtx condition, rtx seq,
                              rtx delay_list, struct resources *sets,
                              struct resources *needed,
                              struct resources *other_needed,
                              int slots_to_fill, int *pslots_filled,
                              int *pannul_p, rtx *pnew_thread)
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We cannot steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We cannot do this because we cannot
     change the direction of the jump because the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
      if (insn_references_resource_p (XVECEXP (seq, 0, 0), &cc_set, false))
        return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

#ifdef MD_CAN_REDIRECT_BRANCH
  /* On some targets, branches with delay slots can have a limited
     displacement.  Give the back end a chance to tell us we can't do
     this.  */
  if (! MD_CAN_REDIRECT_BRANCH (insn, XVECEXP (seq, 0, 0)))
    return delay_list;
#endif

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, false)
          || insn_sets_resource_p (trial, needed, false)
          || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
          /* If TRIAL sets CC0, we can't copy it, so we can't steal this
             delay list.  */
          || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
          /* If TRIAL is from the fallthrough code of an annulled branch insn
             in SEQ, we cannot use it.  */
          || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
              && ! INSN_FROM_TARGET_P (trial)))
        return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
         pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
        continue;

      /* We will end up re-vectoring this branch, so compute flags
         based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
          && ((condition == const_true_rtx
               || (! insn_sets_resource_p (trial, other_needed, false)
                   && ! may_trap_or_fault_p (PATTERN (trial)))))
          ? eligible_for_delay (insn, total_slots_filled, trial, flags)
          : (must_annul || (delay_list == NULL && new_delay_list == NULL))
             && (must_annul = 1,
                 check_annul_list_true_false (0, delay_list)
                 && check_annul_list_true_false (0, new_delay_list)
                 && eligible_for_annul_false (insn, total_slots_filled,
                                              trial, flags)))
        {
          if (must_annul)
            used_annul = 1;
          temp = copy_rtx (trial);
          INSN_FROM_TARGET_P (temp) = 1;
          new_delay_list = add_to_delay_list (temp, new_delay_list);
          total_slots_filled++;

          if (--slots_remaining == 0)
            break;
        }
      else
        return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = first_active_target_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}

/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (rtx insn, rtx condition, rtx seq,
                                   rtx delay_list, struct resources *sets,
                                   struct resources *needed,
                                   struct resources *other_needed,
                                   int slots_to_fill, int *pslots_filled,
                                   int *pannul_p)
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_or_return_p (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
         of CC0.  */
      if (insn_references_resource_p (trial, sets, false)
          || insn_sets_resource_p (trial, needed, false)
          || insn_sets_resource_p (trial, sets, false)
#ifdef HAVE_cc0
          || sets_cc0_p (PATTERN (trial))
#endif
          )

        break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
        {
          delete_from_delay_slot (trial);
          continue;
        }

      if (! must_annul
          && ((condition == const_true_rtx
               || (! insn_sets_resource_p (trial, other_needed, false)
                   && ! may_trap_or_fault_p (PATTERN (trial)))))
          ? eligible_for_delay (insn, *pslots_filled, trial, flags)
          : (must_annul || delay_list == NULL) && (must_annul = 1,
             check_annul_list_true_false (1, delay_list)
             && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
        {
          if (must_annul)
            used_annul = 1;
          delete_from_delay_slot (trial);
          delay_list = add_to_delay_list (trial, delay_list);

          if (++(*pslots_filled) == slots_to_fill)
            break;
        }
      else
        break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}

/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (rtx insn, rtx thread)
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = JUMP_P (delay_insn) && INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1 ; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
        mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed,
                                   true);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (NONJUMP_INSN_P (trial)
          && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
        continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
          /* We can't share an insn that sets cc0.  */
          && ! sets_cc0_p (pat)
#endif
          && ! insn_references_resource_p (trial, &set, true)
          && ! insn_sets_resource_p (trial, &set, true)
          && ! insn_sets_resource_p (trial, &needed, true)
          && (trial = try_split (pat, trial, 0)) != 0
          /* Update next_trial, in case try_split succeeded.  */
          && (next_trial = next_nonnote_insn (trial))
          /* Likewise THREAD.  */
          && (thread = oldtrial == thread ? trial : thread)
          && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
          /* Have to test this condition if annul condition is different
             from (and less restrictive than) non-annulling one.  */
          && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
        {

          if (! annul_p)
            {
              update_block (trial, thread);
              if (trial == thread)
                thread = next_active_insn (thread);

              delete_related_insns (trial);
              INSN_FROM_TARGET_P (next_to_match) = 0;
            }
          else
            merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

          if (++slot_number == num_slots)
            break;

          next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
        }

      mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (trial, &needed, true);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && NONJUMP_INSN_P (trial)
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && !(JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
           && INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0))))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
      mark_referenced_resources (filled_insn, &needed, true);

      for (i = 1; i < XVECLEN (pat, 0); i++)
        {
          rtx dtrial = XVECEXP (pat, 0, i);

          if (! insn_references_resource_p (dtrial, &set, true)
              && ! insn_sets_resource_p (dtrial, &set, true)
              && ! insn_sets_resource_p (dtrial, &needed, true)
#ifdef HAVE_cc0
              && ! sets_cc0_p (PATTERN (dtrial))
#endif
              && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
              && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
            {
              if (! annul_p)
                {
                  rtx new_rtx;

                  update_block (dtrial, thread);
                  new_rtx = delete_from_delay_slot (dtrial);
                  if (INSN_DELETED_P (thread))
                    thread = new_rtx;
                  INSN_FROM_TARGET_P (next_to_match) = 0;
                }
              else
                merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
                                                  merged_insns);

              if (++slot_number == num_slots)
                break;

              next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
            }
          else
            {
              /* Keep track of the set/referenced resources for the delay
                 slots of any trial insns we encounter.  */
              mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
              mark_referenced_resources (dtrial, &needed, true);
            }
        }
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not do so any more.  In that case delete
     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each
     insn in the delay list so that we know that it isn't only being used
     at the target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
        {
          if (GET_MODE (merged_insns) == SImode)
            {
              rtx new_rtx;

              update_block (XEXP (merged_insns, 0), thread);
              new_rtx = delete_from_delay_slot (XEXP (merged_insns, 0));
              if (INSN_DELETED_P (thread))
                thread = new_rtx;
            }
          else
            {
              update_block (XEXP (merged_insns, 0), thread);
              delete_related_insns (XEXP (merged_insns, 0));
            }
        }

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}

/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
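
/* A schematic example of the common case: if two branches both jump to
   label L, and the first insn at L was earlier copied into the delay
   slot of the first branch, then while filling the second branch's slot
   that same insn is recognized here as redundant and need not be copied
   a second time.  */
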
1640 static rtx
1641 redundant_insn (rtx insn, rtx target, rtx delay_list)
1643 rtx target_main = target;
1644 rtx ipat = PATTERN (insn);
1645 rtx trial, pat;
1646 struct resources needed, set;
1647 int i;
1648 unsigned insns_to_search;
1650 /* If INSN has any REG_UNUSED notes, it can't match anything since we
1651 are allowed to not actually assign to such a register. */
1652 if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
1653 return 0;
1655 /* Scan backwards looking for a match. */
1656 for (trial = PREV_INSN (target),
1657 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1658 trial && insns_to_search > 0;
1659 trial = PREV_INSN (trial))
1661 if (LABEL_P (trial))
1662 return 0;
1664 if (!NONDEBUG_INSN_P (trial))
1665 continue;
1666 --insns_to_search;
1668 pat = PATTERN (trial);
1669 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1670 continue;
1672 if (GET_CODE (pat) == SEQUENCE)
1674 /* Stop for a CALL and its delay slots because it is difficult to
1675 track its resource needs correctly. */
1676 if (CALL_P (XVECEXP (pat, 0, 0)))
1677 return 0;
1679 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1680 slots because it is difficult to track its resource needs
1681 correctly. */
1683 #ifdef INSN_SETS_ARE_DELAYED
1684 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1685 return 0;
1686 #endif
1688 #ifdef INSN_REFERENCES_ARE_DELAYED
1689 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1690 return 0;
1691 #endif
1693 /* See if any of the insns in the delay slot match, updating
1694 resource requirements as we go. */
1695 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1696 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1697 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
1698 && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
1699 break;
1701 /* If found a match, exit this loop early. */
1702 if (i > 0)
1703 break;
1706 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
1707 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
1708 break;
1711 /* If we didn't find an insn that matches, return 0. */
1712 if (trial == 0)
1713 return 0;
1715 /* See what resources this insn sets and needs. If they overlap, or
1716 if this insn references CC0, it can't be redundant. */
1718 CLEAR_RESOURCE (&needed);
1719 CLEAR_RESOURCE (&set);
1720 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1721 mark_referenced_resources (insn, &needed, true);
1723 /* If TARGET is a SEQUENCE, get the main insn. */
1724 if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
1725 target_main = XVECEXP (PATTERN (target), 0, 0);
1727 if (resource_conflicts_p (&needed, &set)
1728 #ifdef HAVE_cc0
1729 || reg_mentioned_p (cc0_rtx, ipat)
1730 #endif
1731 /* The insn requiring the delay may not set anything needed or set by
1732 INSN. */
1733 || insn_sets_resource_p (target_main, &needed, true)
1734 || insn_sets_resource_p (target_main, &set, true))
1735 return 0;
1737 /* Insns we pass may not set either NEEDED or SET, so merge them for
1738 simpler tests. */
1739 needed.memory |= set.memory;
1740 needed.unch_memory |= set.unch_memory;
1741 IOR_HARD_REG_SET (needed.regs, set.regs);
1743 /* This insn isn't redundant if it conflicts with an insn that either is
1744 or will be in a delay slot of TARGET. */
1746 while (delay_list)
1748 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, true))
1749 return 0;
1750 delay_list = XEXP (delay_list, 1);
1753 if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
1754 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1755 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed,
1756 true))
1757 return 0;
1759 /* Scan backwards until we reach a label or an insn that uses something
1760 INSN sets, or sets something INSN uses or sets. */
1762 for (trial = PREV_INSN (target),
1763 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1764 trial && !LABEL_P (trial) && insns_to_search > 0;
1765 trial = PREV_INSN (trial))
1767 if (!NONDEBUG_INSN_P (trial))
1768 continue;
1769 --insns_to_search;
1771 pat = PATTERN (trial);
1772 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1773 continue;
1775 if (GET_CODE (pat) == SEQUENCE)
1777 bool annul_p = false;
1778 rtx control = XVECEXP (pat, 0, 0);
1780 /* If this is a CALL_INSN and its delay slots, it is hard to track
1781 the resource needs properly, so give up. */
1782 if (CALL_P (control))
1783 return 0;
1785 /* If this is an INSN or JUMP_INSN with delayed effects, it
1786 is hard to track the resource needs properly, so give up. */
1788 #ifdef INSN_SETS_ARE_DELAYED
1789 if (INSN_SETS_ARE_DELAYED (control))
1790 return 0;
1791 #endif
1793 #ifdef INSN_REFERENCES_ARE_DELAYED
1794 if (INSN_REFERENCES_ARE_DELAYED (control))
1795 return 0;
1796 #endif
1798 if (JUMP_P (control))
1799 annul_p = INSN_ANNULLED_BRANCH_P (control);
1801 /* See if any of the insns in the delay slot match, updating
1802 resource requirements as we go. */
1803 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1805 rtx candidate = XVECEXP (pat, 0, i);
1807 /* If an insn will be annulled if the branch is false, it isn't
1808 considered as a possible duplicate insn. */
1809 if (rtx_equal_p (PATTERN (candidate), ipat)
1810 && ! (annul_p && INSN_FROM_TARGET_P (candidate)))
1812 /* Show that this insn will be used in the sequel. */
1813 INSN_FROM_TARGET_P (candidate) = 0;
1814 return candidate;
1817 /* Unless this is an annulled insn from the target of a branch,
1818 we must stop if it sets anything needed or set by INSN. */
1819 if ((!annul_p || !INSN_FROM_TARGET_P (candidate))
1820 && insn_sets_resource_p (candidate, &needed, true))
1821 return 0;
1824 /* If the insn requiring the delay slot conflicts with INSN, we
1825 must stop. */
1826 if (insn_sets_resource_p (control, &needed, true))
1827 return 0;
1829 else
1831 /* See if TRIAL is the same as INSN. */
1832 pat = PATTERN (trial);
1833 if (rtx_equal_p (pat, ipat))
1834 return trial;
1836 /* Can't go any further if TRIAL conflicts with INSN. */
1837 if (insn_sets_resource_p (trial, &needed, true))
1838 return 0;
1842 return 0;
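/* A hedged illustration of the case redundant_insn handles, with
   made-up mnemonics in the spirit of the examples elsewhere in this
   file (nothing here is target-specific):

	b	L1	; delay slot already holds "add r1,r2"
	 add	r1,r2
	...
	b	L1	; now filling this branch's slot
     L1:add	r1,r2

   When the second branch asks whether "add r1,r2" is redundant, the
   backward scan finds the identical insn in the first branch's delay
   slot; if no resource conflict intervenes, the work is already done
   on the path reaching the second branch.  */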
1845 /* Return 1 if THREAD can only be executed in one way. If LABEL is nonzero,
1846 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1847 is nonzero, we are allowed to fall into this thread; otherwise, we are
1848 not.
1850 If LABEL is used more than once or we pass a label other than LABEL before
1851 finding an active insn, we do not own this thread. */
1853 static int
1854 own_thread_p (rtx thread, rtx label, int allow_fallthrough)
1856 rtx active_insn;
1857 rtx insn;
1859 /* We don't own the function end. */
1860 if (thread == 0 || ANY_RETURN_P (thread))
1861 return 0;
1863 /* Get the first active insn, or THREAD, if it is an active insn. */
1864 active_insn = next_active_insn (PREV_INSN (thread));
1866 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
1867 if (LABEL_P (insn)
1868 && (insn != label || LABEL_NUSES (insn) != 1))
1869 return 0;
1871 if (allow_fallthrough)
1872 return 1;
1874 /* Ensure that we reach a BARRIER before any insn or label. */
1875 for (insn = prev_nonnote_insn (thread);
1876 insn == 0 || !BARRIER_P (insn);
1877 insn = prev_nonnote_insn (insn))
1878 if (insn == 0
1879 || LABEL_P (insn)
1880 || (NONJUMP_INSN_P (insn)
1881 && GET_CODE (PATTERN (insn)) != USE
1882 && GET_CODE (PATTERN (insn)) != CLOBBER))
1883 return 0;
1885 return 1;
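/* Illustration (hypothetical labels): we own

	jmp	elsewhere
	; BARRIER
     L1:add	r1,r2		<- THREAD, LABEL_NUSES (L1) == 1

   because nothing can fall into L1 and only our branch targets it.
   If another branch also jumped to L1, or an insn could fall through
   to it, any insn we steal from the thread would wrongly execute on
   those other paths too.  */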
1888 /* Called when INSN is being moved from a location near the target of a jump.
1889 We leave a marker of the form (use (INSN)) immediately in front
1890 of WHERE for mark_target_live_regs. These markers will be deleted when
1891 reorg finishes.
1893 We used to try to update the live status of registers if WHERE is at
1894 the start of a basic block, but that can't work since we may remove a
1895 BARRIER in relax_delay_slots. */
1897 static void
1898 update_block (rtx insn, rtx where)
1900 /* Ignore if this was in a delay slot and it came from the target of
1901 a branch. */
1902 if (INSN_FROM_TARGET_P (insn))
1903 return;
1905 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
1907 /* INSN might be making a value live in a block where it didn't use to
1908 be. So recompute liveness information for this block. */
1910 incr_ticks_for_insn (insn);
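/* The marker left above is literally (use (insn ...)): a USE whose
   operand is the moved insn itself rather than a register, which is
   why delete_prior_computation below leaves such USEs alone.  The
   markers exist only for mark_target_live_regs and are deleted
   before reorg finishes.  */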
1913 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
1914 the basic block containing the jump. */
1916 static int
1917 reorg_redirect_jump (rtx jump, rtx nlabel)
1919 incr_ticks_for_insn (jump);
1920 return redirect_jump (jump, nlabel, 1);
1923 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1924 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1925 that reference values used in INSN. If we find one, then we move the
1926 REG_DEAD note to INSN.
1928 This is needed to handle the case where a later insn (after INSN) has a
1929 REG_DEAD note for a register used by INSN, and this later insn subsequently
1930 gets moved before a CODE_LABEL because it is a redundant insn. In this
1931 case, mark_target_live_regs may be confused into thinking the register
1932 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
1934 static void
1935 update_reg_dead_notes (rtx insn, rtx delayed_insn)
1937 rtx p, link, next;
1939 for (p = next_nonnote_insn (insn); p != delayed_insn;
1940 p = next_nonnote_insn (p))
1941 for (link = REG_NOTES (p); link; link = next)
1943 next = XEXP (link, 1);
1945 if (REG_NOTE_KIND (link) != REG_DEAD
1946 || !REG_P (XEXP (link, 0)))
1947 continue;
1949 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1951 /* Move the REG_DEAD note from P to INSN. */
1952 remove_note (p, link);
1953 XEXP (link, 1) = REG_NOTES (insn);
1954 REG_NOTES (insn) = link;
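/* Concrete shape of the problem (register number hypothetical):

	insn I:	 use	r4	; being moved into a delay slot
	insn P:	 ...		; carries (REG_DEAD (reg:SI 4))
     L1:...

   If P later gets moved before L1 as a redundant insn, its REG_DEAD
   note would sit immediately before the CODE_LABEL and
   mark_target_live_regs would think r4 is dead there; moving the
   note onto I first avoids that.  */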
1959 /* Called when an insn redundant with START_INSN is deleted. If there
1960 is a REG_DEAD note for the target of START_INSN between START_INSN
1961 and STOP_INSN, then the REG_DEAD note needs to be deleted since the
1962 value no longer dies there.
1964 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1965 confused into thinking the register is dead. */
1967 static void
1968 fix_reg_dead_note (rtx start_insn, rtx stop_insn)
1970 rtx p, link, next;
1972 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1973 p = next_nonnote_insn (p))
1974 for (link = REG_NOTES (p); link; link = next)
1976 next = XEXP (link, 1);
1978 if (REG_NOTE_KIND (link) != REG_DEAD
1979 || !REG_P (XEXP (link, 0)))
1980 continue;
1982 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1984 remove_note (p, link);
1985 return;
1990 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1992 This handles the case of udivmodXi4 instructions which optimize their
1993 output depending on whether any REG_UNUSED notes are present.
1994 We must make sure that INSN calculates as many results as REDUNDANT_INSN
1995 does. */
1997 static void
1998 update_reg_unused_notes (rtx insn, rtx redundant_insn)
2000 rtx link, next;
2002 for (link = REG_NOTES (insn); link; link = next)
2004 next = XEXP (link, 1);
2006 if (REG_NOTE_KIND (link) != REG_UNUSED
2007 || !REG_P (XEXP (link, 0)))
2008 continue;
2010 if (! find_regno_note (redundant_insn, REG_UNUSED,
2011 REGNO (XEXP (link, 0))))
2012 remove_note (insn, link);
2016 /* Return the label before INSN, or put a new label there. */
2018 static rtx
2019 get_label_before (rtx insn)
2021 rtx label;
2023 /* Find an existing label at this point
2024 or make a new one if there is none. */
2025 label = prev_nonnote_insn (insn);
2027 if (label == 0 || !LABEL_P (label))
2029 rtx prev = PREV_INSN (insn);
2031 label = gen_label_rtx ();
2032 emit_label_after (label, prev);
2033 LABEL_NUSES (label) = 0;
2035 return label;
2038 /* Scan a function looking for insns that need a delay slot and find insns to
2039 put into the delay slot.
2041 NON_JUMPS_P is nonzero if we are to try to fill only non-jump insns (such
2042 as calls). We do these first since we don't want jump insns (that are
2043 easier to fill) to get the only insns that could be used for non-jump insns.
2044 When it is zero, only try to fill JUMP_INSNs.
2046 When slots are filled in this manner, the insns (including the
2047 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2048 it is possible to tell whether a delay slot has really been filled
2049 or not. `final' knows how to deal with this, by communicating
2050 through FINAL_SEQUENCE. */
2052 static void
2053 fill_simple_delay_slots (int non_jumps_p)
2055 rtx insn, pat, trial, next_trial;
2056 int i;
2057 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2058 struct resources needed, set;
2059 int slots_to_fill, slots_filled;
2060 rtx delay_list;
2062 for (i = 0; i < num_unfilled_slots; i++)
2064 int flags;
2065 /* Get the next insn to fill. If it has already had any slots assigned,
2066 we can't do anything with it. Maybe we'll improve this later. */
2068 insn = unfilled_slots_base[i];
2069 if (insn == 0
2070 || INSN_DELETED_P (insn)
2071 || (NONJUMP_INSN_P (insn)
2072 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2073 || (JUMP_P (insn) && non_jumps_p)
2074 || (!JUMP_P (insn) && ! non_jumps_p))
2075 continue;
2077 /* It may have been that this insn used to need delay slots, but
2078 now doesn't; ignore in that case. This can happen, for example,
2079 on the HP PA RISC, where the number of delay slots depends on
2080 what insns are nearby. */
2081 slots_to_fill = num_delay_slots (insn);
2083 Some machine descriptions have defined instructions to have
2084 delay slots only in certain circumstances which may depend on
2085 nearby insns (which change due to reorg's actions).
2087 For example, the PA port normally has delay slots for unconditional
2088 jumps.
2090 However, the PA port claims such jumps do not have a delay slot
2091 if they are immediate successors of certain CALL_INSNs. This
2092 allows the port to favor filling the delay slot of the call with
2093 the unconditional jump. */
2094 if (slots_to_fill == 0)
2095 continue;
2097 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2098 says how many. After initialization, first try optimizing
2100 call _foo call _foo
2101 nop add %o7,.-L1,%o7
2102 b,a L1
2105 If this case applies, the delay slot of the call is filled with
2106 the unconditional jump. This is done first to avoid having the
2107 delay slot of the call filled in the backward scan. Also, since
2108 the unconditional jump is likely to also have a delay slot, that
2109 insn must exist when it is subsequently scanned.
2111 This is tried on each insn with delay slots as some machines
2112 have insns which perform calls, but are not represented as
2113 CALL_INSNs. */
2115 slots_filled = 0;
2116 delay_list = 0;
2118 if (JUMP_P (insn))
2119 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2120 else
2121 flags = get_jump_flags (insn, NULL_RTX);
2123 if ((trial = next_active_insn (insn))
2124 && JUMP_P (trial)
2125 && simplejump_p (trial)
2126 && eligible_for_delay (insn, slots_filled, trial, flags)
2127 && no_labels_between_p (insn, trial)
2128 && ! can_throw_internal (trial))
2130 rtx *tmp;
2131 slots_filled++;
2132 delay_list = add_to_delay_list (trial, delay_list);
2134 /* TRIAL may have had its delay slot filled, then unfilled. When
2135 the delay slot is unfilled, TRIAL is placed back on the unfilled
2136 slots obstack. Unfortunately, it is placed on the end of the
2137 obstack, not in its original location. Therefore, we must search
2138 from entry i + 1 to the end of the unfilled slots obstack to
2139 try and find TRIAL. */
2140 tmp = &unfilled_slots_base[i + 1];
2141 while (*tmp != trial && tmp != unfilled_slots_next)
2142 tmp++;
2144 /* Remove the unconditional jump from consideration for delay slot
2145 filling and unthread it. */
2146 if (*tmp == trial)
2147 *tmp = 0;
2149 rtx next = NEXT_INSN (trial);
2150 rtx prev = PREV_INSN (trial);
2151 if (prev)
2152 NEXT_INSN (prev) = next;
2153 if (next)
2154 PREV_INSN (next) = prev;
2158 /* Now, scan backwards from the insn to search for a potential
2159 delay-slot candidate. Stop searching when a label or jump is hit.
2161 For each candidate, if it is to go into the delay slot (moved
2162 forward in execution sequence), it must not need or set any resources
2163 that were set by later insns and must not set any resources that
2164 are needed for those insns.
2166 The delay slot insn itself sets resources unless it is a call
2167 (in which case the called routine, not the insn itself, is doing
2168 the setting). */
2170 if (slots_filled < slots_to_fill)
2172 CLEAR_RESOURCE (&needed);
2173 CLEAR_RESOURCE (&set);
2174 mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2175 mark_referenced_resources (insn, &needed, false);
2177 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2178 trial = next_trial)
2180 next_trial = prev_nonnote_insn (trial);
2182 /* This must be an INSN or CALL_INSN. */
2183 pat = PATTERN (trial);
2185 /* Stand-alone USE and CLOBBER are just for flow. */
2186 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2187 continue;
2189 /* Check for resource conflict first, to avoid unnecessary
2190 splitting. */
2191 if (! insn_references_resource_p (trial, &set, true)
2192 && ! insn_sets_resource_p (trial, &set, true)
2193 && ! insn_sets_resource_p (trial, &needed, true)
2194 #ifdef HAVE_cc0
2195 /* Can't separate set of cc0 from its use. */
2196 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2197 #endif
2198 && ! can_throw_internal (trial))
2200 trial = try_split (pat, trial, 1);
2201 next_trial = prev_nonnote_insn (trial);
2202 if (eligible_for_delay (insn, slots_filled, trial, flags))
2204 /* In this case, we are searching backward, so if we
2205 find insns to put on the delay list, we want
2206 to put them at the head, rather than the
2207 tail, of the list. */
2209 update_reg_dead_notes (trial, insn);
2210 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2211 trial, delay_list);
2212 update_block (trial, trial);
2213 delete_related_insns (trial);
2214 if (slots_to_fill == ++slots_filled)
2215 break;
2216 continue;
2220 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2221 mark_referenced_resources (trial, &needed, true);
2225 /* If all needed slots haven't been filled, we come here. */
2227 /* Try to optimize case of jumping around a single insn. */
2228 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2229 if (slots_filled != slots_to_fill
2230 && delay_list == 0
2231 && JUMP_P (insn)
2232 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2234 delay_list = optimize_skip (insn);
2235 if (delay_list)
2236 slots_filled += 1;
2238 #endif
2240 /* Try to get insns from beyond the insn needing the delay slot.
2241 These insns can neither set nor reference resources set in insns being
2242 skipped, cannot set resources in the insn being skipped, and, if this
2243 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2244 call might not return).
2246 There used to be code which continued past the target label if
2247 we saw all uses of the target label. This code did not work,
2248 because it failed to account for some instructions which were
2249 both annulled and marked as from the target. This can happen as a
2250 result of optimize_skip. Since this code was redundant with
2251 fill_eager_delay_slots anyways, it was just deleted. */
2253 if (slots_filled != slots_to_fill
2254 /* If this instruction could throw an exception which is
2255 caught in the same function, then it's not safe to fill
2256 the delay slot with an instruction from beyond this
2257 point. For example, consider:
2259 int i = 2;
2261 try {
2262 f();
2263 i = 3;
2264 } catch (...) {}
2266 return i;
2268 Even though `i' is a local variable, we must be sure not
2269 to put `i = 3' in the delay slot if `f' might throw an
2270 exception.
2272 Presumably, we should also check to see if we could get
2273 back to this function via `setjmp'. */
2274 && ! can_throw_internal (insn)
2275 && (!JUMP_P (insn)
2276 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2277 && ! simplejump_p (insn)
2278 && !ANY_RETURN_P (JUMP_LABEL (insn)))))
2280 /* Invariant: TARGET is the insn's jump label if INSN is a JUMP_INSN,
2281 zero otherwise. */
2282 rtx target = 0;
2283 int maybe_never = 0;
2284 rtx pat, trial_delay;
2286 CLEAR_RESOURCE (&needed);
2287 CLEAR_RESOURCE (&set);
2289 if (CALL_P (insn))
2291 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2292 mark_referenced_resources (insn, &needed, true);
2293 maybe_never = 1;
2295 else
2297 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2298 mark_referenced_resources (insn, &needed, true);
2299 if (JUMP_P (insn))
2300 target = JUMP_LABEL (insn);
2303 if (target == 0 || ANY_RETURN_P (target))
2304 for (trial = next_nonnote_insn (insn); !stop_search_p (trial, 1);
2305 trial = next_trial)
2307 next_trial = next_nonnote_insn (trial);
2309 /* This must be an INSN or CALL_INSN. */
2310 pat = PATTERN (trial);
2312 /* Stand-alone USE and CLOBBER are just for flow. */
2313 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2314 continue;
2316 /* If this already has filled delay slots, get the insn needing
2317 the delay slots. */
2318 if (GET_CODE (pat) == SEQUENCE)
2319 trial_delay = XVECEXP (pat, 0, 0);
2320 else
2321 trial_delay = trial;
2323 /* Stop our search when seeing a jump. */
2324 if (JUMP_P (trial_delay))
2325 break;
2327 /* See if we have a resource problem before we try to
2328 split. */
2329 if (GET_CODE (pat) != SEQUENCE
2330 && ! insn_references_resource_p (trial, &set, true)
2331 && ! insn_sets_resource_p (trial, &set, true)
2332 && ! insn_sets_resource_p (trial, &needed, true)
2333 #ifdef HAVE_cc0
2334 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2335 #endif
2336 && ! (maybe_never && may_trap_or_fault_p (pat))
2337 && (trial = try_split (pat, trial, 0))
2338 && eligible_for_delay (insn, slots_filled, trial, flags)
2339 && ! can_throw_internal (trial))
2341 next_trial = next_nonnote_insn (trial);
2342 delay_list = add_to_delay_list (trial, delay_list);
2344 #ifdef HAVE_cc0
2345 if (reg_mentioned_p (cc0_rtx, pat))
2346 link_cc0_insns (trial);
2347 #endif
2349 delete_related_insns (trial);
2350 if (slots_to_fill == ++slots_filled)
2351 break;
2352 continue;
2355 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2356 mark_referenced_resources (trial, &needed, true);
2358 /* Ensure we don't put insns between the setting of cc and the
2359 comparison by moving a setting of cc into an earlier delay
2360 slot since these insns could clobber the condition code. */
2361 set.cc = 1;
2363 /* If this is a call or jump, we might not get here. */
2364 if (CALL_P (trial_delay)
2365 || JUMP_P (trial_delay))
2366 maybe_never = 1;
2369 /* If there are slots left to fill and our search was stopped by an
2370 unconditional branch, try the insn at the branch target. We can
2371 redirect the branch if it works.
2373 Don't do this if the insn at the branch target is a branch. */
2374 if (slots_to_fill != slots_filled
2375 && trial
2376 && jump_to_label_p (trial)
2377 && simplejump_p (trial)
2378 && (target == 0 || JUMP_LABEL (trial) == target)
2379 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2380 && ! (NONJUMP_INSN_P (next_trial)
2381 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2382 && !JUMP_P (next_trial)
2383 && ! insn_references_resource_p (next_trial, &set, true)
2384 && ! insn_sets_resource_p (next_trial, &set, true)
2385 && ! insn_sets_resource_p (next_trial, &needed, true)
2386 #ifdef HAVE_cc0
2387 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2388 #endif
2389 && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
2390 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2391 && eligible_for_delay (insn, slots_filled, next_trial, flags)
2392 && ! can_throw_internal (trial))
2394 /* See comment in relax_delay_slots about necessity of using
2395 next_real_insn here. */
2396 rtx new_label = next_real_insn (next_trial);
2398 if (new_label != 0)
2399 new_label = get_label_before (new_label);
2400 else
2401 new_label = find_end_label (simple_return_rtx);
2403 if (new_label)
2405 delay_list
2406 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2407 slots_filled++;
2408 reorg_redirect_jump (trial, new_label);
2410 /* If we merged because we both jumped to the same place,
2411 redirect the original insn also. */
2412 if (target)
2413 reorg_redirect_jump (insn, new_label);
2418 /* If this is an unconditional jump, then try to get insns from the
2419 target of the jump. */
2420 if (JUMP_P (insn)
2421 && simplejump_p (insn)
2422 && slots_filled != slots_to_fill)
2423 delay_list
2424 = fill_slots_from_thread (insn, const_true_rtx,
2425 next_active_insn (JUMP_LABEL (insn)),
2426 NULL, 1, 1,
2427 own_thread_p (JUMP_LABEL (insn),
2428 JUMP_LABEL (insn), 0),
2429 slots_to_fill, &slots_filled,
2430 delay_list);
2432 if (delay_list)
2433 unfilled_slots_base[i]
2434 = emit_delay_sequence (insn, delay_list, slots_filled);
2436 if (slots_to_fill == slots_filled)
2437 unfilled_slots_base[i] = 0;
2439 note_delay_statistics (slots_filled, 0);
2442 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2443 /* See if the epilogue needs any delay slots. Try to fill them if so.
2444 The only thing we can do is scan backwards from the end of the
2445 function. If we did this in a previous pass, it is incorrect to do it
2446 again. */
2447 if (crtl->epilogue_delay_list)
2448 return;
2450 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2451 if (slots_to_fill == 0)
2452 return;
2454 slots_filled = 0;
2455 CLEAR_RESOURCE (&set);
2457 /* The frame pointer and stack pointer are needed at the beginning of
2458 the epilogue, so instructions setting them cannot be put in the
2459 epilogue delay slot. However, everything else needed at function
2460 end is safe, so we don't want to use end_of_function_needs here. */
2461 CLEAR_RESOURCE (&needed);
2462 if (frame_pointer_needed)
2464 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2465 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2466 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2467 #endif
2468 if (! EXIT_IGNORE_STACK
2469 || current_function_sp_is_unchanging)
2470 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2472 else
2473 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2475 #ifdef EPILOGUE_USES
2476 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2478 if (EPILOGUE_USES (i))
2479 SET_HARD_REG_BIT (needed.regs, i);
2481 #endif
2483 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2484 trial = PREV_INSN (trial))
2486 if (NOTE_P (trial))
2487 continue;
2488 pat = PATTERN (trial);
2489 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2490 continue;
2492 if (! insn_references_resource_p (trial, &set, true)
2493 && ! insn_sets_resource_p (trial, &needed, true)
2494 && ! insn_sets_resource_p (trial, &set, true)
2495 #ifdef HAVE_cc0
2496 /* Don't want to mess with cc0 here. */
2497 && ! reg_mentioned_p (cc0_rtx, pat)
2498 #endif
2499 && ! can_throw_internal (trial))
2501 trial = try_split (pat, trial, 1);
2502 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2504 /* Here as well we are searching backward, so put the
2505 insns we find on the head of the list. */
2507 crtl->epilogue_delay_list
2508 = gen_rtx_INSN_LIST (VOIDmode, trial,
2509 crtl->epilogue_delay_list);
2510 mark_end_of_function_resources (trial, true);
2511 update_block (trial, trial);
2512 delete_related_insns (trial);
2514 /* Clear deleted bit so final.c will output the insn. */
2515 INSN_DELETED_P (trial) = 0;
2517 if (slots_to_fill == ++slots_filled)
2518 break;
2519 continue;
2523 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2524 mark_referenced_resources (trial, &needed, true);
2527 note_delay_statistics (slots_filled, 0);
2528 #endif
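/* A minimal before/after sketch of the backward scan in
   fill_simple_delay_slots above, with made-up mnemonics (compare the
   call/jump example in the comments earlier in that function):

	add	r1,r2		; independent of the branch
	jmp	L10		; wants one delay slot

   becomes

	jmp	L10
	 add	r1,r2		; fills the slot, hiding the penalty

   which is legal because the add neither references nor sets
   anything set by the insns it is moved across, and sets nothing
   they need.  */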
2531 /* Follow any unconditional jump at LABEL;
2532 return the ultimate label reached by any such chain of jumps.
2533 Return a suitable return rtx if the chain ultimately leads to a
2534 return instruction.
2535 If LABEL is not followed by a jump, return LABEL.
2536 If the chain loops or we can't find the end, return LABEL,
2537 since that tells the caller to avoid changing the insn. */
2539 static rtx
2540 follow_jumps (rtx label)
2542 rtx insn;
2543 rtx next;
2544 rtx value = label;
2545 int depth;
2547 if (ANY_RETURN_P (label))
2548 return label;
2549 for (depth = 0;
2550 (depth < 10
2551 && (insn = next_active_insn (value)) != 0
2552 && JUMP_P (insn)
2553 && JUMP_LABEL (insn) != NULL_RTX
2554 && ((any_uncondjump_p (insn) && onlyjump_p (insn))
2555 || ANY_RETURN_P (PATTERN (insn)))
2556 && (next = NEXT_INSN (insn))
2557 && BARRIER_P (next));
2558 depth++)
2560 rtx this_label = JUMP_LABEL (insn);
2561 rtx tem;
2563 /* If we have found a cycle, make the insn jump to itself. */
2564 if (this_label == label)
2565 return label;
2566 if (ANY_RETURN_P (this_label))
2567 return this_label;
2568 tem = next_active_insn (this_label);
2569 if (tem
2570 && (GET_CODE (PATTERN (tem)) == ADDR_VEC
2571 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
2572 break;
2574 value = this_label;
2576 if (depth == 10)
2577 return label;
2578 return value;
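/* For instance (labels hypothetical), with

     L1:jmp	L2
	...
     L2:jmp	L3
	...
     L3:add	r1,r2

   follow_jumps (L1) returns L3, so a branch aimed at L1 can be
   redirected straight to L3.  The depth limit of 10 and the
   this_label == label test above keep a cycle such as "L1: jmp L1"
   from looping forever.  */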
2581 /* Try to find insns to place in delay slots.
2583 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2584 or is an unconditional branch if CONDITION is const_true_rtx.
2585 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2587 THREAD is a flow of control: either the insns to be executed if the
2588 branch is true or those if it is false; THREAD_IF_TRUE says which.
2590 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2591 to see if any potential delay slot insns set things needed there.
2593 LIKELY is nonzero if it is extremely likely that the branch will be
2594 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2595 end of a loop back up to the top.
2597 OWN_THREAD is true if we are the only user of the
2598 thread. I.e., it is the fallthrough code of our jump or the target of the
2599 jump when we are the only jump going there.
2601 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2602 case, we can only take insns from the head of the thread for our delay
2603 slot. We then adjust the jump to point after the insns we have taken. */
2605 static rtx
2606 fill_slots_from_thread (rtx insn, rtx condition, rtx thread,
2607 rtx opposite_thread, int likely, int thread_if_true,
2608 int own_thread, int slots_to_fill,
2609 int *pslots_filled, rtx delay_list)
2611 rtx new_thread;
2612 struct resources opposite_needed, set, needed;
2613 rtx trial;
2614 int lose = 0;
2615 int must_annul = 0;
2616 int flags;
2618 /* Validate our arguments. */
2619 gcc_assert (condition != const_true_rtx || thread_if_true);
2620 gcc_assert (own_thread || thread_if_true);
2622 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2624 /* If our thread is the end of subroutine, we can't get any delay
2625 insns from that. */
2626 if (thread == NULL_RTX || ANY_RETURN_P (thread))
2627 return delay_list;
2629 /* If this is an unconditional branch, nothing is needed at the
2630 opposite thread. Otherwise, compute what is needed there. */
2631 if (condition == const_true_rtx)
2632 CLEAR_RESOURCE (&opposite_needed);
2633 else
2634 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2636 /* If the insn at THREAD can be split, do it here to avoid having to
2637 update THREAD and NEW_THREAD if it is done in the loop below. Also
2638 initialize NEW_THREAD. */
2640 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2642 /* Scan insns at THREAD. We are looking for an insn that can be removed
2643 from THREAD (it neither sets nor references resources that were set
2644 ahead of it and it doesn't set anything needed by the insns ahead of
2645 it) and that either can be placed in an annulling insn or isn't
2646 needed at OPPOSITE_THREAD. */
2648 CLEAR_RESOURCE (&needed);
2649 CLEAR_RESOURCE (&set);
2651 /* If we do not own this thread, we must stop as soon as we find
2652 something that we can't put in a delay slot, since all we can do
2653 is branch into THREAD at a later point. Therefore, labels stop
2654 the search if this is not the `true' thread. */
2656 for (trial = thread;
2657 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2658 trial = next_nonnote_insn (trial))
2660 rtx pat, old_trial;
2662 /* If we have passed a label, we no longer own this thread. */
2663 if (LABEL_P (trial))
2665 own_thread = 0;
2666 continue;
2669 pat = PATTERN (trial);
2670 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2671 continue;
2673 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2674 don't separate or copy insns that set and use CC0. */
2675 if (! insn_references_resource_p (trial, &set, true)
2676 && ! insn_sets_resource_p (trial, &set, true)
2677 && ! insn_sets_resource_p (trial, &needed, true)
2678 #ifdef HAVE_cc0
2679 && ! (reg_mentioned_p (cc0_rtx, pat)
2680 && (! own_thread || ! sets_cc0_p (pat)))
2681 #endif
2682 && ! can_throw_internal (trial))
2684 rtx prior_insn;
2686 /* If TRIAL is redundant with some insn before INSN, we don't
2687 actually need to add it to the delay list; we can merely pretend
2688 we did. */
2689 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2691 fix_reg_dead_note (prior_insn, insn);
2692 if (own_thread)
2694 update_block (trial, thread);
2695 if (trial == thread)
2697 thread = next_active_insn (thread);
2698 if (new_thread == trial)
2699 new_thread = thread;
2702 delete_related_insns (trial);
2704 else
2706 update_reg_unused_notes (prior_insn, trial);
2707 new_thread = next_active_insn (trial);
2710 continue;
2713 /* There are two ways we can win: If TRIAL doesn't set anything
2714 needed at the opposite thread and can't trap, or if it can
2715 go into an annulled delay slot. */
2716 if (!must_annul
2717 && (condition == const_true_rtx
2718 || (! insn_sets_resource_p (trial, &opposite_needed, true)
2719 && ! may_trap_or_fault_p (pat))))
2721 old_trial = trial;
2722 trial = try_split (pat, trial, 0);
2723 if (new_thread == old_trial)
2724 new_thread = trial;
2725 if (thread == old_trial)
2726 thread = trial;
2727 pat = PATTERN (trial);
2728 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2729 goto winner;
2731 else if (0
2732 #ifdef ANNUL_IFTRUE_SLOTS
2733 || ! thread_if_true
2734 #endif
2735 #ifdef ANNUL_IFFALSE_SLOTS
2736 || thread_if_true
2737 #endif
2740 old_trial = trial;
2741 trial = try_split (pat, trial, 0);
2742 if (new_thread == old_trial)
2743 new_thread = trial;
2744 if (thread == old_trial)
2745 thread = trial;
2746 pat = PATTERN (trial);
2747 if ((must_annul || delay_list == NULL) && (thread_if_true
2748 ? check_annul_list_true_false (0, delay_list)
2749 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2750 : check_annul_list_true_false (1, delay_list)
2751 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2753 rtx temp;
2755 must_annul = 1;
2756 winner:
2758 #ifdef HAVE_cc0
2759 if (reg_mentioned_p (cc0_rtx, pat))
2760 link_cc0_insns (trial);
2761 #endif
2763 /* If we own this thread, delete the insn. If this is the
2764 destination of a branch, show that a basic block status
2765 may have been updated. In any case, mark the new
2766 starting point of this thread. */
2767 if (own_thread)
2769 rtx note;
2771 update_block (trial, thread);
2772 if (trial == thread)
2774 thread = next_active_insn (thread);
2775 if (new_thread == trial)
2776 new_thread = thread;
2779 /* We are moving this insn, not deleting it. We must
2780 temporarily increment the use count on any referenced
2781 label lest it be deleted by delete_related_insns. */
2782 for (note = REG_NOTES (trial);
2783 note != NULL_RTX;
2784 note = XEXP (note, 1))
2785 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2786 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2788 /* REG_LABEL_OPERAND could be
2789 NOTE_INSN_DELETED_LABEL too. */
2790 if (LABEL_P (XEXP (note, 0)))
2791 LABEL_NUSES (XEXP (note, 0))++;
2792 else
2793 gcc_assert (REG_NOTE_KIND (note)
2794 == REG_LABEL_OPERAND);
2796 if (jump_to_label_p (trial))
2797 LABEL_NUSES (JUMP_LABEL (trial))++;
2799 delete_related_insns (trial);
2801 for (note = REG_NOTES (trial);
2802 note != NULL_RTX;
2803 note = XEXP (note, 1))
2804 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2805 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2807 /* REG_LABEL_OPERAND could be
2808 NOTE_INSN_DELETED_LABEL too. */
2809 if (LABEL_P (XEXP (note, 0)))
2810 LABEL_NUSES (XEXP (note, 0))--;
2811 else
2812 gcc_assert (REG_NOTE_KIND (note)
2813 == REG_LABEL_OPERAND);
2815 if (jump_to_label_p (trial))
2816 LABEL_NUSES (JUMP_LABEL (trial))--;
2818 else
2819 new_thread = next_active_insn (trial);
2821 temp = own_thread ? trial : copy_rtx (trial);
2822 if (thread_if_true)
2823 INSN_FROM_TARGET_P (temp) = 1;
2825 delay_list = add_to_delay_list (temp, delay_list);
2827 if (slots_to_fill == ++(*pslots_filled))
2829 /* Even though we have filled all the slots, we
2830 may be branching to a location that has a
2831 redundant insn. Skip any if so. */
2832 while (new_thread && ! own_thread
2833 && ! insn_sets_resource_p (new_thread, &set, true)
2834 && ! insn_sets_resource_p (new_thread, &needed,
2835 true)
2836 && ! insn_references_resource_p (new_thread,
2837 &set, true)
2838 && (prior_insn
2839 = redundant_insn (new_thread, insn,
2840 delay_list)))
2842 /* We know we do not own the thread, so no need
2843 to call update_block and delete_insn. */
2844 fix_reg_dead_note (prior_insn, insn);
2845 update_reg_unused_notes (prior_insn, new_thread);
2846 new_thread = next_active_insn (new_thread);
2848 break;
2851 continue;
2856 /* This insn can't go into a delay slot. */
2857 lose = 1;
2858 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2859 mark_referenced_resources (trial, &needed, true);
2861 /* Ensure we don't put insns between the setting of cc and the comparison
2862 by moving a setting of cc into an earlier delay slot since these insns
2863 could clobber the condition code. */
2864 set.cc = 1;
2866 /* If this insn is a register-register copy and the next insn has
2867 a use of our destination, change it to use our source. That way,
2868 it will become a candidate for our delay slot the next time
2869 through this loop. This case occurs commonly in loops that
2870 scan a list.
2872 We could check for more complex cases than those tested below,
2873 but it doesn't seem worth it. It might also be a good idea to try
2874 to swap the two insns. That might do better.
2876 We can't do this if the next insn modifies our destination, because
2877 that would make the replacement into the insn invalid. We also can't
2878 do this if it modifies our source, because it might be an earlyclobber
2879 operand. This latter test also prevents updating the contents of
2880 a PRE_INC. We also can't do this if there's overlap of source and
2881 destination. Overlap may happen for larger-than-register-size modes. */
2883 if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
2884 && REG_P (SET_SRC (pat))
2885 && REG_P (SET_DEST (pat))
2886 && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
2888 rtx next = next_nonnote_insn (trial);
2890 if (next && NONJUMP_INSN_P (next)
2891 && GET_CODE (PATTERN (next)) != USE
2892 && ! reg_set_p (SET_DEST (pat), next)
2893 && ! reg_set_p (SET_SRC (pat), next)
2894 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2895 && ! modified_in_p (SET_DEST (pat), next))
2896 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2900 /* If we stopped on a branch insn that has delay slots, see if we can
2901 steal some of the insns in those slots. */
2902 if (trial && NONJUMP_INSN_P (trial)
2903 && GET_CODE (PATTERN (trial)) == SEQUENCE
2904 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
2906 /* If this is the `true' thread, we will want to follow the jump,
2907 so we can only do this if we have taken everything up to here. */
2908 if (thread_if_true && trial == new_thread)
2910 delay_list
2911 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2912 delay_list, &set, &needed,
2913 &opposite_needed, slots_to_fill,
2914 pslots_filled, &must_annul,
2915 &new_thread);
2916 /* If we owned the thread and are told that it branched
2917 elsewhere, make sure we own the thread at the new location. */
2918 if (own_thread && trial != new_thread)
2919 own_thread = own_thread_p (new_thread, new_thread, 0);
2921 else if (! thread_if_true)
2922 delay_list
2923 = steal_delay_list_from_fallthrough (insn, condition,
2924 PATTERN (trial),
2925 delay_list, &set, &needed,
2926 &opposite_needed, slots_to_fill,
2927 pslots_filled, &must_annul);
2930 /* If we haven't found anything for this delay slot and it is very
2931 likely that the branch will be taken, see if the insn at our target
2932 increments or decrements a register with an increment that does not
2933 depend on the destination register. If so, try to place the opposite
2934 arithmetic insn after the jump insn and put the arithmetic insn in the
2935 delay slot. If we can't do this, return. */
2936 if (delay_list == 0 && likely
2937 && new_thread && !ANY_RETURN_P (new_thread)
2938 && NONJUMP_INSN_P (new_thread)
2939 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2940 && asm_noperands (PATTERN (new_thread)) < 0)
2942 rtx pat = PATTERN (new_thread);
2943 rtx dest;
2944 rtx src;
2946 trial = new_thread;
2947 pat = PATTERN (trial);
2949 if (!NONJUMP_INSN_P (trial)
2950 || GET_CODE (pat) != SET
2951 || ! eligible_for_delay (insn, 0, trial, flags)
2952 || can_throw_internal (trial))
2953 return 0;
2955 dest = SET_DEST (pat), src = SET_SRC (pat);
2956 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2957 && rtx_equal_p (XEXP (src, 0), dest)
2958 && (!FLOAT_MODE_P (GET_MODE (src))
2959 || flag_unsafe_math_optimizations)
2960 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2961 && ! side_effects_p (pat))
2963 rtx other = XEXP (src, 1);
2964 rtx new_arith;
2965 rtx ninsn;
2967 /* If this is a constant adjustment, use the same code with
2968 the negated constant. Otherwise, reverse the sense of the
2969 arithmetic. */
2970 if (CONST_INT_P (other))
2971 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2972 negate_rtx (GET_MODE (src), other));
2973 else
2974 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2975 GET_MODE (src), dest, other);
2977 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2978 insn);
2980 if (recog_memoized (ninsn) < 0
2981 || (extract_insn (ninsn), ! constrain_operands (1)))
2983 delete_related_insns (ninsn);
2984 return 0;
2987 if (own_thread)
2989 update_block (trial, thread);
2990 if (trial == thread)
2992 thread = next_active_insn (thread);
2993 if (new_thread == trial)
2994 new_thread = thread;
2996 delete_related_insns (trial);
2998 else
2999 new_thread = next_active_insn (trial);
3001 ninsn = own_thread ? trial : copy_rtx (trial);
3002 if (thread_if_true)
3003 INSN_FROM_TARGET_P (ninsn) = 1;
3005 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3006 (*pslots_filled)++;
3010 if (delay_list && must_annul)
3011 INSN_ANNULLED_BRANCH_P (insn) = 1;
3013 /* If we are to branch into the middle of this thread, find an appropriate
3014 label or make a new one if none, and redirect INSN to it. If we hit the
3015 end of the function, use the end-of-function label. */
3016 if (new_thread != thread)
3018 rtx label;
3020 gcc_assert (thread_if_true);
3022 if (new_thread && simplejump_or_return_p (new_thread)
3023 && redirect_with_delay_list_safe_p (insn,
3024 JUMP_LABEL (new_thread),
3025 delay_list))
3026 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3028 if (ANY_RETURN_P (new_thread))
3029 label = find_end_label (new_thread);
3030 else if (LABEL_P (new_thread))
3031 label = new_thread;
3032 else
3033 label = get_label_before (new_thread);
3035 if (label)
3036 reorg_redirect_jump (insn, label);
3039 return delay_list;
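/* Sketch of the opposite-arithmetic trick implemented just above,
   with hypothetical registers and a branch assumed almost always
   taken:

	beq	L4		; likely branch, slot empty
	...
     L4:add	r1,8

   becomes

	beq	L4'		; L4' is just past the add
	 add	r1,8		; copy of the increment in the slot
	add	r1,-8		; same code, negated constant
	...
     L4':...

   The taken path performs the increment once, in the delay slot; the
   rarely executed fall-through immediately undoes it.  */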
3042 /* Make another attempt to find insns to place in delay slots.
3044 We previously looked for insns located in front of the delay insn
3045 and, for non-jump delay insns, located behind the delay insn.
3047 Here we try to schedule only jump insns, moving insns from either
3048 the target or the following insns into the delay slot. If annulling is
3049 supported, we are likely to do this. Otherwise, we can do this only
3050 if safe. */
3052 static void
3053 fill_eager_delay_slots (void)
3055 rtx insn;
3056 int i;
3057 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3059 for (i = 0; i < num_unfilled_slots; i++)
3061 rtx condition;
3062 rtx target_label, insn_at_target, fallthrough_insn;
3063 rtx delay_list = 0;
3064 int own_target;
3065 int own_fallthrough;
3066 int prediction, slots_to_fill, slots_filled;
3068 insn = unfilled_slots_base[i];
3069 if (insn == 0
3070 || INSN_DELETED_P (insn)
3071 || !JUMP_P (insn)
3072 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3073 continue;
3075 slots_to_fill = num_delay_slots (insn);
3076 Some machine descriptions have defined instructions to have
3077 delay slots only in certain circumstances which may depend on
3078 nearby insns (which change due to reorg's actions).
3080 For example, the PA port normally has delay slots for unconditional
3081 jumps.
3083 However, the PA port claims such jumps do not have a delay slot
3084 if they are immediate successors of certain CALL_INSNs. This
3085 allows the port to favor filling the delay slot of the call with
3086 the unconditional jump. */
3087 if (slots_to_fill == 0)
3088 continue;
3090 slots_filled = 0;
3091 target_label = JUMP_LABEL (insn);
3092 condition = get_branch_condition (insn, target_label);
3094 if (condition == 0)
3095 continue;
3097 /* Get the next active fallthrough and target insns and see if we own
3098 them. Then see whether the branch is likely true. We don't need
3099 to do a lot of this for unconditional branches. */
3101 insn_at_target = first_active_target_insn (target_label);
3102 own_target = own_thread_p (target_label, target_label, 0);
3104 if (condition == const_true_rtx)
3106 own_fallthrough = 0;
3107 fallthrough_insn = 0;
3108 prediction = 2;
3110 else
3112 fallthrough_insn = next_active_insn (insn);
3113 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3114 prediction = mostly_true_jump (insn, condition);
3117 /* If this insn is expected to branch, first try to get insns from our
3118 target, then our fallthrough insns. If it is not expected to branch,
3119 try the other order. */
3121 if (prediction > 0)
3123 delay_list
3124 = fill_slots_from_thread (insn, condition, insn_at_target,
3125 fallthrough_insn, prediction == 2, 1,
3126 own_target,
3127 slots_to_fill, &slots_filled, delay_list);
3129 if (delay_list == 0 && own_fallthrough)
3131 /* Even though we didn't find anything for delay slots,
3132 we might have found a redundant insn which we deleted
3133 from the thread that was filled. So we have to recompute
3134 the next insn at the target. */
3135 target_label = JUMP_LABEL (insn);
3136 insn_at_target = first_active_target_insn (target_label);
3138 delay_list
3139 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3140 insn_at_target, 0, 0,
3141 own_fallthrough,
3142 slots_to_fill, &slots_filled,
3143 delay_list);
3146 else
3148 if (own_fallthrough)
3149 delay_list
3150 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3151 insn_at_target, 0, 0,
3152 own_fallthrough,
3153 slots_to_fill, &slots_filled,
3154 delay_list);
3156 if (delay_list == 0)
3157 delay_list
3158 = fill_slots_from_thread (insn, condition, insn_at_target,
3159 next_active_insn (insn), 0, 1,
3160 own_target,
3161 slots_to_fill, &slots_filled,
3162 delay_list);
3165 if (delay_list)
3166 unfilled_slots_base[i]
3167 = emit_delay_sequence (insn, delay_list, slots_filled);
3169 if (slots_to_fill == slots_filled)
3170 unfilled_slots_base[i] = 0;
3172 note_delay_statistics (slots_filled, 1);
3176 static void delete_computation (rtx insn);
3178 /* Recursively delete prior insns that compute the value (used only by INSN
3179 which the caller is deleting) stored in the register mentioned by NOTE
3180 which is a REG_DEAD note associated with INSN. */
3182 static void
3183 delete_prior_computation (rtx note, rtx insn)
3185 rtx our_prev;
3186 rtx reg = XEXP (note, 0);
3188 for (our_prev = prev_nonnote_insn (insn);
3189 our_prev && (NONJUMP_INSN_P (our_prev)
3190 || CALL_P (our_prev));
3191 our_prev = prev_nonnote_insn (our_prev))
3193 rtx pat = PATTERN (our_prev);
3195 /* If we reach a CALL which is not calling a const function
3196 or the callee pops the arguments, then give up. */
3197 if (CALL_P (our_prev)
3198 && (! RTL_CONST_CALL_P (our_prev)
3199 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
3200 break;
3202 /* If we reach a SEQUENCE, it is too complex to try to
3203 do anything with it, so give up. We can be run during
3204 and after reorg, so SEQUENCE rtl can legitimately show
3205 up here. */
3206 if (GET_CODE (pat) == SEQUENCE)
3207 break;
3209 if (GET_CODE (pat) == USE
3210 && NONJUMP_INSN_P (XEXP (pat, 0)))
3211 /* reorg creates USEs that look like this. We leave them
3212 alone because reorg needs them for its own purposes. */
3213 break;
3215 if (reg_set_p (reg, pat))
3217 if (side_effects_p (pat) && !CALL_P (our_prev))
3218 break;
3220 if (GET_CODE (pat) == PARALLEL)
3222 /* If we find a SET of something else, we can't
3223 delete the insn. */
3225 int i;
3227 for (i = 0; i < XVECLEN (pat, 0); i++)
3229 rtx part = XVECEXP (pat, 0, i);
3231 if (GET_CODE (part) == SET
3232 && SET_DEST (part) != reg)
3233 break;
3236 if (i == XVECLEN (pat, 0))
3237 delete_computation (our_prev);
3239 else if (GET_CODE (pat) == SET
3240 && REG_P (SET_DEST (pat)))
3242 int dest_regno = REGNO (SET_DEST (pat));
3243 int dest_endregno = END_REGNO (SET_DEST (pat));
3244 int regno = REGNO (reg);
3245 int endregno = END_REGNO (reg);
3247 if (dest_regno >= regno
3248 && dest_endregno <= endregno)
3249 delete_computation (our_prev);
3251 /* We may have a multi-word hard register and some, but not
3252 all, of the words of the register are needed in subsequent
3253 insns. Write REG_UNUSED notes for those parts that were not
3254 needed. */
3255 else if (dest_regno <= regno
3256 && dest_endregno >= endregno)
3258 int i;
3260 add_reg_note (our_prev, REG_UNUSED, reg);
3262 for (i = dest_regno; i < dest_endregno; i++)
3263 if (! find_regno_note (our_prev, REG_UNUSED, i))
3264 break;
3266 if (i == dest_endregno)
3267 delete_computation (our_prev);
3271 break;
3274 /* If PAT references the register that dies here, it is an
3275 additional use. Hence any prior SET isn't dead. However, this
3276 insn becomes the new place for the REG_DEAD note. */
3277 if (reg_overlap_mentioned_p (reg, pat))
3279 XEXP (note, 1) = REG_NOTES (our_prev);
3280 REG_NOTES (our_prev) = note;
3281 break;
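/* Illustration (registers hypothetical): when the caller deletes

	insn A:	 r5 = r6 + r7		; r5's only use is in B
	insn B:	 ... r5 ...		; has (REG_DEAD (reg:SI 5))

   the REG_DEAD note on B lets us walk back from B, see that A does
   nothing except set r5, and delete A too; partial overlaps of
   multi-word hard registers instead get REG_UNUSED notes, as handled
   above.  */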
3286 /* Delete INSN and recursively delete insns that compute values used only
3287 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3289 Look at all our REG_DEAD notes. If a previous insn does nothing other
3290 than set a register that dies in this insn, we can delete that insn
3291 as well.
3293 On machines with CC0, if CC0 is used in this insn, we may be able to
3294 delete the insn that set it. */
3296 static void
3297 delete_computation (rtx insn)
3299 rtx note, next;
3301 #ifdef HAVE_cc0
3302 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3304 rtx prev = prev_nonnote_insn (insn);
3305 /* We assume that at this stage
3306 CC's are always set explicitly
3307 and always immediately before the jump that
3308 will use them. So if the previous insn
3309 exists to set the CC's, delete it
3310 (unless it performs auto-increments, etc.). */
3311 if (prev && NONJUMP_INSN_P (prev)
3312 && sets_cc0_p (PATTERN (prev)))
3314 if (sets_cc0_p (PATTERN (prev)) > 0
3315 && ! side_effects_p (PATTERN (prev)))
3316 delete_computation (prev);
3317 else
3318 /* Otherwise, show that cc0 won't be used. */
3319 add_reg_note (prev, REG_UNUSED, cc0_rtx);
3322 #endif
3324 for (note = REG_NOTES (insn); note; note = next)
3326 next = XEXP (note, 1);
3328 if (REG_NOTE_KIND (note) != REG_DEAD
3329 /* Verify that the REG_NOTE is legitimate. */
3330 || !REG_P (XEXP (note, 0)))
3331 continue;
3333 delete_prior_computation (note, insn);
3336 delete_related_insns (insn);
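/* On a cc0 target the HAVE_cc0 block above handles, for example
   (hypothetical insns):

	insn C:	 cc0 = compare (r1, 0)
	jump J:	 if cc0 ... goto L

   Deleting J lets C be deleted as well when C sets only cc0 and has
   no side effects; otherwise C merely receives a REG_UNUSED note for
   cc0.  */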
3339 /* If all INSN does is set the pc, delete it,
3340 and delete the insn that set the condition codes for it
3341 if that's what the previous thing was. */
3343 static void
3344 delete_jump (rtx insn)
3346 rtx set = single_set (insn);
3348 if (set && GET_CODE (SET_DEST (set)) == PC)
3349 delete_computation (insn);
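/* Return the label before the next active insn after X, searching
   backward from that insn but not past SCAN_LIMIT; return NULL_RTX
   if no label is found first. (This is how relax_delay_slots below
   detects a jump around a single insn.) */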
3352 static rtx
3353 label_before_next_insn (rtx x, rtx scan_limit)
3355 rtx insn = next_active_insn (x);
3356 while (insn)
3358 insn = PREV_INSN (insn);
3359 if (insn == scan_limit || insn == NULL_RTX)
3360 return NULL_RTX;
3361 if (LABEL_P (insn))
3362 break;
3364 return insn;
3368 /* Once we have tried two ways to fill a delay slot, make a pass over the
3369 code to try to improve the results and to do such things as more jump
3370 threading. */
3372 static void
3373 relax_delay_slots (rtx first)
3375 rtx insn, next, pat;
3376 rtx trial, delay_insn, target_label;
3378 /* Look at every JUMP_INSN and see if we can improve it. */
3379 for (insn = first; insn; insn = next)
3381 rtx other;
3383 next = next_active_insn (insn);
3385 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3386 the next insn, or jumps to a label that is not the last of a
3387 group of consecutive labels. */
3388 if (JUMP_P (insn)
3389 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3390 && !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
3392 target_label = skip_consecutive_labels (follow_jumps (target_label));
3393 if (ANY_RETURN_P (target_label))
3394 target_label = find_end_label (target_label);
3396 if (target_label && next_active_insn (target_label) == next
3397 && ! condjump_in_parallel_p (insn))
3399 delete_jump (insn);
3400 continue;
3403 if (target_label && target_label != JUMP_LABEL (insn))
3404 reorg_redirect_jump (insn, target_label);
3406 /* See if this jump conditionally branches around an unconditional
3407 jump. If so, invert this jump and point it to the target of the
3408 second jump. */
3409 if (next && simplejump_or_return_p (next)
3410 && any_condjump_p (insn)
3411 && target_label
3412 && next_active_insn (target_label) == next_active_insn (next)
3413 && no_labels_between_p (insn, next))
3415 rtx label = JUMP_LABEL (next);
3417 /* Be careful how we do this to avoid deleting code or
3418 labels that are momentarily dead. See similar optimization
3419 in jump.c.
3421 We also need to ensure we properly handle the case when
3422 invert_jump fails. */
3424 ++LABEL_NUSES (target_label);
3425 if (!ANY_RETURN_P (label))
3426 ++LABEL_NUSES (label);
3428 if (invert_jump (insn, label, 1))
3430 delete_related_insns (next);
3431 next = insn;
3434 if (!ANY_RETURN_P (label))
3435 --LABEL_NUSES (label);
3437 if (--LABEL_NUSES (target_label) == 0)
3438 delete_related_insns (target_label);
3440 continue;
3444 /* If this is an unconditional jump and the previous insn is a
3445 conditional jump, try reversing the condition of the previous
3446 insn and swapping our targets. The next pass might be able to
3447 fill the slots.
3449 Don't do this if we expect the conditional branch to be true, because
3450 we would then be making the more common case longer. */
3452 if (simplejump_or_return_p (insn)
3453 && (other = prev_active_insn (insn)) != 0
3454 && any_condjump_p (other)
3455 && no_labels_between_p (other, insn)
3456 && 0 > mostly_true_jump (other,
3457 get_branch_condition (other,
3458 JUMP_LABEL (other))))
3460 rtx other_target = JUMP_LABEL (other);
3461 target_label = JUMP_LABEL (insn);
3463 if (invert_jump (other, target_label, 0))
3464 reorg_redirect_jump (insn, other_target);
3467 /* Now look only at cases where we have filled a delay slot. */
3468 if (!NONJUMP_INSN_P (insn)
3469 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3470 continue;
3472 pat = PATTERN (insn);
3473 delay_insn = XVECEXP (pat, 0, 0);
3475 /* See if the first insn in the delay slot is redundant with some
3476 previous insn. Remove it from the delay slot if so; then set up
3477 to reprocess this insn. */
3478 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3480 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3481 next = prev_active_insn (next);
3482 continue;
3485 /* See if we have a RETURN insn with a filled delay slot followed
3486 by a RETURN insn with an unfilled delay slot. If so, we can delete
3487 the first RETURN (but not its delay insn). This gives the same
3488 effect in fewer instructions.
3490 Only do so if optimizing for size since this results in slower, but
3491 smaller code. */
3492 if (optimize_function_for_size_p (cfun)
3493 && ANY_RETURN_P (PATTERN (delay_insn))
3494 && next
3495 && JUMP_P (next)
3496 && PATTERN (next) == PATTERN (delay_insn))
3498 rtx after;
3499 int i;
3501 /* Delete the RETURN and just execute the delay list insns.
3503 We do this by deleting the INSN containing the SEQUENCE, then
3504 re-emitting the insns separately, and then deleting the RETURN.
3505 This allows the count of the jump target to be properly
3506 decremented.
3508 Note that we need to change the INSN_UID of the re-emitted insns
3509 since it is used to hash the insns for mark_target_live_regs and
3510 the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3512 Clear the from target bit, since these insns are no longer
3513 in delay slots. */
3514 for (i = 0; i < XVECLEN (pat, 0); i++)
3515 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3517 trial = PREV_INSN (insn);
3518 delete_related_insns (insn);
3519 gcc_assert (GET_CODE (pat) == SEQUENCE);
3520 add_insn_after (delay_insn, trial, NULL);
3521 after = delay_insn;
3522 for (i = 1; i < XVECLEN (pat, 0); i++)
3523 after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
3524 delete_scheduled_jump (delay_insn);
3525 continue;
3528 /* Now look only at the cases where we have a filled JUMP_INSN. */
3529 if (!JUMP_P (XVECEXP (PATTERN (insn), 0, 0))
3530 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3531 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3532 continue;
3534 target_label = JUMP_LABEL (delay_insn);
3535 if (target_label && ANY_RETURN_P (target_label))
3536 continue;
3538 /* If this jump goes to another unconditional jump, thread it, but
3539 don't convert a jump into a RETURN here. */
3540 trial = skip_consecutive_labels (follow_jumps (target_label));
3541 if (ANY_RETURN_P (trial))
3542 trial = find_end_label (trial);
3544 if (trial && trial != target_label
3545 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3547 reorg_redirect_jump (delay_insn, trial);
3548 target_label = trial;
3551 /* If the first insn at TARGET_LABEL is redundant with a previous
3552 insn, redirect the jump to the following insn and process again.
3553 We use next_real_insn instead of next_active_insn so we
3554 don't skip USE-markers, or we'll end up with incorrect
3555 liveness info. */
3556 trial = next_real_insn (target_label);
3557 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3558 && redundant_insn (trial, insn, 0)
3559 && ! can_throw_internal (trial))
3561 /* Figure out where to emit the special USE insn so we don't
3562 later incorrectly compute register live/death info. */
3563 rtx tmp = next_active_insn (trial);
3564 if (tmp == 0)
3565 tmp = find_end_label (simple_return_rtx);
3567 if (tmp)
3569 /* Insert the special USE insn and update dataflow info. */
3570 update_block (trial, tmp);
3572 /* Now emit a label before the special USE insn, and
3573 redirect our jump to the new label. */
3574 target_label = get_label_before (PREV_INSN (tmp));
3575 reorg_redirect_jump (delay_insn, target_label);
3576 next = insn;
3577 continue;
3581 /* Similarly, if it is an unconditional jump with one insn in its
3582 delay list and that insn is redundant, thread the jump. */
3583 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3584 && XVECLEN (PATTERN (trial), 0) == 2
3585 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
3586 && simplejump_or_return_p (XVECEXP (PATTERN (trial), 0, 0))
3587 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3589 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3590 if (ANY_RETURN_P (target_label))
3591 target_label = find_end_label (target_label);
3593 if (target_label
3594 && redirect_with_delay_slots_safe_p (delay_insn, target_label,
3595 insn))
3597 reorg_redirect_jump (delay_insn, target_label);
3598 next = insn;
3599 continue;
3603 /* See if we have a simple (conditional) jump that is useless. */
3604 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3605 && ! condjump_in_parallel_p (delay_insn)
3606 && prev_active_insn (target_label) == insn
3607 && ! BARRIER_P (prev_nonnote_insn (target_label))
3608 #ifdef HAVE_cc0
3609 /* If the last insn in the delay slot sets CC0 for some insn,
3610 various code assumes that it is in a delay slot. We could
3611 put it back where it belonged and delete the register notes,
3612 but it doesn't seem worthwhile in this uncommon case. */
3613 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3614 REG_CC_USER, NULL_RTX)
3615 #endif
3618 rtx after;
3619 int i;
3621 /* All this insn does is execute its delay list and jump to the
3622 following insn. So delete the jump and just execute the delay
3623 list insns.
3625 We do this by deleting the INSN containing the SEQUENCE, then
3626 re-emitting the insns separately, and then deleting the jump.
3627 This allows the count of the jump target to be properly
3628 decremented.
3630 Note that we need to change the INSN_UID of the re-emitted insns
3631 since it is used to hash the insns for mark_target_live_regs and
3632 the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3634 Clear the from target bit, since these insns are no longer
3635 in delay slots. */
3636 for (i = 0; i < XVECLEN (pat, 0); i++)
3637 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3639 trial = PREV_INSN (insn);
3640 delete_related_insns (insn);
3641 gcc_assert (GET_CODE (pat) == SEQUENCE);
3642 add_insn_after (delay_insn, trial, NULL);
3643 after = delay_insn;
3644 for (i = 1; i < XVECLEN (pat, 0); i++)
3645 after = emit_copy_of_insn_after (XVECEXP (pat, 0, i), after);
3646 delete_scheduled_jump (delay_insn);
3647 continue;
3650 /* See if this is an unconditional jump around a single insn which is
3651 identical to the one in its delay slot. In this case, we can just
3652 delete the branch and the insn in its delay slot. */
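/* Illustrative sketch (hypothetical one-slot target):

       br L [insn X]
       insn X            <- identical fall-through copy
   L:  ...

   Either path executes exactly one copy of X before reaching L, so the
   branch and its slot copy can be deleted and the fall-through copy
   remains. */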
3653 if (next && NONJUMP_INSN_P (next)
3654 && label_before_next_insn (next, insn) == target_label
3655 && simplejump_p (insn)
3656 && XVECLEN (pat, 0) == 2
3657 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3659 delete_related_insns (insn);
3660 continue;
3663 /* See if this jump (with its delay slots) conditionally branches
3664 around an unconditional jump (without delay slots). If so, invert
3665 this jump and point it to the target of the second jump. We cannot
3666 do this for annulled jumps, though. Again, don't convert a jump to
3667 a RETURN here. */
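/* Illustrative sketch (hypothetical condition and labels):

       beq L1 [insn]               bne L2 [insn]
       jump L2              =>
   L1: ...                     L1: ...

   The INSN_FROM_TARGET_P bit of each slot insn is flipped below, since
   the taken and fall-through paths trade places. */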
3668 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3669 && any_condjump_p (delay_insn)
3670 && next && simplejump_or_return_p (next)
3671 && next_active_insn (target_label) == next_active_insn (next)
3672 && no_labels_between_p (insn, next))
3674 rtx label = JUMP_LABEL (next);
3675 rtx old_label = JUMP_LABEL (delay_insn);
3677 if (ANY_RETURN_P (label))
3678 label = find_end_label (label);
3680 /* find_end_label can generate a new label. Check this first. */
3681 if (label
3682 && no_labels_between_p (insn, next)
3683 && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3685 /* Be careful how we do this to avoid deleting code or labels
3686 that are momentarily dead. See similar optimization in
3687 jump.c */
3688 if (old_label)
3689 ++LABEL_NUSES (old_label);
3691 if (invert_jump (delay_insn, label, 1))
3693 int i;
3695 /* Must update the INSN_FROM_TARGET_P bits now that
3696 the branch is reversed, so that mark_target_live_regs
3697 will handle the delay slot insn correctly. */
3698 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3700 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3701 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3704 delete_related_insns (next);
3705 next = insn;
3708 if (old_label && --LABEL_NUSES (old_label) == 0)
3709 delete_related_insns (old_label);
3710 continue;
3714 /* If we own the thread opposite the way this insn branches, see if we
3715 can merge its delay slots with following insns. */
3716 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3717 && own_thread_p (NEXT_INSN (insn), 0, 1))
3718 try_merge_delay_insns (insn, next);
3719 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3720 && own_thread_p (target_label, target_label, 0))
3721 try_merge_delay_insns (insn, next_active_insn (target_label));
3723 /* If we get here, we haven't deleted INSN. But we may have deleted
3724 NEXT, so recompute it. */
3725 next = next_active_insn (insn);
3730 /* Look for filled jumps to the end of function label. We can try to convert
3731 them into RETURN insns if the insns in the delay slot are valid for the
3732 RETURN as well. */
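/* Illustrative (hypothetical): "br end_of_function [insn]" can become
   "return [insn]" when the target's RETURN pattern provides at least
   as many delay slots and each slot insn is eligible for them. */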
3734 static void
3735 make_return_insns (rtx first)
3737 rtx insn, jump_insn, pat;
3738 rtx real_return_label = function_return_label;
3739 rtx real_simple_return_label = function_simple_return_label;
3740 int slots, i;
3742 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3743 /* If a previous pass filled delay slots in the epilogue, things get a
3744 bit more complicated, as those filler insns would generally (without
3745 data flow analysis) have to be executed after any existing branch
3746 delay slot filler insns. It is also unknown whether such a
3747 transformation would actually be profitable. Note that the existing
3748 code only cares for branches with (some) filled delay slots. */
3749 if (crtl->epilogue_delay_list != NULL)
3750 return;
3751 #endif
3753 /* See if there is a RETURN insn in the function other than the one we
3754 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3755 into a RETURN to jump to it. */
3756 for (insn = first; insn; insn = NEXT_INSN (insn))
3757 if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
3759 rtx t = get_label_before (insn);
3760 if (PATTERN (insn) == ret_rtx)
3761 real_return_label = t;
3762 else
3763 real_simple_return_label = t;
3764 break;
3767 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3768 was equal to END_OF_FUNCTION_LABEL. */
3769 if (real_return_label)
3770 LABEL_NUSES (real_return_label)++;
3771 if (real_simple_return_label)
3772 LABEL_NUSES (real_simple_return_label)++;
3774 /* Clear the list of insns to fill so we can use it. */
3775 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3777 for (insn = first; insn; insn = NEXT_INSN (insn))
3779 int flags;
3780 rtx kind, real_label;
3782 /* Only look at filled JUMP_INSNs that go to the end of function
3783 label. */
3784 if (!NONJUMP_INSN_P (insn)
3785 || GET_CODE (PATTERN (insn)) != SEQUENCE
3786 || !jump_to_label_p (XVECEXP (PATTERN (insn), 0, 0)))
3787 continue;
3789 if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) == function_return_label)
3791 kind = ret_rtx;
3792 real_label = real_return_label;
3794 else if (JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0))
3795 == function_simple_return_label)
3797 kind = simple_return_rtx;
3798 real_label = real_simple_return_label;
3800 else
3801 continue;
3803 pat = PATTERN (insn);
3804 jump_insn = XVECEXP (pat, 0, 0);
3806 /* If we can't make the jump into a RETURN, try to redirect it to the best
3807 RETURN and go on to the next insn. */
3808 if (!reorg_redirect_jump (jump_insn, kind))
3810 /* Make sure redirecting the jump will not invalidate the delay
3811 slot insns. */
3812 if (redirect_with_delay_slots_safe_p (jump_insn, real_label, insn))
3813 reorg_redirect_jump (jump_insn, real_label);
3814 continue;
3817 /* See if this RETURN can accept the insns currently in its delay slot.
3818 It can if it has at least as many slots and the contents of each
3819 are valid. */
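/* Illustrative: a SEQUENCE of length 3 carries two filler insns
   (XVECLEN (pat, 0) - 1 == 2), so a RETURN offering two or more slots
   accepts it provided eligible_for_delay (or an annul variant, for
   annulled branches) approves each filler. */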
3821 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3822 slots = num_delay_slots (jump_insn);
3823 if (slots >= XVECLEN (pat, 0) - 1)
3825 for (i = 1; i < XVECLEN (pat, 0); i++)
3826 if (! (
3827 #ifdef ANNUL_IFFALSE_SLOTS
3828 (INSN_ANNULLED_BRANCH_P (jump_insn)
3829 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3830 ? eligible_for_annul_false (jump_insn, i - 1,
3831 XVECEXP (pat, 0, i), flags) :
3832 #endif
3833 #ifdef ANNUL_IFTRUE_SLOTS
3834 (INSN_ANNULLED_BRANCH_P (jump_insn)
3835 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3836 ? eligible_for_annul_true (jump_insn, i - 1,
3837 XVECEXP (pat, 0, i), flags) :
3838 #endif
3839 eligible_for_delay (jump_insn, i - 1,
3840 XVECEXP (pat, 0, i), flags)))
3841 break;
3843 else
3844 i = 0;
3846 if (i == XVECLEN (pat, 0))
3847 continue;
3849 /* We have to do something with this insn. If it is an unconditional
3850 RETURN, delete the SEQUENCE and output the individual insns,
3851 followed by the RETURN. Then set things up so we try to find
3852 insns for its delay slots, if it needs some. */
3853 if (ANY_RETURN_P (PATTERN (jump_insn)))
3855 rtx prev = PREV_INSN (insn);
3857 delete_related_insns (insn);
3858 for (i = 1; i < XVECLEN (pat, 0); i++)
3859 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3861 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3862 emit_barrier_after (insn);
3864 if (slots)
3865 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3867 else
3868 /* It is probably more efficient to keep this with its current
3869 delay slot as a branch to a RETURN. */
3870 reorg_redirect_jump (jump_insn, real_label);
3873 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3874 new delay slots we have created. */
3875 if (real_return_label != NULL_RTX && --LABEL_NUSES (real_return_label) == 0)
3876 delete_related_insns (real_return_label);
3877 if (real_simple_return_label != NULL_RTX
3878 && --LABEL_NUSES (real_simple_return_label) == 0)
3879 delete_related_insns (real_simple_return_label);
3881 fill_simple_delay_slots (1);
3882 fill_simple_delay_slots (0);
3885 /* Try to find insns to place in delay slots. */
3887 void
3888 dbr_schedule (rtx first)
3890 rtx insn, next, epilogue_insn = 0;
3891 int i;
3892 bool need_return_insns;
3894 /* If the current function has no insns other than the prologue and
3895 epilogue, then do not try to fill any delay slots. */
3896 if (n_basic_blocks == NUM_FIXED_BLOCKS)
3897 return;
3899 /* Find the highest INSN_UID and allocate and initialize our map from
3900 INSN_UID's to position in code. */
3901 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3903 if (INSN_UID (insn) > max_uid)
3904 max_uid = INSN_UID (insn);
3905 if (NOTE_P (insn)
3906 && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3907 epilogue_insn = insn;
3910 uid_to_ruid = XNEWVEC (int, max_uid + 1);
3911 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3912 uid_to_ruid[INSN_UID (insn)] = i;
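/* uid_to_ruid now maps every INSN_UID to its ordinal position in the
   insn stream, so program order can be compared in O(1); for example,
   get_jump_flags classifies a branch as forward when
   uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)]. */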
3914 /* Initialize the list of insns that need filling. */
3915 if (unfilled_firstobj == 0)
3917 gcc_obstack_init (&unfilled_slots_obstack);
3918 unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
3921 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3923 rtx target;
3925 if (JUMP_P (insn))
3926 INSN_ANNULLED_BRANCH_P (insn) = 0;
3927 INSN_FROM_TARGET_P (insn) = 0;
3929 /* Skip vector tables. We can't get attributes for them. */
3930 if (JUMP_TABLE_DATA_P (insn))
3931 continue;
3933 if (num_delay_slots (insn) > 0)
3934 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3936 /* Ensure all jumps go to the last of a set of consecutive labels. */
3937 if (JUMP_P (insn)
3938 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3939 && !ANY_RETURN_P (JUMP_LABEL (insn))
3940 && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
3941 != JUMP_LABEL (insn)))
3942 redirect_jump (insn, target, 1);
3945 init_resource_info (epilogue_insn);
3947 /* Show we haven't computed an end-of-function label yet. */
3948 function_return_label = function_simple_return_label = NULL_RTX;
3950 /* Initialize the statistics for this function. */
3951 memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
3952 memset (num_filled_delays, 0, sizeof num_filled_delays);
3954 /* Now do the delay slot filling. Try everything twice in case earlier
3955 changes make more slots fillable. */
3957 for (reorg_pass_number = 0;
3958 reorg_pass_number < MAX_REORG_PASSES;
3959 reorg_pass_number++)
3961 fill_simple_delay_slots (1);
3962 fill_simple_delay_slots (0);
3963 fill_eager_delay_slots ();
3964 relax_delay_slots (first);
3967 /* If we made an end of function label, indicate that it is now
3968 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
3969 If it is now unused, delete it. */
3970 if (function_return_label && --LABEL_NUSES (function_return_label) == 0)
3971 delete_related_insns (function_return_label);
3972 if (function_simple_return_label
3973 && --LABEL_NUSES (function_simple_return_label) == 0)
3974 delete_related_insns (function_simple_return_label);
3976 need_return_insns = false;
3977 #ifdef HAVE_return
3978 need_return_insns |= HAVE_return && function_return_label != 0;
3979 #endif
3980 #ifdef HAVE_simple_return
3981 need_return_insns |= HAVE_simple_return && function_simple_return_label != 0;
3982 #endif
3983 if (need_return_insns)
3984 make_return_insns (first);
3986 /* Delete any USE insns made by update_block; subsequent passes don't need
3987 them or know how to deal with them. */
3988 for (insn = first; insn; insn = next)
3990 next = NEXT_INSN (insn);
3992 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
3993 && INSN_P (XEXP (PATTERN (insn), 0)))
3994 next = delete_related_insns (insn);
3997 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3999 /* It is not clear why the line below is needed, but it does seem to be. */
4000 unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
4002 if (dump_file)
4004 int i, j, need_comma;
4005 int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
4006 int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];
4008 for (reorg_pass_number = 0;
4009 reorg_pass_number < MAX_REORG_PASSES;
4010 reorg_pass_number++)
4012 fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
4013 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
4015 need_comma = 0;
4016 fprintf (dump_file, ";; Reorg function #%d\n", i);
4018 fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
4019 num_insns_needing_delays[i][reorg_pass_number]);
4021 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
4022 if (num_filled_delays[i][j][reorg_pass_number])
4024 if (need_comma)
4025 fprintf (dump_file, ", ");
4026 need_comma = 1;
4027 fprintf (dump_file, "%d got %d delays",
4028 num_filled_delays[i][j][reorg_pass_number], j);
4030 fprintf (dump_file, "\n");
4033 memset (total_delay_slots, 0, sizeof total_delay_slots);
4034 memset (total_annul_slots, 0, sizeof total_annul_slots);
4035 for (insn = first; insn; insn = NEXT_INSN (insn))
4037 if (! INSN_DELETED_P (insn)
4038 && NONJUMP_INSN_P (insn)
4039 && GET_CODE (PATTERN (insn)) != USE
4040 && GET_CODE (PATTERN (insn)) != CLOBBER)
4042 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
4044 rtx control;
4045 j = XVECLEN (PATTERN (insn), 0) - 1;
4046 if (j > MAX_DELAY_HISTOGRAM)
4047 j = MAX_DELAY_HISTOGRAM;
4048 control = XVECEXP (PATTERN (insn), 0, 0);
4049 if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
4050 total_annul_slots[j]++;
4051 else
4052 total_delay_slots[j]++;
4054 else if (num_delay_slots (insn) > 0)
4055 total_delay_slots[0]++;
4058 fprintf (dump_file, ";; Reorg totals: ");
4059 need_comma = 0;
4060 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
4062 if (total_delay_slots[j])
4064 if (need_comma)
4065 fprintf (dump_file, ", ");
4066 need_comma = 1;
4067 fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
4070 fprintf (dump_file, "\n");
4071 #if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
4072 fprintf (dump_file, ";; Reorg annuls: ");
4073 need_comma = 0;
4074 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
4076 if (total_annul_slots[j])
4078 if (need_comma)
4079 fprintf (dump_file, ", ");
4080 need_comma = 1;
4081 fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
4084 fprintf (dump_file, "\n");
4085 #endif
4086 fprintf (dump_file, "\n");
4089 /* For all JUMP insns, fill in branch prediction notes, so that during
4090 assembler output a target can set branch prediction bits in the code.
4091 We have to do this now, because up to this point the destinations of
4092 JUMPs can still be moved around and changed, but beyond this point
4093 they cannot. */
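/* The REG_BR_PRED note carries the ATTR_FLAG_* bits computed by
   get_jump_flags; a back end could, for instance, test
   INTVAL (XEXP (note, 0)) & ATTR_FLAG_forward when printing the branch
   to select a "predict taken" encoding (a sketch, not any existing
   target's code). */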
4094 for (insn = first; insn; insn = NEXT_INSN (insn))
4096 int pred_flags;
4098 if (NONJUMP_INSN_P (insn))
4100 rtx pat = PATTERN (insn);
4102 if (GET_CODE (pat) == SEQUENCE)
4103 insn = XVECEXP (pat, 0, 0);
4105 if (!JUMP_P (insn))
4106 continue;
4108 pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
4109 add_reg_note (insn, REG_BR_PRED, GEN_INT (pred_flags));
4111 free_resource_info ();
4112 free (uid_to_ruid);
4113 #ifdef DELAY_SLOTS_FOR_EPILOGUE
4114 /* The SPARC assembler, for instance, emits a warning when debug info
4115 is output into a delay slot. */
4117 rtx link;
4119 for (link = crtl->epilogue_delay_list;
4120 link;
4121 link = XEXP (link, 1))
4122 INSN_LOCATOR (XEXP (link, 0)) = 0;
4125 #endif
4126 crtl->dbr_scheduled_p = true;
4128 #endif /* DELAY_SLOTS */
4130 static bool
4131 gate_handle_delay_slots (void)
4133 #ifdef DELAY_SLOTS
4134 /* At -O0 dataflow info isn't updated after RA. */
4135 return optimize > 0 && flag_delayed_branch && !crtl->dbr_scheduled_p;
4136 #else
4137 return 0;
4138 #endif
4141 /* Run delay slot optimization. */
4142 static unsigned int
4143 rest_of_handle_delay_slots (void)
4145 #ifdef DELAY_SLOTS
4146 dbr_schedule (get_insns ());
4147 #endif
4148 return 0;
4151 struct rtl_opt_pass pass_delay_slots =
4154 RTL_PASS,
4155 "dbr", /* name */
4156 gate_handle_delay_slots, /* gate */
4157 rest_of_handle_delay_slots, /* execute */
4158 NULL, /* sub */
4159 NULL, /* next */
4160 0, /* static_pass_number */
4161 TV_DBR_SCHED, /* tv_id */
4162 0, /* properties_required */
4163 0, /* properties_provided */
4164 0, /* properties_destroyed */
4165 0, /* todo_flags_start */
4166 TODO_ggc_collect /* todo_flags_finish */
4170 /* Machine dependent reorg pass. */
4171 static bool
4172 gate_handle_machine_reorg (void)
4174 return targetm.machine_dependent_reorg != 0;
4178 static unsigned int
4179 rest_of_handle_machine_reorg (void)
4181 targetm.machine_dependent_reorg ();
4182 return 0;
4185 struct rtl_opt_pass pass_machine_reorg =
4188 RTL_PASS,
4189 "mach", /* name */
4190 gate_handle_machine_reorg, /* gate */
4191 rest_of_handle_machine_reorg, /* execute */
4192 NULL, /* sub */
4193 NULL, /* next */
4194 0, /* static_pass_number */
4195 TV_MACH_DEP, /* tv_id */
4196 0, /* properties_required */
4197 0, /* properties_provided */
4198 0, /* properties_destroyed */
4199 0, /* todo_flags_start */
4200 TODO_ggc_collect /* todo_flags_finish */