1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992-2015 Free Software Foundation, Inc.
3 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
4 Hacked by Michael Tiemann (tiemann@cygnus.com).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Instruction reorganization pass.
24 This pass runs after register allocation and final jump
25 optimization. It should be the last pass to run before peephole.
26 It serves primarily to fill delay slots of insns, typically branch
27 and call insns. Other insns typically involve more complicated
28 interactions of data dependencies and resource constraints, and
29 are better handled by scheduling before register allocation (by the
30 function `schedule_insns').
32 The Branch Penalty is the number of extra cycles that are needed to
33 execute a branch insn. On an ideal machine, branches take a single
34 cycle, and the Branch Penalty is 0. Several RISC machines approach
35 branch delays differently:
37 The MIPS has a single branch delay slot. Most insns
38 (except other branches) can be used to fill this slot. When the
39 slot is filled, two insns execute in two cycles, reducing the
40 branch penalty to zero.
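   For instance (an illustrative sketch rather than real compiler
   output), filling the MIPS slot moves an insn from before the branch
   into the slot, so the nop that would otherwise occupy it disappears:

	(before filling)		(after filling)
	addu $4,$4,1			beq $2,$0,L2
	beq $2,$0,L2			addu $4,$4,1	; runs in the delay slot
	nop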
42 The SPARC always has a branch delay slot, but its effects can be
43 annulled when the branch is not taken. This means that, failing to
44 find other sources of insns, we can hoist an insn from the branch
45 target that would only be safe to execute knowing that the branch
46 is taken.
48 The HP-PA always has a branch delay slot. For unconditional branches
49 its effects can be annulled when the branch is taken. The effects
50 of the delay slot in a conditional branch can be nullified for forward
51 taken branches, or for untaken backward branches. This means
52 we can hoist insns from the fall-through path for forward branches or
53 steal insns from the target of backward branches.
55 The TMS320C3x and C4x have three branch delay slots. When the three
56 slots are filled, the branch penalty is zero. Most insns can fill the
57 delay slots except jump insns.
59 Three techniques for filling delay slots have been implemented so far:
61 (1) `fill_simple_delay_slots' is the simplest, most efficient way
62 to fill delay slots. This pass first looks for insns which come
63 from before the branch and which are safe to execute after the
64 branch. Then it searches after the insn requiring delay slots or,
65 in the case of a branch, for insns that are after the point at
66 which the branch merges into the fallthrough code, if such a point
67 exists. When such insns are found, the branch penalty decreases
68 and no code expansion takes place.
70 (2) `fill_eager_delay_slots' is more complicated: it is used for
71 scheduling conditional jumps, or for scheduling jumps which cannot
72 be filled using (1). A machine need not have annulled jumps to use
73 this strategy, but it helps (by keeping more options open).
74 `fill_eager_delay_slots' tries to guess the direction the branch
75 will go; if it guesses right 100% of the time, it can reduce the
76 branch penalty as much as `fill_simple_delay_slots' does. If it
77 guesses wrong 100% of the time, it might as well schedule nops. When
78 `fill_eager_delay_slots' takes insns from the fall-through path of
79 the jump, usually there is no code expansion; when it takes insns
80 from the branch target, there is code expansion if it is not the
81 only way to reach that target.
83 (3) `relax_delay_slots' uses a set of rules to simplify code that
84 has been reorganized by (1) and (2). It finds cases where
85 a conditional test can be eliminated, jumps can be threaded, extra
86 insns can be eliminated, etc. It is the job of (1) and (2) to do a
87 good job of scheduling locally; `relax_delay_slots' takes care of
88 making the various individual schedules work well together. It is
89 especially tuned to handle the control flow interactions of branch
90 insns. It does nothing for insns with delay slots that do not
91 branch.
93 On machines that use CC0, we are very conservative. We will not make
94 a copy of an insn involving CC0 since we want to maintain a 1-1
95 correspondence between the insn that sets CC0 and the one that uses it. The insns are
96 allowed to be separated by placing an insn that sets CC0 (but not an insn
97 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
98 delay slot. In that case, we point each insn at the other with REG_CC_USER
99 and REG_CC_SETTER notes. Note that these restrictions affect very few
100 machines because most RISC machines with delay slots will not use CC0
101 (the RT is the only known exception at this point). */
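/* As a purely illustrative sketch (generic RTL, not taken from any port),
   the notes described above tie the pair together like this:

     (insn 10 ... (set (cc0) (compare (reg:SI 1) (const_int 0))))
	 ;; carries a REG_CC_USER note pointing at insn 12
     (jump_insn 12 ... (set (pc) (if_then_else (lt (cc0) (const_int 0))
					       (label_ref L) (pc))))
	 ;; carries a REG_CC_SETTER note pointing at insn 10  */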
103 #include "config.h"
104 #include "system.h"
105 #include "coretypes.h"
106 #include "tm.h"
107 #include "diagnostic-core.h"
108 #include "rtl.h"
109 #include "tm_p.h"
110 #include "symtab.h"
111 #include "hashtab.h"
112 #include "hash-set.h"
113 #include "vec.h"
114 #include "machmode.h"
115 #include "hard-reg-set.h"
116 #include "input.h"
117 #include "function.h"
118 #include "flags.h"
119 #include "statistics.h"
120 #include "double-int.h"
121 #include "real.h"
122 #include "fixed-value.h"
123 #include "alias.h"
124 #include "wide-int.h"
125 #include "inchash.h"
126 #include "tree.h"
127 #include "insn-config.h"
128 #include "expmed.h"
129 #include "dojump.h"
130 #include "explow.h"
131 #include "calls.h"
132 #include "emit-rtl.h"
133 #include "varasm.h"
134 #include "stmt.h"
135 #include "expr.h"
136 #include "conditions.h"
137 #include "predict.h"
138 #include "dominance.h"
139 #include "cfg.h"
140 #include "basic-block.h"
141 #include "regs.h"
142 #include "recog.h"
143 #include "obstack.h"
144 #include "insn-attr.h"
145 #include "resource.h"
146 #include "except.h"
147 #include "params.h"
148 #include "target.h"
149 #include "tree-pass.h"
151 #ifdef DELAY_SLOTS
153 #ifndef ANNUL_IFTRUE_SLOTS
154 #define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
155 #endif
156 #ifndef ANNUL_IFFALSE_SLOTS
157 #define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
158 #endif
161 /* First, some functions that were used before GCC got a control flow graph.
162 These functions are now only used here in reorg.c, and have therefore
163 been moved here to avoid inadvertent misuse elsewhere in the compiler. */
165 /* Return the last label to mark the same position as LABEL. Return LABEL
166 itself if it is null or any return rtx. */
168 static rtx
169 skip_consecutive_labels (rtx label_or_return)
171 rtx_insn *insn;
173 if (label_or_return && ANY_RETURN_P (label_or_return))
174 return label_or_return;
176 rtx_insn *label = as_a <rtx_insn *> (label_or_return);
178 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
179 if (LABEL_P (insn))
180 label = insn;
182 return label;
185 #ifdef HAVE_cc0
186 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
187 and REG_CC_USER notes so we can find it. */
189 static void
190 link_cc0_insns (rtx insn)
192 rtx user = next_nonnote_insn (insn);
194 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
195 user = XVECEXP (PATTERN (user), 0, 0);
197 add_reg_note (user, REG_CC_SETTER, insn);
198 add_reg_note (insn, REG_CC_USER, user);
200 #endif
202 /* Insns which have delay slots that have not yet been filled. */
204 static struct obstack unfilled_slots_obstack;
205 static rtx *unfilled_firstobj;
207 /* Define macros to refer to the first and last slot containing unfilled
208 insns. These are used because the list may move and its address
209 should be recomputed at each use. */
211 #define unfilled_slots_base \
212 ((rtx_insn **) obstack_base (&unfilled_slots_obstack))
214 #define unfilled_slots_next \
215 ((rtx_insn **) obstack_next_free (&unfilled_slots_obstack))
217 /* Points to the label before the end of the function, or before a
218 return insn. */
219 static rtx_code_label *function_return_label;
220 /* Likewise for a simple_return. */
221 static rtx_code_label *function_simple_return_label;
223 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
224 not always monotonically increase. */
225 static int *uid_to_ruid;
227 /* Highest valid index in `uid_to_ruid'. */
228 static int max_uid;
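/* For example, get_jump_flags below classifies a jump as forward exactly
   when uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)];
   comparing raw INSN_UID values instead would not be reliable. */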
230 static int stop_search_p (rtx, int);
231 static int resource_conflicts_p (struct resources *, struct resources *);
232 static int insn_references_resource_p (rtx, struct resources *, bool);
233 static int insn_sets_resource_p (rtx, struct resources *, bool);
234 static rtx_code_label *find_end_label (rtx);
235 static rtx_insn *emit_delay_sequence (rtx_insn *, rtx_insn_list *, int);
236 static rtx_insn_list *add_to_delay_list (rtx_insn *, rtx_insn_list *);
237 static rtx_insn *delete_from_delay_slot (rtx_insn *);
238 static void delete_scheduled_jump (rtx_insn *);
239 static void note_delay_statistics (int, int);
240 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
241 static rtx_insn_list *optimize_skip (rtx_insn *);
242 #endif
243 static int get_jump_flags (const rtx_insn *, rtx);
244 static int mostly_true_jump (rtx);
245 static rtx get_branch_condition (const rtx_insn *, rtx);
246 static int condition_dominates_p (rtx, const rtx_insn *);
247 static int redirect_with_delay_slots_safe_p (rtx_insn *, rtx, rtx);
248 static int redirect_with_delay_list_safe_p (rtx_insn *, rtx, rtx_insn_list *);
249 static int check_annul_list_true_false (int, rtx);
250 static rtx_insn_list *steal_delay_list_from_target (rtx_insn *, rtx,
251 rtx_sequence *,
252 rtx_insn_list *,
253 struct resources *,
254 struct resources *,
255 struct resources *,
256 int, int *, int *,
257 rtx *);
258 static rtx_insn_list *steal_delay_list_from_fallthrough (rtx_insn *, rtx,
259 rtx_sequence *,
260 rtx_insn_list *,
261 struct resources *,
262 struct resources *,
263 struct resources *,
264 int, int *, int *);
265 static void try_merge_delay_insns (rtx, rtx_insn *);
266 static rtx redundant_insn (rtx, rtx_insn *, rtx);
267 static int own_thread_p (rtx, rtx, int);
268 static void update_block (rtx_insn *, rtx);
269 static int reorg_redirect_jump (rtx_insn *, rtx);
270 static void update_reg_dead_notes (rtx, rtx);
271 static void fix_reg_dead_note (rtx, rtx);
272 static void update_reg_unused_notes (rtx, rtx);
273 static void fill_simple_delay_slots (int);
274 static rtx_insn_list *fill_slots_from_thread (rtx_insn *, rtx, rtx, rtx,
275 int, int, int, int,
276 int *, rtx_insn_list *);
277 static void fill_eager_delay_slots (void);
278 static void relax_delay_slots (rtx_insn *);
279 static void make_return_insns (rtx_insn *);
281 /* A wrapper around next_active_insn which takes care to return ret_rtx
282 unchanged. */
284 static rtx
285 first_active_target_insn (rtx insn)
287 if (ANY_RETURN_P (insn))
288 return insn;
289 return next_active_insn (as_a <rtx_insn *> (insn));
292 /* Return true iff INSN is a simplejump, or any kind of return insn. */
294 static bool
295 simplejump_or_return_p (rtx insn)
297 return (JUMP_P (insn)
298 && (simplejump_p (as_a <rtx_insn *> (insn))
299 || ANY_RETURN_P (PATTERN (insn))));
302 /* Return TRUE if this insn should stop the search for insns to fill delay
303 slots. LABELS_P indicates that labels should terminate the search.
304 In all cases, jumps terminate the search. */
306 static int
307 stop_search_p (rtx insn, int labels_p)
309 if (insn == 0)
310 return 1;
312 /* If the insn can throw an exception that is caught within the function,
313 it may effectively perform a jump from the viewpoint of the function.
314 Therefore treat it like a jump. */
315 if (can_throw_internal (insn))
316 return 1;
318 switch (GET_CODE (insn))
320 case NOTE:
321 case CALL_INSN:
322 return 0;
324 case CODE_LABEL:
325 return labels_p;
327 case JUMP_INSN:
328 case BARRIER:
329 return 1;
331 case INSN:
332 /* OK unless it contains a delay slot or is an `asm' insn of some type.
333 We don't know anything about these. */
334 return (GET_CODE (PATTERN (insn)) == SEQUENCE
335 || GET_CODE (PATTERN (insn)) == ASM_INPUT
336 || asm_noperands (PATTERN (insn)) >= 0);
338 default:
339 gcc_unreachable ();
343 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
344 resource set contains a volatile memory reference. Otherwise, return FALSE. */
346 static int
347 resource_conflicts_p (struct resources *res1, struct resources *res2)
349 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
350 || res1->volatil || res2->volatil)
351 return 1;
353 return hard_reg_set_intersect_p (res1->regs, res2->regs);
356 /* Return TRUE if any resource marked in RES, a `struct resources', is
357 referenced by INSN. If INCLUDE_DELAYED_EFFECTS is set, resources used
358 by the called routine are taken into account as well.
360 We compute this by computing all the resources referenced by INSN and
361 seeing if this conflicts with RES. It might be faster to directly check
362 ourselves, and this is the way it used to work, but it means duplicating
363 a large block of complex code. */
365 static int
366 insn_references_resource_p (rtx insn, struct resources *res,
367 bool include_delayed_effects)
369 struct resources insn_res;
371 CLEAR_RESOURCE (&insn_res);
372 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
373 return resource_conflicts_p (&insn_res, res);
376 /* Return TRUE if INSN modifies resources that are marked in RES.
377 INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
378 included. CC0 is only modified if it is explicitly set; see comments
379 in front of mark_set_resources for details. */
381 static int
382 insn_sets_resource_p (rtx insn, struct resources *res,
383 bool include_delayed_effects)
385 struct resources insn_sets;
387 CLEAR_RESOURCE (&insn_sets);
388 mark_set_resources (insn, &insn_sets, 0,
389 (include_delayed_effects
390 ? MARK_SRC_DEST_CALL
391 : MARK_SRC_DEST));
392 return resource_conflicts_p (&insn_sets, res);
395 /* Find a label at the end of the function or before a RETURN. If there
396 is none, try to make one. If that fails, return 0.
398 The property of such a label is that it is placed just before the
399 epilogue or a bare RETURN insn, so that another bare RETURN can be
400 turned into a jump to the label unconditionally. In particular, the
401 label cannot be placed before a RETURN insn with a filled delay slot.
403 ??? There may be a problem with the current implementation. Suppose
404 we start with a bare RETURN insn and call find_end_label. It may set
405 function_return_label just before the RETURN. Suppose the machinery
406 is able to fill the delay slot of the RETURN insn afterwards. Then
407 function_return_label is no longer valid according to the property
408 described above and find_end_label will still return it unmodified.
409 Note that this is probably mitigated by the following observation:
410 once function_return_label is made, it is very likely the target of
411 a jump, so filling the delay slot of the RETURN will be much more
412 difficult.
413 KIND is either simple_return_rtx or ret_rtx, indicating which type of
414 return we're looking for. */
416 static rtx_code_label *
417 find_end_label (rtx kind)
419 rtx_insn *insn;
420 rtx_code_label **plabel;
422 if (kind == ret_rtx)
423 plabel = &function_return_label;
424 else
426 gcc_assert (kind == simple_return_rtx);
427 plabel = &function_simple_return_label;
430 /* If we found one previously, return it. */
431 if (*plabel)
432 return *plabel;
434 /* Otherwise, see if there is a label at the end of the function. If there
435 is, it must be that RETURN insns aren't needed, so that is our return
436 label and we don't have to do anything else. */
438 insn = get_last_insn ();
439 while (NOTE_P (insn)
440 || (NONJUMP_INSN_P (insn)
441 && (GET_CODE (PATTERN (insn)) == USE
442 || GET_CODE (PATTERN (insn)) == CLOBBER)))
443 insn = PREV_INSN (insn);
445 /* When a target threads its epilogue we might already have a
446 suitable return insn. If so, put a label before it for the
447 function_return_label. */
448 if (BARRIER_P (insn)
449 && JUMP_P (PREV_INSN (insn))
450 && PATTERN (PREV_INSN (insn)) == kind)
452 rtx_insn *temp = PREV_INSN (PREV_INSN (insn));
453 rtx_code_label *label = gen_label_rtx ();
454 LABEL_NUSES (label) = 0;
456 /* Put the label before any USE insns that may precede the RETURN
457 insn. */
458 while (GET_CODE (temp) == USE)
459 temp = PREV_INSN (temp);
461 emit_label_after (label, temp);
462 *plabel = label;
465 else if (LABEL_P (insn))
466 *plabel = as_a <rtx_code_label *> (insn);
467 else
469 rtx_code_label *label = gen_label_rtx ();
470 LABEL_NUSES (label) = 0;
471 /* If the basic block reorder pass moves the return insn to
472 some other place try to locate it again and put our
473 function_return_label there. */
474 while (insn && ! (JUMP_P (insn) && (PATTERN (insn) == kind)))
475 insn = PREV_INSN (insn);
476 if (insn)
478 insn = PREV_INSN (insn);
480 /* Put the label before any USE insns that may precede the
481 RETURN insn. */
482 while (GET_CODE (insn) == USE)
483 insn = PREV_INSN (insn);
485 emit_label_after (label, insn);
487 else
489 #ifdef HAVE_epilogue
490 if (HAVE_epilogue
491 #ifdef HAVE_return
492 && ! HAVE_return
493 #endif
495 /* The RETURN insn has its delay slot filled so we cannot
496 emit the label just before it. Since we already have
497 an epilogue and cannot emit a new RETURN, we cannot
498 emit the label at all. */
499 return NULL;
500 #endif /* HAVE_epilogue */
502 /* Otherwise, make a new label and emit a RETURN and BARRIER,
503 if needed. */
504 emit_label (label);
505 #ifdef HAVE_return
506 if (HAVE_return)
508 /* The return we make may have delay slots too. */
509 rtx pat = gen_return ();
510 rtx_insn *insn = emit_jump_insn (pat);
511 set_return_jump_label (insn);
512 emit_barrier ();
513 if (num_delay_slots (insn) > 0)
514 obstack_ptr_grow (&unfilled_slots_obstack, insn);
516 #endif
518 *plabel = label;
521 /* Show one additional use for this label so it won't go away until
522 we are done. */
523 ++LABEL_NUSES (*plabel);
525 return *plabel;
528 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
529 the pattern of INSN with the SEQUENCE.
531 Returns the insn containing the SEQUENCE that replaces INSN. */
533 static rtx_insn *
534 emit_delay_sequence (rtx_insn *insn, rtx_insn_list *list, int length)
536 /* Allocate the rtvec to hold the insns and the SEQUENCE. */
537 rtvec seqv = rtvec_alloc (length + 1);
538 rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
539 rtx_insn *seq_insn = make_insn_raw (seq);
541 /* If INSN has a location, use it for SEQ_INSN. If INSN does
542 not have a location, but one of the delayed insns does, we pick up a
543 location from there later. */
544 INSN_LOCATION (seq_insn) = INSN_LOCATION (insn);
546 /* Unlink INSN from the insn chain, so that we can put it into
547 the SEQUENCE. Remember where we want to emit SEQUENCE in AFTER. */
548 rtx after = PREV_INSN (insn);
549 remove_insn (insn);
550 SET_NEXT_INSN (insn) = SET_PREV_INSN (insn) = NULL;
552 /* Build our SEQUENCE and rebuild the insn chain. */
553 int i = 1;
554 start_sequence ();
555 XVECEXP (seq, 0, 0) = emit_insn (insn);
556 for (rtx_insn_list *li = list; li; li = li->next (), i++)
558 rtx_insn *tem = li->insn ();
559 rtx note, next;
561 /* Show that this copy of the insn isn't deleted. */
562 tem->set_undeleted ();
564 /* Unlink insn from its original place, and re-emit it into
565 the sequence. */
566 SET_NEXT_INSN (tem) = SET_PREV_INSN (tem) = NULL;
567 XVECEXP (seq, 0, i) = emit_insn (tem);
569 /* The SPARC assembler, for instance, emits a warning when debug info
570 is output into the delay slot. */
571 if (INSN_LOCATION (tem) && !INSN_LOCATION (seq_insn))
572 INSN_LOCATION (seq_insn) = INSN_LOCATION (tem);
573 INSN_LOCATION (tem) = 0;
575 for (note = REG_NOTES (tem); note; note = next)
577 next = XEXP (note, 1);
578 switch (REG_NOTE_KIND (note))
580 case REG_DEAD:
581 /* Remove any REG_DEAD notes because we can't rely on them now
582 that the insn has been moved. */
583 remove_note (tem, note);
584 break;
586 case REG_LABEL_OPERAND:
587 case REG_LABEL_TARGET:
588 /* Keep the label reference count up to date. */
589 if (LABEL_P (XEXP (note, 0)))
590 LABEL_NUSES (XEXP (note, 0)) ++;
591 break;
593 default:
594 break;
598 end_sequence ();
599 gcc_assert (i == length + 1);
601 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
602 add_insn_after (seq_insn, after, NULL);
604 return seq_insn;
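/* The result, roughly sketched, is a single insn whose pattern is a
   SEQUENCE: element 0 is the insn that required the slots, and the
   remaining LENGTH elements are the slot insns:

     (insn 30 ... (sequence [
	 (jump_insn 10 ...)	;; the insn needing delay slots
	 (insn 11 ...)		;; delay slot 1, and so on
     ]))  */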
607 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
608 be in the order in which the insns are to be executed. */
610 static rtx_insn_list *
611 add_to_delay_list (rtx_insn *insn, rtx_insn_list *delay_list)
613 /* If we have an empty list, just make a new list element. If
614 INSN has its block number recorded, clear it since we may
615 be moving the insn to a new block. */
617 if (delay_list == 0)
619 clear_hashed_info_for_insn (insn);
620 return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
623 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
624 list. */
625 XEXP (delay_list, 1) = add_to_delay_list (insn, delay_list->next ());
627 return delay_list;
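/* Callers build the list in execution order by repeated appends, e.g.

     delay_list = add_to_delay_list (trial, delay_list);

   as done in optimize_skip and the fill_* routines below. */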
630 /* Delete INSN from the delay slot of the insn that it is in, which may
631 produce an insn with no delay slots. Return the new insn. */
633 static rtx_insn *
634 delete_from_delay_slot (rtx_insn *insn)
636 rtx_insn *trial, *seq_insn, *prev;
637 rtx_sequence *seq;
638 rtx_insn_list *delay_list = 0;
639 int i;
640 int had_barrier = 0;
642 /* We first must find the insn containing the SEQUENCE with INSN in its
643 delay slot. Do this by finding an insn, TRIAL, where
644 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
646 for (trial = insn;
647 PREV_INSN (NEXT_INSN (trial)) == trial;
648 trial = NEXT_INSN (trial))
651 seq_insn = PREV_INSN (NEXT_INSN (trial));
652 seq = as_a <rtx_sequence *> (PATTERN (seq_insn));
654 if (NEXT_INSN (seq_insn) && BARRIER_P (NEXT_INSN (seq_insn)))
655 had_barrier = 1;
657 /* Create a delay list consisting of all the insns other than the one
658 we are deleting (unless we were the only one). */
659 if (seq->len () > 2)
660 for (i = 1; i < seq->len (); i++)
661 if (seq->insn (i) != insn)
662 delay_list = add_to_delay_list (seq->insn (i), delay_list);
664 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
665 list, and rebuild the delay list if non-empty. */
666 prev = PREV_INSN (seq_insn);
667 trial = seq->insn (0);
668 delete_related_insns (seq_insn);
669 add_insn_after (trial, prev, NULL);
671 /* If there was a barrier after the old SEQUENCE, re-emit it. */
672 if (had_barrier)
673 emit_barrier_after (trial);
675 /* If there are any delay insns, re-emit them. Otherwise clear the
676 annul flag. */
677 if (delay_list)
678 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
679 else if (JUMP_P (trial))
680 INSN_ANNULLED_BRANCH_P (trial) = 0;
682 INSN_FROM_TARGET_P (insn) = 0;
684 /* Show we need to fill this insn again. */
685 obstack_ptr_grow (&unfilled_slots_obstack, trial);
687 return trial;
690 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
691 the insn that sets CC0 for it and delete it too. */
693 static void
694 delete_scheduled_jump (rtx_insn *insn)
696 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
697 delete the insn that sets the condition code, but it is hard to find it.
698 Since this case is rare anyway, don't bother trying; there would likely
699 be other insns that became dead anyway, which we wouldn't know to
700 delete. */
702 #ifdef HAVE_cc0
703 if (reg_mentioned_p (cc0_rtx, insn))
705 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
707 /* If a reg-note was found, it points to an insn to set CC0. This
708 insn is in the delay list of some other insn. So delete it from
709 the delay list it was in. */
710 if (note)
712 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
713 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
714 delete_from_delay_slot (as_a <rtx_insn *> (XEXP (note, 0)));
716 else
718 /* The insn setting CC0 is our previous insn, but it may be in
719 a delay slot. It will be the last insn in the delay slot, if
720 it is. */
721 rtx_insn *trial = previous_insn (insn);
722 if (NOTE_P (trial))
723 trial = prev_nonnote_insn (trial);
724 if (sets_cc0_p (PATTERN (trial)) != 1
725 || FIND_REG_INC_NOTE (trial, NULL_RTX))
726 return;
727 if (PREV_INSN (NEXT_INSN (trial)) == trial)
728 delete_related_insns (trial);
729 else
730 delete_from_delay_slot (trial);
733 #endif
735 delete_related_insns (insn);
738 /* Counters for delay-slot filling. */
740 #define NUM_REORG_FUNCTIONS 2
741 #define MAX_DELAY_HISTOGRAM 3
742 #define MAX_REORG_PASSES 2
744 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
746 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
748 static int reorg_pass_number;
750 static void
751 note_delay_statistics (int slots_filled, int index)
753 num_insns_needing_delays[index][reorg_pass_number]++;
754 if (slots_filled > MAX_DELAY_HISTOGRAM)
755 slots_filled = MAX_DELAY_HISTOGRAM;
756 num_filled_delays[index][slots_filled][reorg_pass_number]++;
759 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
761 /* Optimize the following cases:
763 1. When a conditional branch skips over only one instruction,
764 use an annulling branch and put that insn in the delay slot.
765 Either use a branch that annuls when the condition is true, or
766 invert the test and use a branch that annuls when the condition
767 is false. This saves insns, since otherwise we must copy an insn
768 from the L1 target.
770 (orig) (skip) (otherwise)
771 Bcc.n L1 Bcc',a L1 Bcc,a L1'
772 insn insn insn2
773 L1: L1: L1:
774 insn2 insn2 insn2
775 insn3 insn3 L1':
776 insn3
778 2. When a conditional branch skips over only one instruction,
779 and after that, it unconditionally branches somewhere else,
780 perform a similar optimization. This saves executing the
781 second branch in the case where the inverted condition is true.
783 Bcc.n L1 Bcc',a L2
784 insn insn
785 L1: L1:
786 Bra L2 Bra L2
788 INSN is a JUMP_INSN.
790 This should be expanded to skip over N insns, where N is the number
791 of delay slots required. */
793 static rtx_insn_list *
794 optimize_skip (rtx_insn *insn)
796 rtx_insn *trial = next_nonnote_insn (insn);
797 rtx_insn *next_trial = next_active_insn (trial);
798 rtx_insn_list *delay_list = 0;
799 int flags;
801 flags = get_jump_flags (insn, JUMP_LABEL (insn));
803 if (trial == 0
804 || !NONJUMP_INSN_P (trial)
805 || GET_CODE (PATTERN (trial)) == SEQUENCE
806 || recog_memoized (trial) < 0
807 || (! eligible_for_annul_false (insn, 0, trial, flags)
808 && ! eligible_for_annul_true (insn, 0, trial, flags))
809 || can_throw_internal (trial))
810 return 0;
812 /* There are two cases where we are just executing one insn (we assume
813 here that a branch requires only one insn; this should be generalized
814 at some point): Where the branch goes around a single insn or where
815 we have one insn followed by a branch to the same label we branch to.
816 In both of these cases, inverting the jump and annulling the delay
817 slot give the same effect in fewer insns. */
818 if (next_trial == next_active_insn (JUMP_LABEL (insn))
819 || (next_trial != 0
820 && simplejump_or_return_p (next_trial)
821 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)))
823 if (eligible_for_annul_false (insn, 0, trial, flags))
825 if (invert_jump (insn, JUMP_LABEL (insn), 1))
826 INSN_FROM_TARGET_P (trial) = 1;
827 else if (! eligible_for_annul_true (insn, 0, trial, flags))
828 return 0;
831 delay_list = add_to_delay_list (trial, NULL);
832 next_trial = next_active_insn (trial);
833 update_block (trial, trial);
834 delete_related_insns (trial);
836 /* Also, if we are targeting an unconditional
837 branch, thread our jump to the target of that branch. Don't
838 change this into a RETURN here, because it may not accept what
839 we have in the delay slot. We'll fix this up later. */
840 if (next_trial && simplejump_or_return_p (next_trial))
842 rtx target_label = JUMP_LABEL (next_trial);
843 if (ANY_RETURN_P (target_label))
844 target_label = find_end_label (target_label);
846 if (target_label)
848 /* Recompute the flags based on TARGET_LABEL since threading
849 the jump to TARGET_LABEL may change the direction of the
850 jump (which may change the circumstances in which the
851 delay slot is nullified). */
852 flags = get_jump_flags (insn, target_label);
853 if (eligible_for_annul_true (insn, 0, trial, flags))
854 reorg_redirect_jump (insn, target_label);
858 INSN_ANNULLED_BRANCH_P (insn) = 1;
861 return delay_list;
863 #endif
865 /* Encode and return branch direction and prediction information for
866 INSN assuming it will jump to LABEL.
868 Unconditional branches return no direction information and
869 are predicted as very likely taken. */
871 static int
872 get_jump_flags (const rtx_insn *insn, rtx label)
874 int flags;
876 /* get_jump_flags can be passed any insn with delay slots; these may
877 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
878 direction information, and only if they are conditional jumps.
880 If LABEL is a return, then there is no way to determine the branch
881 direction. */
882 if (JUMP_P (insn)
883 && (condjump_p (insn) || condjump_in_parallel_p (insn))
884 && !ANY_RETURN_P (label)
885 && INSN_UID (insn) <= max_uid
886 && INSN_UID (label) <= max_uid)
887 flags
888 = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
889 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
890 /* No valid direction information. */
891 else
892 flags = 0;
894 return flags;
897 /* Return truth value of the statement that this branch
898 is mostly taken. If we think that the branch is extremely likely
899 to be taken, we return 2. If the branch is slightly more likely to be
900 taken, return 1. If the branch is slightly less likely to be taken,
901 return 0, and if the branch is highly unlikely to be taken, return -1. */
903 static int
904 mostly_true_jump (rtx jump_insn)
906 /* If branch probabilities are available, then use that number since it
907 always gives a correct answer. */
908 rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
909 if (note)
911 int prob = XINT (note, 0);
913 if (prob >= REG_BR_PROB_BASE * 9 / 10)
914 return 2;
915 else if (prob >= REG_BR_PROB_BASE / 2)
916 return 1;
917 else if (prob >= REG_BR_PROB_BASE / 10)
918 return 0;
919 else
920 return -1;
923 /* If there is no note, assume branches are not taken.
924 This should be rare. */
925 return 0;
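/* Numerically, with REG_BR_PROB_BASE defined as 10000: a probability of
   9000 or more returns 2, 5000-8999 returns 1, 1000-4999 returns 0, and
   anything below 1000 returns -1. */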
928 /* Return the condition under which INSN will branch to TARGET. If TARGET
929 is zero, return the condition under which INSN will return. If INSN is
930 an unconditional branch, return const_true_rtx. If INSN isn't a simple
931 type of jump, or it doesn't go to TARGET, return 0. */
933 static rtx
934 get_branch_condition (const rtx_insn *insn, rtx target)
936 rtx pat = PATTERN (insn);
937 rtx src;
939 if (condjump_in_parallel_p (insn))
940 pat = XVECEXP (pat, 0, 0);
942 if (ANY_RETURN_P (pat) && pat == target)
943 return const_true_rtx;
945 if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
946 return 0;
948 src = SET_SRC (pat);
949 if (GET_CODE (src) == LABEL_REF && LABEL_REF_LABEL (src) == target)
950 return const_true_rtx;
952 else if (GET_CODE (src) == IF_THEN_ELSE
953 && XEXP (src, 2) == pc_rtx
954 && ((GET_CODE (XEXP (src, 1)) == LABEL_REF
955 && LABEL_REF_LABEL (XEXP (src, 1)) == target)
956 || (ANY_RETURN_P (XEXP (src, 1)) && XEXP (src, 1) == target)))
957 return XEXP (src, 0);
959 else if (GET_CODE (src) == IF_THEN_ELSE
960 && XEXP (src, 1) == pc_rtx
961 && ((GET_CODE (XEXP (src, 2)) == LABEL_REF
962 && LABEL_REF_LABEL (XEXP (src, 2)) == target)
963 || (ANY_RETURN_P (XEXP (src, 2)) && XEXP (src, 2) == target)))
965 enum rtx_code rev;
966 rev = reversed_comparison_code (XEXP (src, 0), insn);
967 if (rev != UNKNOWN)
968 return gen_rtx_fmt_ee (rev, GET_MODE (XEXP (src, 0)),
969 XEXP (XEXP (src, 0), 0),
970 XEXP (XEXP (src, 0), 1));
973 return 0;
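/* For instance, given a conditional branch of the usual shape (a generic
   RTL sketch, not tied to any port):

     (set (pc) (if_then_else (eq (reg) (const_int 0))
			     (label_ref L) (pc)))

   get_branch_condition with TARGET = L yields (eq (reg) (const_int 0));
   with the two arms swapped, the reversed condition (ne ...) is built
   via reversed_comparison_code instead. */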
976 /* Return nonzero if CONDITION is more strict than the condition of
977 INSN, i.e., if INSN will always branch if CONDITION is true. */
979 static int
980 condition_dominates_p (rtx condition, const rtx_insn *insn)
982 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
983 enum rtx_code code = GET_CODE (condition);
984 enum rtx_code other_code;
986 if (rtx_equal_p (condition, other_condition)
987 || other_condition == const_true_rtx)
988 return 1;
990 else if (condition == const_true_rtx || other_condition == 0)
991 return 0;
993 other_code = GET_CODE (other_condition);
994 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
995 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
996 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
997 return 0;
999 return comparison_dominates_p (code, other_code);
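/* Example: CONDITION (eq (reg) (const_int 0)) dominates an INSN that
   branches on (le (reg) (const_int 0)) over the same operands, since EQ
   being true guarantees LE is true, so INSN will certainly branch. */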
1002 /* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
1003 any insns already in the delay slot of JUMP. */
1005 static int
1006 redirect_with_delay_slots_safe_p (rtx_insn *jump, rtx newlabel, rtx seq)
1008 int flags, i;
1009 rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (seq));
1011 /* Make sure all the delay slots of this jump would still
1012 be valid after threading the jump. If they are still
1013 valid, then return nonzero. */
1015 flags = get_jump_flags (jump, newlabel);
1016 for (i = 1; i < pat->len (); i++)
1017 if (! (
1018 #ifdef ANNUL_IFFALSE_SLOTS
1019 (INSN_ANNULLED_BRANCH_P (jump)
1020 && INSN_FROM_TARGET_P (pat->insn (i)))
1021 ? eligible_for_annul_false (jump, i - 1, pat->insn (i), flags) :
1022 #endif
1023 #ifdef ANNUL_IFTRUE_SLOTS
1024 (INSN_ANNULLED_BRANCH_P (jump)
1025 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1026 ? eligible_for_annul_true (jump, i - 1, pat->insn (i), flags) :
1027 #endif
1028 eligible_for_delay (jump, i - 1, pat->insn (i), flags)))
1029 break;
1031 return (i == pat->len ());
1034 /* Return nonzero if redirecting JUMP to NEWLABEL does not invalidate
1035 any insns we wish to place in the delay slot of JUMP. */
1037 static int
1038 redirect_with_delay_list_safe_p (rtx_insn *jump, rtx newlabel,
1039 rtx_insn_list *delay_list)
1041 int flags, i;
1042 rtx_insn_list *li;
1044 /* Make sure all the insns in DELAY_LIST would still be
1045 valid after threading the jump. If they are still
1046 valid, then return nonzero. */
1048 flags = get_jump_flags (jump, newlabel);
1049 for (li = delay_list, i = 0; li; li = li->next (), i++)
1050 if (! (
1051 #ifdef ANNUL_IFFALSE_SLOTS
1052 (INSN_ANNULLED_BRANCH_P (jump)
1053 && INSN_FROM_TARGET_P (li->insn ()))
1054 ? eligible_for_annul_false (jump, i, li->insn (), flags) :
1055 #endif
1056 #ifdef ANNUL_IFTRUE_SLOTS
1057 (INSN_ANNULLED_BRANCH_P (jump)
1058 && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
1059 ? eligible_for_annul_true (jump, i, li->insn (), flags) :
1060 #endif
1061 eligible_for_delay (jump, i, li->insn (), flags)))
1062 break;
1064 return (li == NULL);
1067 /* DELAY_LIST is a list of insns that have already been placed into delay
1068 slots. See if all of them have the same annulling status as ANNUL_TRUE_P.
1069 If not, return 0; otherwise return 1. */
1071 static int
1072 check_annul_list_true_false (int annul_true_p, rtx delay_list)
1074 rtx temp;
1076 if (delay_list)
1078 for (temp = delay_list; temp; temp = XEXP (temp, 1))
1080 rtx trial = XEXP (temp, 0);
1082 if ((annul_true_p && INSN_FROM_TARGET_P (trial))
1083 || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
1084 return 0;
1088 return 1;
1091 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1092 the condition tested by INSN is CONDITION and the resources shown in
1093 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1094 from SEQ's delay list, in addition to whatever insns it may execute
1095 (in DELAY_LIST). SETS and NEEDED denote resources already set and
1096 needed while searching for delay slot insns. Return the concatenated
1097 delay list if possible; otherwise return 0.
1099 SLOTS_TO_FILL is the total number of slots required by INSN, and
1100 PSLOTS_FILLED points to the number filled so far (also the number of
1101 insns in DELAY_LIST). It is updated with the number that have been
1102 filled from the SEQUENCE, if any.
1104 PANNUL_P points to a nonzero value if we already know that we need
1105 to annul INSN. If this routine determines that annulling is needed,
1106 it may set that value nonzero.
1108 PNEW_THREAD points to a location that is to receive the place at which
1109 execution should continue. */
1111 static rtx_insn_list *
1112 steal_delay_list_from_target (rtx_insn *insn, rtx condition, rtx_sequence *seq,
1113 rtx_insn_list *delay_list, struct resources *sets,
1114 struct resources *needed,
1115 struct resources *other_needed,
1116 int slots_to_fill, int *pslots_filled,
1117 int *pannul_p, rtx *pnew_thread)
1119 int slots_remaining = slots_to_fill - *pslots_filled;
1120 int total_slots_filled = *pslots_filled;
1121 rtx_insn_list *new_delay_list = 0;
1122 int must_annul = *pannul_p;
1123 int used_annul = 0;
1124 int i;
1125 struct resources cc_set;
1126 bool *redundant;
1128 /* We can't do anything if there are more delay slots in SEQ than we
1129 can handle, or if we don't know that it will be a taken branch.
1130 We know that it will be a taken branch if it is either an unconditional
1131 branch or a conditional branch with a stricter branch condition.
1133 Also, exit if the branch has more than one set, since then it is computing
1134 other results that can't be ignored, e.g. the HPPA mov&branch instruction.
1135 ??? It may be possible to move other sets into INSN in addition to
1136 moving the instructions in the delay slots.
1138 We cannot steal the delay list if one of the instructions in the
1139 current delay_list modifies the condition codes and the jump in the
1140 sequence is a conditional jump. We cannot change the direction of
1141 such a jump, because the condition codes affect the direction of the
1142 jump in the sequence. */
1144 CLEAR_RESOURCE (&cc_set);
1145 for (rtx_insn_list *temp = delay_list; temp; temp = temp->next ())
1147 rtx_insn *trial = temp->insn ();
1149 mark_set_resources (trial, &cc_set, 0, MARK_SRC_DEST_CALL);
1150 if (insn_references_resource_p (seq->insn (0), &cc_set, false))
1151 return delay_list;
1154 if (XVECLEN (seq, 0) - 1 > slots_remaining
1155 || ! condition_dominates_p (condition, seq->insn (0))
1156 || ! single_set (seq->insn (0)))
1157 return delay_list;
1159 /* On some targets, branches with delay slots can have a limited
1160 displacement. Give the back end a chance to tell us we can't do
1161 this. */
1162 if (! targetm.can_follow_jump (insn, seq->insn (0)))
1163 return delay_list;
1165 redundant = XALLOCAVEC (bool, XVECLEN (seq, 0));
1166 for (i = 1; i < seq->len (); i++)
1168 rtx_insn *trial = seq->insn (i);
1169 int flags;
1171 if (insn_references_resource_p (trial, sets, false)
1172 || insn_sets_resource_p (trial, needed, false)
1173 || insn_sets_resource_p (trial, sets, false)
1174 #ifdef HAVE_cc0
1175 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1176 delay list. */
1177 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1178 #endif
1179 /* If TRIAL is from the fallthrough code of an annulled branch insn
1180 in SEQ, we cannot use it. */
1181 || (INSN_ANNULLED_BRANCH_P (seq->insn (0))
1182 && ! INSN_FROM_TARGET_P (trial)))
1183 return delay_list;
1185 /* If this insn was already done (usually in a previous delay slot),
1186 pretend we put it in our delay slot. */
1187 redundant[i] = redundant_insn (trial, insn, new_delay_list);
1188 if (redundant[i])
1189 continue;
1191 /* We will end up re-vectoring this branch, so compute flags
1192 based on jumping to the new label. */
1193 flags = get_jump_flags (insn, JUMP_LABEL (seq->insn (0)));
1195 if (! must_annul
1196 && ((condition == const_true_rtx
1197 || (! insn_sets_resource_p (trial, other_needed, false)
1198 && ! may_trap_or_fault_p (PATTERN (trial)))))
1199 ? eligible_for_delay (insn, total_slots_filled, trial, flags)
1200 : (must_annul || (delay_list == NULL && new_delay_list == NULL))
1201 && (must_annul = 1,
1202 check_annul_list_true_false (0, delay_list)
1203 && check_annul_list_true_false (0, new_delay_list)
1204 && eligible_for_annul_false (insn, total_slots_filled,
1205 trial, flags)))
1207 if (must_annul)
1208 used_annul = 1;
1209 rtx_insn *temp = copy_delay_slot_insn (trial);
1210 INSN_FROM_TARGET_P (temp) = 1;
1211 new_delay_list = add_to_delay_list (temp, new_delay_list);
1212 total_slots_filled++;
1214 if (--slots_remaining == 0)
1215 break;
1217 else
1218 return delay_list;
1221 /* Record the effect of the instructions that were redundant and which
1222 we therefore decided not to copy. */
1223 for (i = 1; i < seq->len (); i++)
1224 if (redundant[i])
1225 update_block (seq->insn (i), insn);
1227 /* Show the place to which we will be branching. */
1228 *pnew_thread = first_active_target_insn (JUMP_LABEL (seq->insn (0)));
1230 /* Add any new insns to the delay list and update the count of the
1231 number of slots filled. */
1232 *pslots_filled = total_slots_filled;
1233 if (used_annul)
1234 *pannul_p = 1;
1236 if (delay_list == 0)
1237 return new_delay_list;
1239 for (rtx_insn_list *temp = new_delay_list; temp; temp = temp->next ())
1240 delay_list = add_to_delay_list (temp->insn (), delay_list);
1242 return delay_list;
1245 /* Similar to steal_delay_list_from_target except that SEQ is on the
1246 fallthrough path of INSN. Here we only do something if the delay insn
1247 of SEQ is an unconditional branch. In that case we steal its delay slot
1248 for INSN since unconditional branches are much easier to fill. */
1250 static rtx_insn_list *
1251 steal_delay_list_from_fallthrough (rtx_insn *insn, rtx condition,
1252 rtx_sequence *seq,
1253 rtx_insn_list *delay_list,
1254 struct resources *sets,
1255 struct resources *needed,
1256 struct resources *other_needed,
1257 int slots_to_fill, int *pslots_filled,
1258 int *pannul_p)
1260 int i;
1261 int flags;
1262 int must_annul = *pannul_p;
1263 int used_annul = 0;
1265 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1267 /* We can't do anything if SEQ's delay insn isn't an
1268 unconditional branch. */
1270 if (! simplejump_or_return_p (seq->insn (0)))
1271 return delay_list;
1273 for (i = 1; i < seq->len (); i++)
1275 rtx_insn *trial = seq->insn (i);
1277 /* If TRIAL sets CC0, stealing it will move it too far from the use
1278 of CC0. */
1279 if (insn_references_resource_p (trial, sets, false)
1280 || insn_sets_resource_p (trial, needed, false)
1281 || insn_sets_resource_p (trial, sets, false)
1282 #ifdef HAVE_cc0
1283 || sets_cc0_p (PATTERN (trial))
1284 #endif
1287 break;
1289 /* If this insn was already done, we don't need it. */
1290 if (redundant_insn (trial, insn, delay_list))
1292 update_block (trial, insn);
1293 delete_from_delay_slot (trial);
1294 continue;
1297 if (! must_annul
1298 && ((condition == const_true_rtx
1299 || (! insn_sets_resource_p (trial, other_needed, false)
1300 && ! may_trap_or_fault_p (PATTERN (trial)))))
1301 ? eligible_for_delay (insn, *pslots_filled, trial, flags)
1302 : (must_annul || delay_list == NULL) && (must_annul = 1,
1303 check_annul_list_true_false (1, delay_list)
1304 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
1306 if (must_annul)
1307 used_annul = 1;
1308 delete_from_delay_slot (trial);
1309 delay_list = add_to_delay_list (trial, delay_list);
1311 if (++(*pslots_filled) == slots_to_fill)
1312 break;
1314 else
1315 break;
1318 if (used_annul)
1319 *pannul_p = 1;
1320 return delay_list;
1323 /* Try merging insns starting at THREAD which match exactly the insns in
1324 INSN's delay list.
1326 If all insns were matched and the insn was previously annulling, the
1327 annul bit will be cleared.
1329 For each insn that is merged, if the branch is or will be non-annulling,
1330 we delete the merged insn. */
1332 static void
1333 try_merge_delay_insns (rtx insn, rtx_insn *thread)
1335 rtx_insn *trial, *next_trial;
1336 rtx_insn *delay_insn = as_a <rtx_insn *> (XVECEXP (PATTERN (insn), 0, 0));
1337 int annul_p = JUMP_P (delay_insn) && INSN_ANNULLED_BRANCH_P (delay_insn);
1338 int slot_number = 1;
1339 int num_slots = XVECLEN (PATTERN (insn), 0);
1340 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1341 struct resources set, needed;
1342 rtx_insn_list *merged_insns = 0;
1343 int i;
1344 int flags;
1346 flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
1348 CLEAR_RESOURCE (&needed);
1349 CLEAR_RESOURCE (&set);
1351 /* If this is not an annulling branch, take into account anything needed in
1352 INSN's delay slot. This prevents two increments from being incorrectly
1353 folded into one. If we are annulling, this would be the correct
1354 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH,
1355 will essentially disable this optimization. This method is somewhat of
1356 a kludge, but I don't see a better way.) */
1357 if (! annul_p)
1358 for (i = 1 ; i < num_slots; i++)
1359 if (XVECEXP (PATTERN (insn), 0, i))
1360 mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed,
1361 true);
1363 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1365 rtx pat = PATTERN (trial);
1366 rtx oldtrial = trial;
1368 next_trial = next_nonnote_insn (trial);
1370 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1371 if (NONJUMP_INSN_P (trial)
1372 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1373 continue;
1375 if (GET_CODE (next_to_match) == GET_CODE (trial)
1376 #ifdef HAVE_cc0
1377 /* We can't share an insn that sets cc0. */
1378 && ! sets_cc0_p (pat)
1379 #endif
1380 && ! insn_references_resource_p (trial, &set, true)
1381 && ! insn_sets_resource_p (trial, &set, true)
1382 && ! insn_sets_resource_p (trial, &needed, true)
1383 && (trial = try_split (pat, trial, 0)) != 0
1384 /* Update next_trial, in case try_split succeeded. */
1385 && (next_trial = next_nonnote_insn (trial))
1386 /* Likewise THREAD. */
1387 && (thread = oldtrial == thread ? trial : thread)
1388 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1389 /* Have to test this condition if the annul condition is different
1390 from (and less restrictive than) the non-annulling one. */
1391 && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
1394 if (! annul_p)
1396 update_block (trial, thread);
1397 if (trial == thread)
1398 thread = next_active_insn (thread);
1400 delete_related_insns (trial);
1401 INSN_FROM_TARGET_P (next_to_match) = 0;
1403 else
1404 merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);
1406 if (++slot_number == num_slots)
1407 break;
1409 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1412 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
1413 mark_referenced_resources (trial, &needed, true);
1416 /* See if we stopped on a filled insn. If we did, try to see if its
1417 delay slots match. */
1418 if (slot_number != num_slots
1419 && trial && NONJUMP_INSN_P (trial)
1420 && GET_CODE (PATTERN (trial)) == SEQUENCE
1421 && !(JUMP_P (XVECEXP (PATTERN (trial), 0, 0))
1422 && INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0))))
1424 rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (trial));
1425 rtx filled_insn = XVECEXP (pat, 0, 0);
1427 /* Account for resources set/needed by the filled insn. */
1428 mark_set_resources (filled_insn, &set, 0, MARK_SRC_DEST_CALL);
1429 mark_referenced_resources (filled_insn, &needed, true);
1431 for (i = 1; i < pat->len (); i++)
1433 rtx_insn *dtrial = pat->insn (i);
1435 if (! insn_references_resource_p (dtrial, &set, true)
1436 && ! insn_sets_resource_p (dtrial, &set, true)
1437 && ! insn_sets_resource_p (dtrial, &needed, true)
1438 #ifdef HAVE_cc0
1439 && ! sets_cc0_p (PATTERN (dtrial))
1440 #endif
1441 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1442 && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
1444 if (! annul_p)
1446 rtx_insn *new_rtx;
1448 update_block (dtrial, thread);
1449 new_rtx = delete_from_delay_slot (dtrial);
1450 if (thread->deleted ())
1451 thread = new_rtx;
1452 INSN_FROM_TARGET_P (next_to_match) = 0;
1454 else
1455 merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
1456 merged_insns);
1458 if (++slot_number == num_slots)
1459 break;
1461 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1463 else
1465 /* Keep track of the set/referenced resources for the delay
1466 slots of any trial insns we encounter. */
1467 mark_set_resources (dtrial, &set, 0, MARK_SRC_DEST_CALL);
1468 mark_referenced_resources (dtrial, &needed, true);
1473 /* If all insns in the delay slot have been matched and we were previously
1474 annulling the branch, we no longer need to. In that case delete all the
1475 merged insns. Also clear the INSN_FROM_TARGET_P bit of each insn in
1476 the delay list so that we know that it isn't only being used at the
1477 target. */
1478 if (slot_number == num_slots && annul_p)
1480 for (; merged_insns; merged_insns = merged_insns->next ())
1482 if (GET_MODE (merged_insns) == SImode)
1484 rtx_insn *new_rtx;
1486 update_block (merged_insns->insn (), thread);
1487 new_rtx = delete_from_delay_slot (merged_insns->insn ());
1488 if (thread->deleted ())
1489 thread = new_rtx;
1491 else
1493 update_block (merged_insns->insn (), thread);
1494 delete_related_insns (merged_insns->insn ());
1498 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
1500 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1501 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
1505 /* See if INSN is redundant with an insn in front of TARGET. Often this
1506 is called when INSN is a candidate for a delay slot of TARGET.
1507 DELAY_LIST is a list of insns that will be placed in delay slots of TARGET in front
1508 of INSN. Often INSN will be redundant with an insn in a delay slot of
1509 some previous insn. This happens when we have a series of branches to the
1510 same label; in that case the first insn at the target might want to go
1511 into each of the delay slots.
1513 If we are not careful, this routine can take up a significant fraction
1514 of the total compilation time (4%), but only wins rarely. Hence we
1515 speed this routine up by making two passes. The first pass goes back
1516 until it hits a label and sees if it finds an insn with an identical
1517 pattern. Only in this (relatively rare) event does it check for
1518 data conflicts.
1520 We do not split insns we encounter. This could cause us not to find a
1521 redundant insn, but the cost of splitting seems greater than the possible
1522 gain in rare cases. */
1524 static rtx
1525 redundant_insn (rtx insn, rtx_insn *target, rtx delay_list)
1527 rtx target_main = target;
1528 rtx ipat = PATTERN (insn);
1529 rtx_insn *trial;
1530 rtx pat;
1531 struct resources needed, set;
1532 int i;
1533 unsigned insns_to_search;
1535 /* If INSN has any REG_UNUSED notes, it can't match anything since we
1536 are allowed to not actually assign to such a register. */
1537 if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
1538 return 0;
1540 /* Scan backwards looking for a match. */
1541 for (trial = PREV_INSN (target),
1542 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1543 trial && insns_to_search > 0;
1544 trial = PREV_INSN (trial))
1546 /* (use (insn))s can come immediately after a barrier if the
1547 label that used to precede them has been deleted as dead.
1548 See delete_related_insns. */
1549 if (LABEL_P (trial) || BARRIER_P (trial))
1550 return 0;
1552 if (!INSN_P (trial))
1553 continue;
1554 --insns_to_search;
1556 pat = PATTERN (trial);
1557 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1558 continue;
1560 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
1562 /* Stop for a CALL and its delay slots because it is difficult to
1563 track its resource needs correctly. */
1564 if (CALL_P (seq->element (0)))
1565 return 0;
1567 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1568 slots because it is difficult to track its resource needs
1569 correctly. */
1571 #ifdef INSN_SETS_ARE_DELAYED
1572 if (INSN_SETS_ARE_DELAYED (seq->insn (0)))
1573 return 0;
1574 #endif
1576 #ifdef INSN_REFERENCES_ARE_DELAYED
1577 if (INSN_REFERENCES_ARE_DELAYED (seq->insn (0)))
1578 return 0;
1579 #endif
1581 /* See if any of the insns in the delay slot match, updating
1582 resource requirements as we go. */
1583 for (i = seq->len () - 1; i > 0; i--)
1584 if (GET_CODE (seq->element (i)) == GET_CODE (insn)
1585 && rtx_equal_p (PATTERN (seq->element (i)), ipat)
1586 && ! find_reg_note (seq->element (i), REG_UNUSED, NULL_RTX))
1587 break;
1589 /* If found a match, exit this loop early. */
1590 if (i > 0)
1591 break;
1594 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
1595 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
1596 break;
1599 /* If we didn't find an insn that matches, return 0. */
1600 if (trial == 0)
1601 return 0;
1603 /* See what resources this insn sets and needs. If they overlap, or
1604 if this insn references CC0, it can't be redundant. */
1606 CLEAR_RESOURCE (&needed);
1607 CLEAR_RESOURCE (&set);
1608 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1609 mark_referenced_resources (insn, &needed, true);
1611 /* If TARGET is a SEQUENCE, get the main insn. */
1612 if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
1613 target_main = XVECEXP (PATTERN (target), 0, 0);
1615 if (resource_conflicts_p (&needed, &set)
1616 #ifdef HAVE_cc0
1617 || reg_mentioned_p (cc0_rtx, ipat)
1618 #endif
1619 /* The insn requiring the delay may not set anything needed or set by
1620 INSN. */
1621 || insn_sets_resource_p (target_main, &needed, true)
1622 || insn_sets_resource_p (target_main, &set, true))
1623 return 0;
1625 /* Insns we pass may not set either NEEDED or SET, so merge them for
1626 simpler tests. */
1627 needed.memory |= set.memory;
1628 IOR_HARD_REG_SET (needed.regs, set.regs);
1630 /* This insn isn't redundant if it conflicts with an insn that either is
1631 or will be in a delay slot of TARGET. */
1633 while (delay_list)
1635 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, true))
1636 return 0;
1637 delay_list = XEXP (delay_list, 1);
1640 if (NONJUMP_INSN_P (target) && GET_CODE (PATTERN (target)) == SEQUENCE)
1641 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1642 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed,
1643 true))
1644 return 0;
1646 /* Scan backwards until we reach a label or an insn that uses something
1647 INSN sets or sets something INSN uses or sets. */
1649 for (trial = PREV_INSN (target),
1650 insns_to_search = MAX_DELAY_SLOT_INSN_SEARCH;
1651 trial && !LABEL_P (trial) && insns_to_search > 0;
1652 trial = PREV_INSN (trial))
1654 if (!INSN_P (trial))
1655 continue;
1656 --insns_to_search;
1658 pat = PATTERN (trial);
1659 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1660 continue;
1662 if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
1664 bool annul_p = false;
1665 rtx_insn *control = seq->insn (0);
1667 /* If this is a CALL_INSN and its delay slots, it is hard to track
1668 the resource needs properly, so give up. */
1669 if (CALL_P (control))
1670 return 0;
1672 /* If this is an INSN or JUMP_INSN with delayed effects, it
1673 is hard to track the resource needs properly, so give up. */
1675 #ifdef INSN_SETS_ARE_DELAYED
1676 if (INSN_SETS_ARE_DELAYED (control))
1677 return 0;
1678 #endif
1680 #ifdef INSN_REFERENCES_ARE_DELAYED
1681 if (INSN_REFERENCES_ARE_DELAYED (control))
1682 return 0;
1683 #endif
1685 if (JUMP_P (control))
1686 annul_p = INSN_ANNULLED_BRANCH_P (control);
1688 /* See if any of the insns in the delay slot match, updating
1689 resource requirements as we go. */
1690 for (i = seq->len () - 1; i > 0; i--)
1692 rtx candidate = seq->element (i);
1694 /* If an insn will be annulled if the branch is false, it isn't
1695 considered as a possible duplicate insn. */
1696 if (rtx_equal_p (PATTERN (candidate), ipat)
1697 && ! (annul_p && INSN_FROM_TARGET_P (candidate)))
1699 /* Show that this insn will be used in the sequel. */
1700 INSN_FROM_TARGET_P (candidate) = 0;
1701 return candidate;
1704 /* Unless this is an annulled insn from the target of a branch,
1705 we must stop if it sets anything needed or set by INSN. */
1706 if ((!annul_p || !INSN_FROM_TARGET_P (candidate))
1707 && insn_sets_resource_p (candidate, &needed, true))
1708 return 0;
1711 /* If the insn requiring the delay slot conflicts with INSN, we
1712 must stop. */
1713 if (insn_sets_resource_p (control, &needed, true))
1714 return 0;
1716 else
1718 /* See if TRIAL is the same as INSN. */
1719 pat = PATTERN (trial);
1720 if (rtx_equal_p (pat, ipat))
1721 return trial;
1723 /* Can't go any further if TRIAL conflicts with INSN. */
1724 if (insn_sets_resource_p (trial, &needed, true))
1725 return 0;
1729 return 0;
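/* An illustrative case for the redundancy check above (hypothetical RTL,
   not from any particular port): if INSN is (set (reg 3) (reg 4)) and an
   identical set is already reached on the way to TARGET, with neither
   register set in between, that earlier insn is returned and INSN need
   not be copied into a delay slot at all.  */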
1732 /* Return 1 if THREAD can only be executed in one way. If LABEL is nonzero,
1733 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1734 is nonzero, we are allowed to fall into this thread; otherwise, we are
1735 not.
   If LABEL is used more than once, or we pass a label other than LABEL
   before finding an active insn, we do not own this thread.  */
1740 static int
1741 own_thread_p (rtx thread, rtx label, int allow_fallthrough)
1743 rtx_insn *active_insn;
1744 rtx_insn *insn;
1746 /* We don't own the function end. */
1747 if (thread == 0 || ANY_RETURN_P (thread))
1748 return 0;
1750 /* We have a non-NULL insn. */
1751 rtx_insn *thread_insn = as_a <rtx_insn *> (thread);
1753 /* Get the first active insn, or THREAD_INSN, if it is an active insn. */
1754 active_insn = next_active_insn (PREV_INSN (thread_insn));
1756 for (insn = thread_insn; insn != active_insn; insn = NEXT_INSN (insn))
1757 if (LABEL_P (insn)
1758 && (insn != label || LABEL_NUSES (insn) != 1))
1759 return 0;
1761 if (allow_fallthrough)
1762 return 1;
1764 /* Ensure that we reach a BARRIER before any insn or label. */
1765 for (insn = prev_nonnote_insn (thread_insn);
1766 insn == 0 || !BARRIER_P (insn);
1767 insn = prev_nonnote_insn (insn))
1768 if (insn == 0
1769 || LABEL_P (insn)
1770 || (NONJUMP_INSN_P (insn)
1771 && GET_CODE (PATTERN (insn)) != USE
1772 && GET_CODE (PATTERN (insn)) != CLOBBER))
1773 return 0;
1775 return 1;
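/* As an illustrative sketch (hypothetical SPARC-like code, not from any
   particular port), we own the thread at .L2 below when scanning the
   branch to .L2 with ALLOW_FALLTHROUGH zero:

	b	.L2		! the only use of .L2
	...
	b,a	.L9		! unconditional, so a BARRIER follows
   .L2:	add	%o1, %o2, %o3

   .L2 is used exactly once and is preceded by a BARRIER, so the insns at
   .L2 can only be reached through our branch.  A second jump to .L2, or
   code that could fall through into it, would make own_thread_p
   return 0.  */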
1778 /* Called when INSN is being moved from a location near the target of a jump.
1779 We leave a marker of the form (use (INSN)) immediately in front
1780 of WHERE for mark_target_live_regs. These markers will be deleted when
1781 reorg finishes.
1783 We used to try to update the live status of registers if WHERE is at
1784 the start of a basic block, but that can't work since we may remove a
1785 BARRIER in relax_delay_slots. */
1787 static void
1788 update_block (rtx_insn *insn, rtx where)
1790 /* Ignore if this was in a delay slot and it came from the target of
1791 a branch. */
1792 if (INSN_FROM_TARGET_P (insn))
1793 return;
1795 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
1797 /* INSN might be making a value live in a block where it didn't use to
1798 be. So recompute liveness information for this block. */
1800 incr_ticks_for_insn (insn);
1803 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
1804 the basic block containing the jump. */
1806 static int
1807 reorg_redirect_jump (rtx_insn *jump, rtx nlabel)
1809 incr_ticks_for_insn (jump);
1810 return redirect_jump (jump, nlabel, 1);
1813 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1814 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1815 that reference values used in INSN. If we find one, then we move the
1816 REG_DEAD note to INSN.
1818 This is needed to handle the case where a later insn (after INSN) has a
1819 REG_DEAD note for a register used by INSN, and this later insn subsequently
1820 gets moved before a CODE_LABEL because it is a redundant insn. In this
1821 case, mark_target_live_regs may be confused into thinking the register
1822 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
1824 static void
1825 update_reg_dead_notes (rtx insn, rtx delayed_insn)
1827 rtx p, link, next;
1829 for (p = next_nonnote_insn (insn); p != delayed_insn;
1830 p = next_nonnote_insn (p))
1831 for (link = REG_NOTES (p); link; link = next)
1833 next = XEXP (link, 1);
1835 if (REG_NOTE_KIND (link) != REG_DEAD
1836 || !REG_P (XEXP (link, 0)))
1837 continue;
1839 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1841 /* Move the REG_DEAD note from P to INSN. */
1842 remove_note (p, link);
1843 XEXP (link, 1) = REG_NOTES (insn);
1844 REG_NOTES (insn) = link;
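/* For illustration (hypothetical RTL, no particular target): if INSN is

	(set (reg 10) (reg 9))

   and some insn P between INSN and DELAYED_INSN carries a REG_DEAD note
   for (reg 9), then moving INSN into the delay slot pushes the death of
   (reg 9) past P, so the note is unlinked from P and prepended to INSN's
   REG_NOTES.  */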
1849 /* Called when an insn redundant with start_insn is deleted. If there
1850 is a REG_DEAD note for the target of start_insn between start_insn
1851 and stop_insn, then the REG_DEAD note needs to be deleted since the
1852 value no longer dies there.
1854 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1855 confused into thinking the register is dead. */
1857 static void
1858 fix_reg_dead_note (rtx start_insn, rtx stop_insn)
1860 rtx p, link, next;
1862 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1863 p = next_nonnote_insn (p))
1864 for (link = REG_NOTES (p); link; link = next)
1866 next = XEXP (link, 1);
1868 if (REG_NOTE_KIND (link) != REG_DEAD
1869 || !REG_P (XEXP (link, 0)))
1870 continue;
1872 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1874 remove_note (p, link);
1875 return;
1880 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1882 This handles the case of udivmodXi4 instructions which optimize their
1883 output depending on whether any REG_UNUSED notes are present.
   We must make sure that INSN calculates as many results as
   REDUNDANT_INSN does.  */
1887 static void
1888 update_reg_unused_notes (rtx insn, rtx redundant_insn)
1890 rtx link, next;
1892 for (link = REG_NOTES (insn); link; link = next)
1894 next = XEXP (link, 1);
1896 if (REG_NOTE_KIND (link) != REG_UNUSED
1897 || !REG_P (XEXP (link, 0)))
1898 continue;
1900 if (! find_regno_note (redundant_insn, REG_UNUSED,
1901 REGNO (XEXP (link, 0))))
1902 remove_note (insn, link);
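/* Illustrative example (hypothetical): a udivmodsi4-style insn computes
   both a quotient and a remainder.  If INSN carries a REG_UNUSED note
   for the remainder register but REDUNDANT_INSN did not, the remainder
   is in fact used after REDUNDANT_INSN's position, so the note is
   removed from INSN to make it keep computing that result.  */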
1906 static vec <rtx> sibling_labels;
1908 /* Return the label before INSN, or put a new label there. If SIBLING is
1909 non-zero, it is another label associated with the new label (if any),
1910 typically the former target of the jump that will be redirected to
1911 the new label. */
1913 static rtx_insn *
1914 get_label_before (rtx_insn *insn, rtx sibling)
1916 rtx_insn *label;
1918 /* Find an existing label at this point
1919 or make a new one if there is none. */
1920 label = prev_nonnote_insn (insn);
1922 if (label == 0 || !LABEL_P (label))
1924 rtx_insn *prev = PREV_INSN (insn);
1926 label = gen_label_rtx ();
1927 emit_label_after (label, prev);
1928 LABEL_NUSES (label) = 0;
1929 if (sibling)
1931 sibling_labels.safe_push (label);
1932 sibling_labels.safe_push (sibling);
1935 return label;
1938 /* Scan a function looking for insns that need a delay slot and find insns to
1939 put into the delay slot.
1941 NON_JUMPS_P is nonzero if we are to only try to fill non-jump insns (such
1942 as calls). We do these first since we don't want jump insns (that are
1943 easier to fill) to get the only insns that could be used for non-jump insns.
1944 When it is zero, only try to fill JUMP_INSNs.
1946 When slots are filled in this manner, the insns (including the
1947 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
1948 it is possible to tell whether a delay slot has really been filled
1949 or not. `final' knows how to deal with this, by communicating
1950 through FINAL_SEQUENCE. */
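/* As an illustrative sketch of the representation (not target-specific),
   a branch with one filled slot becomes a single insn whose pattern is a
   SEQUENCE; the insn needing the slot is element 0 and the slot insns
   follow it:

	(insn (sequence [
	    (jump_insn ...)		; the branch itself
	    (insn ...)			; the delay-slot insn
	]))  */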
1952 static void
1953 fill_simple_delay_slots (int non_jumps_p)
1955 rtx_insn *insn, *trial, *next_trial;
1956 rtx pat;
1957 int i;
1958 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
1959 struct resources needed, set;
1960 int slots_to_fill, slots_filled;
1961 rtx_insn_list *delay_list;
1963 for (i = 0; i < num_unfilled_slots; i++)
1965 int flags;
1966 /* Get the next insn to fill. If it has already had any slots assigned,
1967 we can't do anything with it. Maybe we'll improve this later. */
1969 insn = unfilled_slots_base[i];
1970 if (insn == 0
1971 || insn->deleted ()
1972 || (NONJUMP_INSN_P (insn)
1973 && GET_CODE (PATTERN (insn)) == SEQUENCE)
1974 || (JUMP_P (insn) && non_jumps_p)
1975 || (!JUMP_P (insn) && ! non_jumps_p))
1976 continue;
1978 /* It may have been that this insn used to need delay slots, but
1979 now doesn't; ignore in that case. This can happen, for example,
1980 on the HP PA RISC, where the number of delay slots depends on
1981 what insns are nearby. */
1982 slots_to_fill = num_delay_slots (insn);
/* Some machine descriptions define instructions that have delay slots
   only in certain circumstances, which may depend on nearby insns
   (which change due to reorg's actions).
1988 For example, the PA port normally has delay slots for unconditional
1989 jumps.
1991 However, the PA port claims such jumps do not have a delay slot
1992 if they are immediate successors of certain CALL_INSNs. This
1993 allows the port to favor filling the delay slot of the call with
1994 the unconditional jump. */
1995 if (slots_to_fill == 0)
1996 continue;
1998 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
1999 says how many. After initialization, first try optimizing
	call _foo		call _foo
	nop			add %o7,.-L1,%o7
	b,a L1
2006 If this case applies, the delay slot of the call is filled with
2007 the unconditional jump. This is done first to avoid having the
2008 delay slot of the call filled in the backward scan. Also, since
2009 the unconditional jump is likely to also have a delay slot, that
2010 insn must exist when it is subsequently scanned.
2012 This is tried on each insn with delay slots as some machines
2013 have insns which perform calls, but are not represented as
2014 CALL_INSNs. */
2016 slots_filled = 0;
2017 delay_list = 0;
2019 if (JUMP_P (insn))
2020 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2021 else
2022 flags = get_jump_flags (insn, NULL_RTX);
2024 if ((trial = next_active_insn (insn))
2025 && JUMP_P (trial)
2026 && simplejump_p (trial)
2027 && eligible_for_delay (insn, slots_filled, trial, flags)
2028 && no_labels_between_p (insn, trial)
2029 && ! can_throw_internal (trial))
2031 rtx_insn **tmp;
2032 slots_filled++;
2033 delay_list = add_to_delay_list (trial, delay_list);
2035 /* TRIAL may have had its delay slot filled, then unfilled. When
2036 the delay slot is unfilled, TRIAL is placed back on the unfilled
2037 slots obstack. Unfortunately, it is placed on the end of the
2038 obstack, not in its original location. Therefore, we must search
2039 from entry i + 1 to the end of the unfilled slots obstack to
2040 try and find TRIAL. */
2041 tmp = &unfilled_slots_base[i + 1];
2042 while (*tmp != trial && tmp != unfilled_slots_next)
2043 tmp++;
2045 /* Remove the unconditional jump from consideration for delay slot
2046 filling and unthread it. */
2047 if (*tmp == trial)
2048 *tmp = 0;
2050 rtx_insn *next = NEXT_INSN (trial);
2051 rtx_insn *prev = PREV_INSN (trial);
2052 if (prev)
2053 SET_NEXT_INSN (prev) = next;
2054 if (next)
2055 SET_PREV_INSN (next) = prev;
2059 /* Now, scan backwards from the insn to search for a potential
2060 delay-slot candidate. Stop searching when a label or jump is hit.
2062 For each candidate, if it is to go into the delay slot (moved
2063 forward in execution sequence), it must not need or set any resources
2064 that were set by later insns and must not set any resources that
2065 are needed for those insns.
2067 The delay slot insn itself sets resources unless it is a call
2068 (in which case the called routine, not the insn itself, is doing
2069 the setting). */
2071 if (slots_filled < slots_to_fill)
2073 /* If the flags register is dead after the insn, then we want to be
2074 able to accept a candidate that clobbers it. For this purpose,
2075 we need to filter the flags register during life analysis, so
2076 that it doesn't create RAW and WAW dependencies, while still
2077 creating the necessary WAR dependencies. */
2078 bool filter_flags
2079 = (slots_to_fill == 1
2080 && targetm.flags_regnum != INVALID_REGNUM
2081 && find_regno_note (insn, REG_DEAD, targetm.flags_regnum));
2082 struct resources fset;
2083 CLEAR_RESOURCE (&needed);
2084 CLEAR_RESOURCE (&set);
2085 mark_set_resources (insn, &set, 0, MARK_SRC_DEST);
2086 if (filter_flags)
2088 CLEAR_RESOURCE (&fset);
2089 mark_set_resources (insn, &fset, 0, MARK_SRC_DEST);
2091 mark_referenced_resources (insn, &needed, false);
2093 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2094 trial = next_trial)
2096 next_trial = prev_nonnote_insn (trial);
2098 /* This must be an INSN or CALL_INSN. */
2099 pat = PATTERN (trial);
2101 /* Stand-alone USE and CLOBBER are just for flow. */
2102 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2103 continue;
2105 /* Check for resource conflict first, to avoid unnecessary
2106 splitting. */
2107 if (! insn_references_resource_p (trial, &set, true)
2108 && ! insn_sets_resource_p (trial,
2109 filter_flags ? &fset : &set,
2110 true)
2111 && ! insn_sets_resource_p (trial, &needed, true)
2112 #ifdef HAVE_cc0
2113 /* Can't separate set of cc0 from its use. */
2114 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2115 #endif
2116 && ! can_throw_internal (trial))
2118 trial = try_split (pat, trial, 1);
2119 next_trial = prev_nonnote_insn (trial);
2120 if (eligible_for_delay (insn, slots_filled, trial, flags))
2122 /* In this case, we are searching backward, so if we
2123 find insns to put on the delay list, we want
2124 to put them at the head, rather than the
2125 tail, of the list. */
2127 update_reg_dead_notes (trial, insn);
2128 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2129 trial, delay_list);
2130 update_block (trial, trial);
2131 delete_related_insns (trial);
2132 if (slots_to_fill == ++slots_filled)
2133 break;
2134 continue;
2138 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2139 if (filter_flags)
2141 mark_set_resources (trial, &fset, 0, MARK_SRC_DEST_CALL);
2142 /* If the flags register is set, then it doesn't create RAW
2143 dependencies any longer and it also doesn't create WAW
2144 dependencies since it's dead after the original insn. */
2145 if (TEST_HARD_REG_BIT (fset.regs, targetm.flags_regnum))
2147 CLEAR_HARD_REG_BIT (needed.regs, targetm.flags_regnum);
2148 CLEAR_HARD_REG_BIT (fset.regs, targetm.flags_regnum);
2151 mark_referenced_resources (trial, &needed, true);
2155 /* If all needed slots haven't been filled, we come here. */
2157 /* Try to optimize case of jumping around a single insn. */
2158 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2159 if (slots_filled != slots_to_fill
2160 && delay_list == 0
2161 && JUMP_P (insn)
2162 && (condjump_p (insn) || condjump_in_parallel_p (insn))
2163 && !ANY_RETURN_P (JUMP_LABEL (insn)))
2165 delay_list = optimize_skip (insn);
2166 if (delay_list)
2167 slots_filled += 1;
2169 #endif
2171 /* Try to get insns from beyond the insn needing the delay slot.
   These insns can neither set nor reference resources set in insns being
   skipped, cannot set resources needed by the insns being skipped, and,
   if this is a CALL_INSN (or a CALL_INSN is passed), cannot trap
   (because the call might not return).
2177 There used to be code which continued past the target label if
2178 we saw all uses of the target label. This code did not work,
2179 because it failed to account for some instructions which were
2180 both annulled and marked as from the target. This can happen as a
2181 result of optimize_skip. Since this code was redundant with
   fill_eager_delay_slots anyway, it was just deleted.
2184 if (slots_filled != slots_to_fill
2185 /* If this instruction could throw an exception which is
2186 caught in the same function, then it's not safe to fill
2187 the delay slot with an instruction from beyond this
2188 point. For example, consider:
2190 int i = 2;
2192 try {
2193 f();
2194 i = 3;
2195 } catch (...) {}
2197 return i;
2199 Even though `i' is a local variable, we must be sure not
2200 to put `i = 3' in the delay slot if `f' might throw an
2201 exception.
2203 Presumably, we should also check to see if we could get
2204 back to this function via `setjmp'. */
2205 && ! can_throw_internal (insn)
2206 && !JUMP_P (insn))
2208 int maybe_never = 0;
2209 rtx pat, trial_delay;
2211 CLEAR_RESOURCE (&needed);
2212 CLEAR_RESOURCE (&set);
2213 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
2214 mark_referenced_resources (insn, &needed, true);
2216 if (CALL_P (insn))
2217 maybe_never = 1;
2219 for (trial = next_nonnote_insn (insn); !stop_search_p (trial, 1);
2220 trial = next_trial)
2222 next_trial = next_nonnote_insn (trial);
2224 /* This must be an INSN or CALL_INSN. */
2225 pat = PATTERN (trial);
2227 /* Stand-alone USE and CLOBBER are just for flow. */
2228 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2229 continue;
2231 /* If this already has filled delay slots, get the insn needing
2232 the delay slots. */
2233 if (GET_CODE (pat) == SEQUENCE)
2234 trial_delay = XVECEXP (pat, 0, 0);
2235 else
2236 trial_delay = trial;
2238 /* Stop our search when seeing a jump. */
2239 if (JUMP_P (trial_delay))
2240 break;
2242 /* See if we have a resource problem before we try to split. */
2243 if (GET_CODE (pat) != SEQUENCE
2244 && ! insn_references_resource_p (trial, &set, true)
2245 && ! insn_sets_resource_p (trial, &set, true)
2246 && ! insn_sets_resource_p (trial, &needed, true)
2247 #ifdef HAVE_cc0
2248 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2249 #endif
2250 && ! (maybe_never && may_trap_or_fault_p (pat))
2251 && (trial = try_split (pat, trial, 0))
2252 && eligible_for_delay (insn, slots_filled, trial, flags)
2253 && ! can_throw_internal (trial))
2255 next_trial = next_nonnote_insn (trial);
2256 delay_list = add_to_delay_list (trial, delay_list);
2257 #ifdef HAVE_cc0
2258 if (reg_mentioned_p (cc0_rtx, pat))
2259 link_cc0_insns (trial);
2260 #endif
2261 delete_related_insns (trial);
2262 if (slots_to_fill == ++slots_filled)
2263 break;
2264 continue;
2267 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2268 mark_referenced_resources (trial, &needed, true);
2270 /* Ensure we don't put insns between the setting of cc and the
2271 comparison by moving a setting of cc into an earlier delay
2272 slot since these insns could clobber the condition code. */
2273 set.cc = 1;
2275 /* If this is a call, we might not get here. */
2276 if (CALL_P (trial_delay))
2277 maybe_never = 1;
2280 /* If there are slots left to fill and our search was stopped by an
2281 unconditional branch, try the insn at the branch target. We can
2282 redirect the branch if it works.
2284 Don't do this if the insn at the branch target is a branch. */
2285 if (slots_to_fill != slots_filled
2286 && trial
2287 && jump_to_label_p (trial)
2288 && simplejump_p (trial)
2289 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2290 && ! (NONJUMP_INSN_P (next_trial)
2291 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2292 && !JUMP_P (next_trial)
2293 && ! insn_references_resource_p (next_trial, &set, true)
2294 && ! insn_sets_resource_p (next_trial, &set, true)
2295 && ! insn_sets_resource_p (next_trial, &needed, true)
2296 #ifdef HAVE_cc0
2297 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2298 #endif
2299 && ! (maybe_never && may_trap_or_fault_p (PATTERN (next_trial)))
2300 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2301 && eligible_for_delay (insn, slots_filled, next_trial, flags)
2302 && ! can_throw_internal (trial))
2304 /* See comment in relax_delay_slots about necessity of using
2305 next_real_insn here. */
2306 rtx_insn *new_label = next_real_insn (next_trial);
2308 if (new_label != 0)
2309 new_label = get_label_before (new_label, JUMP_LABEL (trial));
2310 else
2311 new_label = find_end_label (simple_return_rtx);
2313 if (new_label)
2315 delay_list
2316 = add_to_delay_list (copy_delay_slot_insn (next_trial),
2317 delay_list);
2318 slots_filled++;
2319 reorg_redirect_jump (trial, new_label);
2324 /* If this is an unconditional jump, then try to get insns from the
2325 target of the jump. */
2326 if (JUMP_P (insn)
2327 && simplejump_p (insn)
2328 && slots_filled != slots_to_fill)
2329 delay_list
2330 = fill_slots_from_thread (insn, const_true_rtx,
2331 next_active_insn (JUMP_LABEL (insn)),
2332 NULL, 1, 1,
2333 own_thread_p (JUMP_LABEL (insn),
2334 JUMP_LABEL (insn), 0),
2335 slots_to_fill, &slots_filled,
2336 delay_list);
2338 if (delay_list)
2339 unfilled_slots_base[i]
2340 = emit_delay_sequence (insn, delay_list, slots_filled);
2342 if (slots_to_fill == slots_filled)
2343 unfilled_slots_base[i] = 0;
2345 note_delay_statistics (slots_filled, 0);
2349 /* Follow any unconditional jump at LABEL, for the purpose of redirecting JUMP;
2350 return the ultimate label reached by any such chain of jumps.
2351 Return a suitable return rtx if the chain ultimately leads to a
2352 return instruction.
2353 If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find the end, return LABEL,
   since that tells the caller to avoid changing the insn.
2356 If the returned label is obtained by following a crossing jump,
2357 set *CROSSING to true, otherwise set it to false. */
2359 static rtx
2360 follow_jumps (rtx label, rtx_insn *jump, bool *crossing)
2362 rtx_insn *insn;
2363 rtx_insn *next;
2364 int depth;
2366 *crossing = false;
2367 if (ANY_RETURN_P (label))
2368 return label;
2370 rtx_insn *value = as_a <rtx_insn *> (label);
2372 for (depth = 0;
2373 (depth < 10
2374 && (insn = next_active_insn (value)) != 0
2375 && JUMP_P (insn)
2376 && JUMP_LABEL (insn) != NULL_RTX
2377 && ((any_uncondjump_p (insn) && onlyjump_p (insn))
2378 || ANY_RETURN_P (PATTERN (insn)))
2379 && (next = NEXT_INSN (insn))
2380 && BARRIER_P (next));
2381 depth++)
2383 rtx this_label_or_return = JUMP_LABEL (insn);
2385 /* If we have found a cycle, make the insn jump to itself. */
2386 if (this_label_or_return == label)
2387 return label;
2389 /* Cannot follow returns and cannot look through tablejumps. */
2390 if (ANY_RETURN_P (this_label_or_return))
2391 return this_label_or_return;
2393 rtx_insn *this_label = as_a <rtx_insn *> (this_label_or_return);
2394 if (NEXT_INSN (this_label)
2395 && JUMP_TABLE_DATA_P (NEXT_INSN (this_label)))
2396 break;
2398 if (!targetm.can_follow_jump (jump, insn))
2399 break;
2400 if (!*crossing)
2401 *crossing = CROSSING_JUMP_P (jump);
2402 value = this_label;
2404 if (depth == 10)
2405 return label;
2406 return value;
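/* For illustration (hypothetical labels), given the chain

   .L1:	b,a	.L2
	...
   .L2:	b,a	.L3
	...
   .L3:	add	%o1, 1, %o1

   follow_jumps (.L1, JUMP, &crossing) returns .L3, provided the target
   allows JUMP to follow each hop.  A chain that loops back to .L1, or
   one that needs more than ten hops, simply returns .L1 unchanged.  */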
2409 /* Try to find insns to place in delay slots.
2411 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2412 or is an unconditional branch if CONDITION is const_true_rtx.
2413 *PSLOTS_FILLED is updated with the number of slots that we have filled.
   THREAD is a flow of control, either the insns to be executed if the
   branch is true or those if it is false; THREAD_IF_TRUE says which.
2418 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2419 to see if any potential delay slot insns set things needed there.
2421 LIKELY is nonzero if it is extremely likely that the branch will be
2422 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2423 end of a loop back up to the top.
   OWN_THREAD is true if we are the only user of the thread, i.e., it is
   the fallthrough code of our jump or the target of the jump when we are
   the only jump going there.
2429 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2430 case, we can only take insns from the head of the thread for our delay
2431 slot. We then adjust the jump to point after the insns we have taken. */
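/* An illustrative picture (hypothetical code) for a conditional branch:

	beq	.L5		! INSN
	insn_f			! fallthrough thread
	...
   .L5:	insn_t			! target thread

   When filling from the target, THREAD starts at insn_t, THREAD_IF_TRUE
   is nonzero, and OPPOSITE_THREAD is insn_f; candidate insns taken from
   insn_t onward must not set anything OPPOSITE_THREAD needs unless they
   can go into an annulling slot.  */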
2433 static rtx_insn_list *
2434 fill_slots_from_thread (rtx_insn *insn, rtx condition, rtx thread_or_return,
2435 rtx opposite_thread, int likely,
2436 int thread_if_true,
2437 int own_thread, int slots_to_fill,
2438 int *pslots_filled, rtx_insn_list *delay_list)
2440 rtx new_thread;
2441 struct resources opposite_needed, set, needed;
2442 rtx_insn *trial;
2443 int lose = 0;
2444 int must_annul = 0;
2445 int flags;
2447 /* Validate our arguments. */
2448 gcc_assert (condition != const_true_rtx || thread_if_true);
2449 gcc_assert (own_thread || thread_if_true);
2451 flags = get_jump_flags (insn, JUMP_LABEL (insn));
/* If our thread is the end of the subroutine, we can't get any delay
   insns from that.  */
2455 if (thread_or_return == NULL_RTX || ANY_RETURN_P (thread_or_return))
2456 return delay_list;
2458 rtx_insn *thread = as_a <rtx_insn *> (thread_or_return);
2460 /* If this is an unconditional branch, nothing is needed at the
2461 opposite thread. Otherwise, compute what is needed there. */
2462 if (condition == const_true_rtx)
2463 CLEAR_RESOURCE (&opposite_needed);
2464 else
2465 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2467 /* If the insn at THREAD can be split, do it here to avoid having to
2468 update THREAD and NEW_THREAD if it is done in the loop below. Also
2469 initialize NEW_THREAD. */
2471 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2473 /* Scan insns at THREAD. We are looking for an insn that can be removed
   from THREAD (it neither sets nor references resources that were set
   ahead of it and it doesn't set anything needed by the insns ahead of
   it) and that either can be placed in an annulling insn or isn't
   needed at OPPOSITE_THREAD.  */
2479 CLEAR_RESOURCE (&needed);
2480 CLEAR_RESOURCE (&set);
2482 /* If we do not own this thread, we must stop as soon as we find
2483 something that we can't put in a delay slot, since all we can do
2484 is branch into THREAD at a later point. Therefore, labels stop
2485 the search if this is not the `true' thread. */
2487 for (trial = thread;
2488 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2489 trial = next_nonnote_insn (trial))
2491 rtx pat, old_trial;
2493 /* If we have passed a label, we no longer own this thread. */
2494 if (LABEL_P (trial))
2496 own_thread = 0;
2497 continue;
2500 pat = PATTERN (trial);
2501 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2502 continue;
2504 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2505 don't separate or copy insns that set and use CC0. */
2506 if (! insn_references_resource_p (trial, &set, true)
2507 && ! insn_sets_resource_p (trial, &set, true)
2508 && ! insn_sets_resource_p (trial, &needed, true)
2509 #ifdef HAVE_cc0
2510 && ! (reg_mentioned_p (cc0_rtx, pat)
2511 && (! own_thread || ! sets_cc0_p (pat)))
2512 #endif
2513 && ! can_throw_internal (trial))
2515 rtx prior_insn;
2517 /* If TRIAL is redundant with some insn before INSN, we don't
2518 actually need to add it to the delay list; we can merely pretend
2519 we did. */
2520 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2522 fix_reg_dead_note (prior_insn, insn);
2523 if (own_thread)
2525 update_block (trial, thread);
2526 if (trial == thread)
2528 thread = next_active_insn (thread);
2529 if (new_thread == trial)
2530 new_thread = thread;
2533 delete_related_insns (trial);
2535 else
2537 update_reg_unused_notes (prior_insn, trial);
2538 new_thread = next_active_insn (trial);
2541 continue;
2544 /* There are two ways we can win: If TRIAL doesn't set anything
2545 needed at the opposite thread and can't trap, or if it can
2546 go into an annulled delay slot. But we want neither to copy
2547 nor to speculate frame-related insns. */
2548 if (!must_annul
2549 && ((condition == const_true_rtx
2550 && (own_thread || !RTX_FRAME_RELATED_P (trial)))
2551 || (! insn_sets_resource_p (trial, &opposite_needed, true)
2552 && ! may_trap_or_fault_p (pat)
2553 && ! RTX_FRAME_RELATED_P (trial))))
2555 old_trial = trial;
2556 trial = try_split (pat, trial, 0);
2557 if (new_thread == old_trial)
2558 new_thread = trial;
2559 if (thread == old_trial)
2560 thread = trial;
2561 pat = PATTERN (trial);
2562 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2563 goto winner;
2565 else if (0
2566 #ifdef ANNUL_IFTRUE_SLOTS
2567 || ! thread_if_true
2568 #endif
2569 #ifdef ANNUL_IFFALSE_SLOTS
2570 || thread_if_true
2571 #endif
2574 old_trial = trial;
2575 trial = try_split (pat, trial, 0);
2576 if (new_thread == old_trial)
2577 new_thread = trial;
2578 if (thread == old_trial)
2579 thread = trial;
2580 pat = PATTERN (trial);
2581 if ((must_annul || delay_list == NULL) && (thread_if_true
2582 ? check_annul_list_true_false (0, delay_list)
2583 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2584 : check_annul_list_true_false (1, delay_list)
2585 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2587 rtx_insn *temp;
2589 must_annul = 1;
2590 winner:
2592 #ifdef HAVE_cc0
2593 if (reg_mentioned_p (cc0_rtx, pat))
2594 link_cc0_insns (trial);
2595 #endif
2597 /* If we own this thread, delete the insn. If this is the
2598 destination of a branch, show that a basic block status
2599 may have been updated. In any case, mark the new
2600 starting point of this thread. */
2601 if (own_thread)
2603 rtx note;
2605 update_block (trial, thread);
2606 if (trial == thread)
2608 thread = next_active_insn (thread);
2609 if (new_thread == trial)
2610 new_thread = thread;
2613 /* We are moving this insn, not deleting it. We must
2614 temporarily increment the use count on any referenced
2615 label lest it be deleted by delete_related_insns. */
2616 for (note = REG_NOTES (trial);
2617 note != NULL_RTX;
2618 note = XEXP (note, 1))
2619 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2620 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2622 /* REG_LABEL_OPERAND could be
2623 NOTE_INSN_DELETED_LABEL too. */
2624 if (LABEL_P (XEXP (note, 0)))
2625 LABEL_NUSES (XEXP (note, 0))++;
2626 else
2627 gcc_assert (REG_NOTE_KIND (note)
2628 == REG_LABEL_OPERAND);
2630 if (jump_to_label_p (trial))
2631 LABEL_NUSES (JUMP_LABEL (trial))++;
2633 delete_related_insns (trial);
2635 for (note = REG_NOTES (trial);
2636 note != NULL_RTX;
2637 note = XEXP (note, 1))
2638 if (REG_NOTE_KIND (note) == REG_LABEL_OPERAND
2639 || REG_NOTE_KIND (note) == REG_LABEL_TARGET)
2641 /* REG_LABEL_OPERAND could be
2642 NOTE_INSN_DELETED_LABEL too. */
2643 if (LABEL_P (XEXP (note, 0)))
2644 LABEL_NUSES (XEXP (note, 0))--;
2645 else
2646 gcc_assert (REG_NOTE_KIND (note)
2647 == REG_LABEL_OPERAND);
2649 if (jump_to_label_p (trial))
2650 LABEL_NUSES (JUMP_LABEL (trial))--;
2652 else
2653 new_thread = next_active_insn (trial);
2655 temp = own_thread ? trial : copy_delay_slot_insn (trial);
2656 if (thread_if_true)
2657 INSN_FROM_TARGET_P (temp) = 1;
2659 delay_list = add_to_delay_list (temp, delay_list);
2661 if (slots_to_fill == ++(*pslots_filled))
2663 /* Even though we have filled all the slots, we
2664 may be branching to a location that has a
2665 redundant insn. Skip any if so. */
2666 while (new_thread && ! own_thread
2667 && ! insn_sets_resource_p (new_thread, &set, true)
2668 && ! insn_sets_resource_p (new_thread, &needed,
2669 true)
2670 && ! insn_references_resource_p (new_thread,
2671 &set, true)
2672 && (prior_insn
2673 = redundant_insn (new_thread, insn,
2674 delay_list)))
2676 /* We know we do not own the thread, so no need
2677 to call update_block and delete_insn. */
2678 fix_reg_dead_note (prior_insn, insn);
2679 update_reg_unused_notes (prior_insn, new_thread);
2680 new_thread = next_active_insn (new_thread);
2682 break;
2685 continue;
2690 /* This insn can't go into a delay slot. */
2691 lose = 1;
2692 mark_set_resources (trial, &set, 0, MARK_SRC_DEST_CALL);
2693 mark_referenced_resources (trial, &needed, true);
2695 /* Ensure we don't put insns between the setting of cc and the comparison
2696 by moving a setting of cc into an earlier delay slot since these insns
2697 could clobber the condition code. */
2698 set.cc = 1;
2700 /* If this insn is a register-register copy and the next insn has
2701 a use of our destination, change it to use our source. That way,
2702 it will become a candidate for our delay slot the next time
2703 through this loop. This case occurs commonly in loops that
2704 scan a list.
2706 We could check for more complex cases than those tested below,
2707 but it doesn't seem worth it. It might also be a good idea to try
2708 to swap the two insns. That might do better.
2710 We can't do this if the next insn modifies our destination, because
2711 that would make the replacement into the insn invalid. We also can't
2712 do this if it modifies our source, because it might be an earlyclobber
2713 operand. This latter test also prevents updating the contents of
2714 a PRE_INC. We also can't do this if there's overlap of source and
2715 destination. Overlap may happen for larger-than-register-size modes. */
2717 if (NONJUMP_INSN_P (trial) && GET_CODE (pat) == SET
2718 && REG_P (SET_SRC (pat))
2719 && REG_P (SET_DEST (pat))
2720 && !reg_overlap_mentioned_p (SET_DEST (pat), SET_SRC (pat)))
2722 rtx next = next_nonnote_insn (trial);
2724 if (next && NONJUMP_INSN_P (next)
2725 && GET_CODE (PATTERN (next)) != USE
2726 && ! reg_set_p (SET_DEST (pat), next)
2727 && ! reg_set_p (SET_SRC (pat), next)
2728 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2729 && ! modified_in_p (SET_DEST (pat), next))
2730 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2734 /* If we stopped on a branch insn that has delay slots, see if we can
2735 steal some of the insns in those slots. */
2736 if (trial && NONJUMP_INSN_P (trial)
2737 && GET_CODE (PATTERN (trial)) == SEQUENCE
2738 && JUMP_P (XVECEXP (PATTERN (trial), 0, 0)))
2740 rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (trial));
2741 /* If this is the `true' thread, we will want to follow the jump,
2742 so we can only do this if we have taken everything up to here. */
2743 if (thread_if_true && trial == new_thread)
2745 delay_list
2746 = steal_delay_list_from_target (insn, condition, sequence,
2747 delay_list, &set, &needed,
2748 &opposite_needed, slots_to_fill,
2749 pslots_filled, &must_annul,
2750 &new_thread);
2751 /* If we owned the thread and are told that it branched
2752 elsewhere, make sure we own the thread at the new location. */
2753 if (own_thread && trial != new_thread)
2754 own_thread = own_thread_p (new_thread, new_thread, 0);
2756 else if (! thread_if_true)
2757 delay_list
2758 = steal_delay_list_from_fallthrough (insn, condition,
2759 sequence,
2760 delay_list, &set, &needed,
2761 &opposite_needed, slots_to_fill,
2762 pslots_filled, &must_annul);
2765 /* If we haven't found anything for this delay slot and it is very
2766 likely that the branch will be taken, see if the insn at our target
2767 increments or decrements a register with an increment that does not
2768 depend on the destination register. If so, try to place the opposite
2769 arithmetic insn after the jump insn and put the arithmetic insn in the
2770 delay slot. If we can't do this, return. */
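/* Illustrative transformation (hypothetical code): if the insn at the
   likely target .L4 is "add %o2, 4, %o2", we may put that add in the
   delay slot (where it executes on both paths), emit a compensating
   "sub %o2, 4, %o2" after the branch so the fall-through path is
   unchanged, and redirect the branch past the add at .L4.  */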
2771 if (delay_list == 0 && likely
2772 && new_thread && !ANY_RETURN_P (new_thread)
2773 && NONJUMP_INSN_P (new_thread)
2774 && !RTX_FRAME_RELATED_P (new_thread)
2775 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2776 && asm_noperands (PATTERN (new_thread)) < 0)
2778 rtx pat = PATTERN (new_thread);
2779 rtx dest;
2780 rtx src;
2782 /* We know "new_thread" is an insn due to NONJUMP_INSN_P (new_thread)
2783 above. */
2784 trial = as_a <rtx_insn *> (new_thread);
2785 pat = PATTERN (trial);
2787 if (!NONJUMP_INSN_P (trial)
2788 || GET_CODE (pat) != SET
2789 || ! eligible_for_delay (insn, 0, trial, flags)
2790 || can_throw_internal (trial))
2791 return 0;
2793 dest = SET_DEST (pat), src = SET_SRC (pat);
2794 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2795 && rtx_equal_p (XEXP (src, 0), dest)
2796 && (!FLOAT_MODE_P (GET_MODE (src))
2797 || flag_unsafe_math_optimizations)
2798 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1))
2799 && ! side_effects_p (pat))
2801 rtx other = XEXP (src, 1);
2802 rtx new_arith;
2803 rtx_insn *ninsn;
2805 /* If this is a constant adjustment, use the same code with
2806 the negated constant. Otherwise, reverse the sense of the
2807 arithmetic. */
2808 if (CONST_INT_P (other))
2809 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2810 negate_rtx (GET_MODE (src), other));
2811 else
2812 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2813 GET_MODE (src), dest, other);
2815 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2816 insn);
2818 if (recog_memoized (ninsn) < 0
2819 || (extract_insn (ninsn),
2820 !constrain_operands (1, get_preferred_alternatives (ninsn))))
2822 delete_related_insns (ninsn);
2823 return 0;
2826 if (own_thread)
2828 update_block (trial, thread);
2829 if (trial == thread)
2831 thread = next_active_insn (thread);
2832 if (new_thread == trial)
2833 new_thread = thread;
2835 delete_related_insns (trial);
2837 else
2838 new_thread = next_active_insn (trial);
2840 ninsn = own_thread ? trial : copy_delay_slot_insn (trial);
2841 if (thread_if_true)
2842 INSN_FROM_TARGET_P (ninsn) = 1;
2844 delay_list = add_to_delay_list (ninsn, NULL);
2845 (*pslots_filled)++;
2849 if (delay_list && must_annul)
2850 INSN_ANNULLED_BRANCH_P (insn) = 1;
2852 /* If we are to branch into the middle of this thread, find an appropriate
2853 label or make a new one if none, and redirect INSN to it. If we hit the
2854 end of the function, use the end-of-function label. */
2855 if (new_thread != thread)
2857 rtx label;
2858 bool crossing = false;
2860 gcc_assert (thread_if_true);
2862 if (new_thread && simplejump_or_return_p (new_thread)
2863 && redirect_with_delay_list_safe_p (insn,
2864 JUMP_LABEL (new_thread),
2865 delay_list))
2866 new_thread = follow_jumps (JUMP_LABEL (new_thread), insn,
2867 &crossing);
2869 if (ANY_RETURN_P (new_thread))
2870 label = find_end_label (new_thread);
2871 else if (LABEL_P (new_thread))
2872 label = new_thread;
2873 else
2874 label = get_label_before (as_a <rtx_insn *> (new_thread),
2875 JUMP_LABEL (insn));
2877 if (label)
2879 reorg_redirect_jump (insn, label);
2880 if (crossing)
2881 CROSSING_JUMP_P (insn) = 1;
2885 return delay_list;
2888 /* Make another attempt to find insns to place in delay slots.
2890 We previously looked for insns located in front of the delay insn
2891 and, for non-jump delay insns, located behind the delay insn.
   Here we only try to schedule jump insns and try to move insns from
   either the target or the following insns into the delay slot.  If
   annulling is supported, we are likely to be able to do this.
   Otherwise, we can do this only if safe.  */
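/* For instance (illustrative, SPARC-like syntax), on a machine with
   annulling, a conditional branch predicted taken can copy the first
   insn from its target into an annulling slot:

	beq,a	.L1		! slot executes only if the branch is taken
	 add	%o1, 1, %o1	! copy of the insn at .L1
	...
   .L1:	add	%o1, 1, %o1

   and the branch is then redirected past the insn it copied.  */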
2898 static void
2899 fill_eager_delay_slots (void)
2901 rtx_insn *insn;
2902 int i;
2903 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2905 for (i = 0; i < num_unfilled_slots; i++)
2907 rtx condition;
2908 rtx target_label, insn_at_target;
2909 rtx_insn *fallthrough_insn;
2910 rtx_insn_list *delay_list = 0;
2911 int own_target;
2912 int own_fallthrough;
2913 int prediction, slots_to_fill, slots_filled;
2915 insn = unfilled_slots_base[i];
2916 if (insn == 0
2917 || insn->deleted ()
2918 || !JUMP_P (insn)
2919 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
2920 continue;
2922 slots_to_fill = num_delay_slots (insn);
/* Some machine descriptions define instructions that have delay slots
   only in certain circumstances, which may depend on nearby insns
   (which change due to reorg's actions).
2927 For example, the PA port normally has delay slots for unconditional
2928 jumps.
2930 However, the PA port claims such jumps do not have a delay slot
2931 if they are immediate successors of certain CALL_INSNs. This
2932 allows the port to favor filling the delay slot of the call with
2933 the unconditional jump. */
2934 if (slots_to_fill == 0)
2935 continue;
2937 slots_filled = 0;
2938 target_label = JUMP_LABEL (insn);
2939 condition = get_branch_condition (insn, target_label);
2941 if (condition == 0)
2942 continue;
2944 /* Get the next active fallthrough and target insns and see if we own
2945 them. Then see whether the branch is likely true. We don't need
2946 to do a lot of this for unconditional branches. */
2948 insn_at_target = first_active_target_insn (target_label);
2949 own_target = own_thread_p (target_label, target_label, 0);
2951 if (condition == const_true_rtx)
2953 own_fallthrough = 0;
2954 fallthrough_insn = 0;
2955 prediction = 2;
2957 else
2959 fallthrough_insn = next_active_insn (insn);
2960 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
2961 prediction = mostly_true_jump (insn);
2964 /* If this insn is expected to branch, first try to get insns from our
2965 target, then our fallthrough insns. If it is not expected to branch,
2966 try the other order. */
2968 if (prediction > 0)
2970 delay_list
2971 = fill_slots_from_thread (insn, condition, insn_at_target,
2972 fallthrough_insn, prediction == 2, 1,
2973 own_target,
2974 slots_to_fill, &slots_filled, delay_list);
2976 if (delay_list == 0 && own_fallthrough)
2978 /* Even though we didn't find anything for delay slots,
2979 we might have found a redundant insn which we deleted
2980 from the thread that was filled. So we have to recompute
2981 the next insn at the target. */
2982 target_label = JUMP_LABEL (insn);
2983 insn_at_target = first_active_target_insn (target_label);
2985 delay_list
2986 = fill_slots_from_thread (insn, condition, fallthrough_insn,
2987 insn_at_target, 0, 0,
2988 own_fallthrough,
2989 slots_to_fill, &slots_filled,
2990 delay_list);
2993 else
2995 if (own_fallthrough)
2996 delay_list
2997 = fill_slots_from_thread (insn, condition, fallthrough_insn,
2998 insn_at_target, 0, 0,
2999 own_fallthrough,
3000 slots_to_fill, &slots_filled,
3001 delay_list);
3003 if (delay_list == 0)
3004 delay_list
3005 = fill_slots_from_thread (insn, condition, insn_at_target,
3006 next_active_insn (insn), 0, 1,
3007 own_target,
3008 slots_to_fill, &slots_filled,
3009 delay_list);
3012 if (delay_list)
3013 unfilled_slots_base[i]
3014 = emit_delay_sequence (insn, delay_list, slots_filled);
3016 if (slots_to_fill == slots_filled)
3017 unfilled_slots_base[i] = 0;
3019 note_delay_statistics (slots_filled, 1);
3023 static void delete_computation (rtx insn);
3025 /* Recursively delete prior insns that compute the value (used only by INSN
3026 which the caller is deleting) stored in the register mentioned by NOTE
3027 which is a REG_DEAD note associated with INSN. */
3029 static void
3030 delete_prior_computation (rtx note, rtx insn)
3032 rtx our_prev;
3033 rtx reg = XEXP (note, 0);
3035 for (our_prev = prev_nonnote_insn (insn);
3036 our_prev && (NONJUMP_INSN_P (our_prev)
3037 || CALL_P (our_prev));
3038 our_prev = prev_nonnote_insn (our_prev))
3040 rtx pat = PATTERN (our_prev);
3042 /* If we reach a CALL which is not calling a const function
3043 or the callee pops the arguments, then give up. */
3044 if (CALL_P (our_prev)
3045 && (! RTL_CONST_CALL_P (our_prev)
3046 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
3047 break;
3049 /* If we reach a SEQUENCE, it is too complex to try to
3050 do anything with it, so give up. We can be run during
3051 and after reorg, so SEQUENCE rtl can legitimately show
3052 up here. */
3053 if (GET_CODE (pat) == SEQUENCE)
3054 break;
3056 if (GET_CODE (pat) == USE
3057 && NONJUMP_INSN_P (XEXP (pat, 0)))
3058 /* reorg creates USEs that look like this. We leave them
3059 alone because reorg needs them for its own purposes. */
3060 break;
3062 if (reg_set_p (reg, pat))
3064 if (side_effects_p (pat) && !CALL_P (our_prev))
3065 break;
3067 if (GET_CODE (pat) == PARALLEL)
3069 /* If we find a SET of something else, we can't
3070 delete the insn. */
3072 int i;
3074 for (i = 0; i < XVECLEN (pat, 0); i++)
3076 rtx part = XVECEXP (pat, 0, i);
3078 if (GET_CODE (part) == SET
3079 && SET_DEST (part) != reg)
3080 break;
3083 if (i == XVECLEN (pat, 0))
3084 delete_computation (our_prev);
3086 else if (GET_CODE (pat) == SET
3087 && REG_P (SET_DEST (pat)))
3089 int dest_regno = REGNO (SET_DEST (pat));
3090 int dest_endregno = END_REGNO (SET_DEST (pat));
3091 int regno = REGNO (reg);
3092 int endregno = END_REGNO (reg);
3094 if (dest_regno >= regno
3095 && dest_endregno <= endregno)
3096 delete_computation (our_prev);
3098 /* We may have a multi-word hard register and some, but not
3099 all, of the words of the register are needed in subsequent
3100 insns. Write REG_UNUSED notes for those parts that were not
3101 needed. */
3102 else if (dest_regno <= regno
3103 && dest_endregno >= endregno)
3105 int i;
3107 add_reg_note (our_prev, REG_UNUSED, reg);
3109 for (i = dest_regno; i < dest_endregno; i++)
3110 if (! find_regno_note (our_prev, REG_UNUSED, i))
3111 break;
3113 if (i == dest_endregno)
3114 delete_computation (our_prev);
3118 break;
3121 /* If PAT references the register that dies here, it is an
3122 additional use. Hence any prior SET isn't dead. However, this
3123 insn becomes the new place for the REG_DEAD note. */
3124 if (reg_overlap_mentioned_p (reg, pat))
3126 XEXP (note, 1) = REG_NOTES (our_prev);
3127 REG_NOTES (our_prev) = note;
3128 break;
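/* A minimal illustration (hypothetical RTL): when deleting

	I2:	(set (reg 7) (reg 5))	; REG_DEAD (reg 5)

   the backward scan may find

	I1:	(set (reg 5) (plus (reg 6) (const_int 1)))

   with nothing in between touching (reg 5).  I1 computed a value used
   only by I2, so it can be deleted (recursively) as well.  */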
3133 /* Delete INSN and recursively delete insns that compute values used only
3134 by INSN. This uses the REG_DEAD notes computed during flow analysis.
3136 Look at all our REG_DEAD notes. If a previous insn does nothing other
3137 than set a register that dies in this insn, we can delete that insn
3138 as well.
3140 On machines with CC0, if CC0 is used in this insn, we may be able to
3141 delete the insn that set it. */
3143 static void
3144 delete_computation (rtx insn)
3146 rtx note, next;
3148 #ifdef HAVE_cc0
3149 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
3151 rtx prev = prev_nonnote_insn (insn);
3152 /* We assume that at this stage
3153 CC's are always set explicitly
3154 and always immediately before the jump that
3155 will use them. So if the previous insn
3156 exists to set the CC's, delete it
3157 (unless it performs auto-increments, etc.). */
3158 if (prev && NONJUMP_INSN_P (prev)
3159 && sets_cc0_p (PATTERN (prev)))
3161 if (sets_cc0_p (PATTERN (prev)) > 0
3162 && ! side_effects_p (PATTERN (prev)))
3163 delete_computation (prev);
3164 else
3165 /* Otherwise, show that cc0 won't be used. */
3166 add_reg_note (prev, REG_UNUSED, cc0_rtx);
3169 #endif
3171 for (note = REG_NOTES (insn); note; note = next)
3173 next = XEXP (note, 1);
3175 if (REG_NOTE_KIND (note) != REG_DEAD
3176 /* Verify that the REG_NOTE is legitimate. */
3177 || !REG_P (XEXP (note, 0)))
3178 continue;
3180 delete_prior_computation (note, insn);
3183 delete_related_insns (insn);
3186 /* If all INSN does is set the pc, delete it,
3187 and delete the insn that set the condition codes for it
3188 if that's what the previous thing was. */
3190 static void
3191 delete_jump (rtx_insn *insn)
3193 rtx set = single_set (insn);
3195 if (set && GET_CODE (SET_DEST (set)) == PC)
3196 delete_computation (insn);
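/* Scan backwards from the first active insn after X looking for a label;
   return it, or NULL if we reach SCAN_LIMIT or the start of the insn
   chain first.  */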
3199 static rtx_insn *
3200 label_before_next_insn (rtx x, rtx scan_limit)
3202 rtx_insn *insn = next_active_insn (x);
3203 while (insn)
3205 insn = PREV_INSN (insn);
3206 if (insn == scan_limit || insn == NULL_RTX)
3207 return NULL;
3208 if (LABEL_P (insn))
3209 break;
3211 return insn;
3214 /* Return TRUE if there is a NOTE_INSN_SWITCH_TEXT_SECTIONS note in between
3215 BEG and END. */
3217 static bool
3218 switch_text_sections_between_p (const rtx_insn *beg, const rtx_insn *end)
3220 const rtx_insn *p;
3221 for (p = beg; p != end; p = NEXT_INSN (p))
3222 if (NOTE_P (p) && NOTE_KIND (p) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
3223 return true;
3224 return false;
3228 /* Once we have tried two ways to fill a delay slot, make a pass over the
3229 code to try to improve the results and to do such things as more jump
3230 threading. */
3232 static void
3233 relax_delay_slots (rtx_insn *first)
3235 rtx_insn *insn, *next;
3236 rtx_sequence *pat;
3237 rtx trial;
3238 rtx_insn *delay_insn;
3239 rtx target_label;
3241 /* Look at every JUMP_INSN and see if we can improve it. */
3242 for (insn = first; insn; insn = next)
3244 rtx_insn *other;
3245 bool crossing;
3247 next = next_active_insn (insn);
3249 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3250 the next insn, or jumps to a label that is not the last of a
3251 group of consecutive labels. */
3252 if (JUMP_P (insn)
3253 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3254 && !ANY_RETURN_P (target_label = JUMP_LABEL (insn)))
3256 target_label
3257 = skip_consecutive_labels (follow_jumps (target_label, insn,
3258 &crossing));
3259 if (ANY_RETURN_P (target_label))
3260 target_label = find_end_label (target_label);
3262 if (target_label && next_active_insn (target_label) == next
3263 && ! condjump_in_parallel_p (insn)
3264 && ! (next && switch_text_sections_between_p (insn, next)))
3266 delete_jump (insn);
3267 continue;
3270 if (target_label && target_label != JUMP_LABEL (insn))
3272 reorg_redirect_jump (insn, target_label);
3273 if (crossing)
3274 CROSSING_JUMP_P (insn) = 1;
3277 /* See if this jump conditionally branches around an unconditional
3278 jump. If so, invert this jump and point it to the target of the
3279 second jump. Check if it's possible on the target. */
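/* Illustrative shape of that transformation (hypothetical labels):

	beq	.L2
	b	.L3
   .L2:	...

   becomes

	bne	.L3
   .L2:	...

   The condition is inverted so the jump targets .L3 directly, and the
   unconditional jump is deleted.  */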
3280 if (next && simplejump_or_return_p (next)
3281 && any_condjump_p (insn)
3282 && target_label
3283 && next_active_insn (target_label) == next_active_insn (next)
3284 && no_labels_between_p (insn, next)
3285 && targetm.can_follow_jump (insn, next))
3287 rtx label = JUMP_LABEL (next);
3289 /* Be careful how we do this to avoid deleting code or
3290 labels that are momentarily dead. See similar optimization
3291 in jump.c.
3293 We also need to ensure we properly handle the case when
3294 invert_jump fails. */
3296 ++LABEL_NUSES (target_label);
3297 if (!ANY_RETURN_P (label))
3298 ++LABEL_NUSES (label);
3300 if (invert_jump (insn, label, 1))
3302 delete_related_insns (next);
3303 next = insn;
3306 if (!ANY_RETURN_P (label))
3307 --LABEL_NUSES (label);
3309 if (--LABEL_NUSES (target_label) == 0)
3310 delete_related_insns (target_label);
3312 continue;
3316 /* If this is an unconditional jump and the previous insn is a
3317 conditional jump, try reversing the condition of the previous
3318 insn and swapping our targets. The next pass might be able to
3319 fill the slots.
3321 Don't do this if we expect the conditional branch to be true, because
3322 we would then be making the more common case longer. */
3324 if (simplejump_or_return_p (insn)
3325 && (other = prev_active_insn (insn)) != 0
3326 && any_condjump_p (other)
3327 && no_labels_between_p (other, insn)
3328 && 0 > mostly_true_jump (other))
3330 rtx other_target = JUMP_LABEL (other);
3331 target_label = JUMP_LABEL (insn);
3333 if (invert_jump (other, target_label, 0))
3334 reorg_redirect_jump (insn, other_target);
3337 /* Now look only at cases where we have a filled delay slot. */
3338 if (!NONJUMP_INSN_P (insn) || GET_CODE (PATTERN (insn)) != SEQUENCE)
3339 continue;
3341 pat = as_a <rtx_sequence *> (PATTERN (insn));
3342 delay_insn = pat->insn (0);
3344 /* See if the first insn in the delay slot is redundant with some
3345 previous insn. Remove it from the delay slot if so; then set up
3346 to reprocess this insn. */
3347 if (redundant_insn (pat->insn (1), delay_insn, 0))
3349 update_block (pat->insn (1), insn);
3350 delete_from_delay_slot (pat->insn (1));
3351 next = prev_active_insn (next);
3352 continue;
3355 /* See if we have a RETURN insn with a filled delay slot followed
   by a RETURN insn with an unfilled delay slot.  If so, we can delete
3357 the first RETURN (but not its delay insn). This gives the same
3358 effect in fewer instructions.
3360 Only do so if optimizing for size since this results in slower, but
3361 smaller code. */
3362 if (optimize_function_for_size_p (cfun)
3363 && ANY_RETURN_P (PATTERN (delay_insn))
3364 && next
3365 && JUMP_P (next)
3366 && PATTERN (next) == PATTERN (delay_insn))
3368 rtx_insn *after;
3369 int i;
3371 /* Delete the RETURN and just execute the delay list insns.
3373 We do this by deleting the INSN containing the SEQUENCE, then
3374 re-emitting the insns separately, and then deleting the RETURN.
3375 This allows the count of the jump target to be properly
3376 decremented.
3378 Note that we need to change the INSN_UID of the re-emitted insns
3379 since it is used to hash the insns for mark_target_live_regs and
3380 the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3382 Clear the from target bit, since these insns are no longer
3383 in delay slots. */
3384 for (i = 0; i < XVECLEN (pat, 0); i++)
3385 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3387 trial = PREV_INSN (insn);
3388 delete_related_insns (insn);
3389 gcc_assert (GET_CODE (pat) == SEQUENCE);
3390 add_insn_after (delay_insn, trial, NULL);
3391 after = delay_insn;
3392 for (i = 1; i < pat->len (); i++)
3393 after = emit_copy_of_insn_after (pat->insn (i), after);
3394 delete_scheduled_jump (delay_insn);
3395 continue;
3398 /* Now look only at the cases where we have a filled JUMP_INSN. */
3399 if (!JUMP_P (delay_insn)
3400 || !(condjump_p (delay_insn) || condjump_in_parallel_p (delay_insn)))
3401 continue;
3403 target_label = JUMP_LABEL (delay_insn);
3404 if (target_label && ANY_RETURN_P (target_label))
3405 continue;
3407 /* If this jump goes to another unconditional jump, thread it, but
3408 don't convert a jump into a RETURN here. */
3409 trial = skip_consecutive_labels (follow_jumps (target_label, delay_insn,
3410 &crossing));
3411 if (ANY_RETURN_P (trial))
3412 trial = find_end_label (trial);
3414 if (trial && trial != target_label
3415 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3417 reorg_redirect_jump (delay_insn, trial);
3418 target_label = trial;
3419 if (crossing)
3420 CROSSING_JUMP_P (insn) = 1;
3423 /* If the first insn at TARGET_LABEL is redundant with a previous
3424 insn, redirect the jump to the following insn and process again.
3425 We use next_real_insn instead of next_active_insn so we
3426 don't skip USE-markers, or we'll end up with incorrect
3427 liveness info. */
3428 trial = next_real_insn (target_label);
3429 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3430 && redundant_insn (trial, insn, 0)
3431 && ! can_throw_internal (trial))
3433 /* Figure out where to emit the special USE insn so we don't
3434 later incorrectly compute register live/death info. */
3435 rtx_insn *tmp = next_active_insn (trial);
3436 if (tmp == 0)
3437 tmp = find_end_label (simple_return_rtx);
3439 if (tmp)
3441 /* Insert the special USE insn and update dataflow info.
3442 We know "trial" is an insn here as it is the output of
3443 next_real_insn () above. */
3444 update_block (as_a <rtx_insn *> (trial), tmp);
3446 /* Now emit a label before the special USE insn, and
3447 redirect our jump to the new label. */
3448 target_label = get_label_before (PREV_INSN (tmp), target_label);
3449 reorg_redirect_jump (delay_insn, target_label);
3450 next = insn;
3451 continue;
3455 /* Similarly, if it is an unconditional jump with one insn in its
3456 delay list and that insn is redundant, thread the jump. */
3457 rtx_sequence *trial_seq =
3458 trial ? dyn_cast <rtx_sequence *> (PATTERN (trial)) : NULL;
3459 if (trial_seq
3460 && trial_seq->len () == 2
3461 && JUMP_P (trial_seq->insn (0))
3462 && simplejump_or_return_p (trial_seq->insn (0))
3463 && redundant_insn (trial_seq->insn (1), insn, 0))
3465 target_label = JUMP_LABEL (trial_seq->insn (0));
3466 if (ANY_RETURN_P (target_label))
3467 target_label = find_end_label (target_label);
3469 if (target_label
3470 && redirect_with_delay_slots_safe_p (delay_insn, target_label,
3471 insn))
3473 update_block (trial_seq->insn (1), insn);
3474 reorg_redirect_jump (delay_insn, target_label);
3475 next = insn;
3476 continue;
3480 /* See if we have a simple (conditional) jump that is useless. */
3481 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3482 && ! condjump_in_parallel_p (delay_insn)
3483 && prev_active_insn (target_label) == insn
3484 && ! BARRIER_P (prev_nonnote_insn (target_label))
3485 #ifdef HAVE_cc0
3486 /* If the last insn in the delay slot sets CC0 for some insn,
3487 various code assumes that it is in a delay slot. We could
3488 put it back where it belonged and delete the register notes,
3489 but it doesn't seem worthwhile in this uncommon case. */
3490 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3491 REG_CC_USER, NULL_RTX)
3492 #endif
3495 rtx_insn *after;
3496 int i;
3498 /* All this insn does is execute its delay list and jump to the
3499 following insn. So delete the jump and just execute the delay
3500 list insns.
3502 We do this by deleting the INSN containing the SEQUENCE, then
3503 re-emitting the insns separately, and then deleting the jump.
3504 This allows the count of the jump target to be properly
3505 decremented.
3507 Note that we need to change the INSN_UID of the re-emitted insns
3508 since it is used to hash the insns for mark_target_live_regs and
3509 the re-emitted insns will no longer be wrapped up in a SEQUENCE.
3511 Clear the from target bit, since these insns are no longer
3512 in delay slots. */
3513 for (i = 0; i < XVECLEN (pat, 0); i++)
3514 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3516 trial = PREV_INSN (insn);
3517 delete_related_insns (insn);
3518 gcc_assert (GET_CODE (pat) == SEQUENCE);
3519 add_insn_after (delay_insn, trial, NULL);
3520 after = delay_insn;
3521 for (i = 1; i < pat->len (); i++)
3522 after = emit_copy_of_insn_after (pat->insn (i), after);
3523 delete_scheduled_jump (delay_insn);
3524 continue;
3525 }
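/* For example (hypothetical insns):

        beq L1  { slot: add r1,r2 }
   L1:  ...

   Taken or not, the branch only executes its delay list and lands at
   L1, so it is deleted and the "add" is re-emitted as an ordinary
   insn.  */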
3527 /* See if this is an unconditional jump around a single insn which is
3528 identical to the one in its delay slot. In this case, we can just
3529 delete the branch and the insn in its delay slot. */
3530 if (next && NONJUMP_INSN_P (next)
3531 && label_before_next_insn (next, insn) == target_label
3532 && simplejump_p (insn)
3533 && XVECLEN (pat, 0) == 2
3534 && rtx_equal_p (PATTERN (next), PATTERN (pat->insn (1))))
3535 {
3536 delete_related_insns (insn);
3537 continue;
3538 }
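/* For example (hypothetical insns):

        jump L1  { slot: add r1,r2 }
        add r1,r2
   L1:  ...

   Taking the jump executes the slot copy of the "add"; falling
   through would execute the identical insn.  Deleting the jump
   together with its slot leaves the remaining "add" to do the same
   work.  */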
3540 /* See if this jump (with its delay slots) conditionally branches
3541 around an unconditional jump (without delay slots). If so, invert
3542 this jump and point it to the target of the second jump. We cannot
3543 do this for annulled jumps, though. Again, don't convert a jump to
3544 a RETURN here. */
3545 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3546 && any_condjump_p (delay_insn)
3547 && next && simplejump_or_return_p (next)
3548 && next_active_insn (target_label) == next_active_insn (next)
3549 && no_labels_between_p (insn, next))
3550 {
3551 rtx label = JUMP_LABEL (next);
3552 rtx old_label = JUMP_LABEL (delay_insn);
3554 if (ANY_RETURN_P (label))
3555 label = find_end_label (label);
3557 /* find_end_label can generate a new label. Check this first. */
3558 if (label
3559 && no_labels_between_p (insn, next)
3560 && redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3561 {
3562 /* Be careful how we do this to avoid deleting code or labels
3563 that are momentarily dead. See similar optimization in
3564 jump.c */
3565 if (old_label)
3566 ++LABEL_NUSES (old_label);
3568 if (invert_jump (delay_insn, label, 1))
3569 {
3570 int i;
3572 /* Must update the INSN_FROM_TARGET_P bits now that
3573 the branch is reversed, so that mark_target_live_regs
3574 will handle the delay slot insn correctly. */
3575 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3576 {
3577 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3578 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3579 }
3581 delete_related_insns (next);
3582 next = insn;
3583 }
3585 if (old_label && --LABEL_NUSES (old_label) == 0)
3586 delete_related_insns (old_label);
3587 continue;
3588 }
3589 }
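/* Schematically (illustrative only):

        beq L1  { slot }                bne L2  { slot }
        jump L2                   =>
   L1:  ...                        L1:  ...

   The INSN_FROM_TARGET_P bits of the slot insns are flipped to match
   the reversed branch sense, and the unconditional jump is deleted.  */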
3591 /* If we own the thread opposite the way this insn branches, see if we
3592 can merge its delay slots with following insns. */
3593 if (INSN_FROM_TARGET_P (pat->insn (1))
3594 && own_thread_p (NEXT_INSN (insn), 0, 1))
3595 try_merge_delay_insns (insn, next);
3596 else if (! INSN_FROM_TARGET_P (pat->insn (1))
3597 && own_thread_p (target_label, target_label, 0))
3598 try_merge_delay_insns (insn, next_active_insn (target_label));
3600 /* If we get here, we haven't deleted INSN. But we may have deleted
3601 NEXT, so recompute it. */
3602 next = next_active_insn (insn);
3603 }
3604 }
3607 /* Look for filled jumps to the end of function label. We can try to convert
3608 them into RETURN insns if the insns in the delay slot are valid for the
3609 RETURN as well. */
3611 static void
3612 make_return_insns (rtx_insn *first)
3613 {
3614 rtx_insn *insn;
3615 rtx_insn *jump_insn;
3616 rtx real_return_label = function_return_label;
3617 rtx real_simple_return_label = function_simple_return_label;
3618 int slots, i;
3620 /* See if there is a RETURN insn in the function other than the one we
3621 made for END_OF_FUNCTION_LABEL. If so, arrange for anything we can't
3622 convert into a RETURN to jump to it instead. */
3623 for (insn = first; insn; insn = NEXT_INSN (insn))
3624 if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
3625 {
3626 rtx t = get_label_before (insn, NULL_RTX);
3627 if (PATTERN (insn) == ret_rtx)
3628 real_return_label = t;
3629 else
3630 real_simple_return_label = t;
3631 break;
3632 }
3634 /* Show an extra use of REAL_RETURN_LABEL and REAL_SIMPLE_RETURN_LABEL so
3635 they won't go away if either was equal to the end-of-function label. */
3636 if (real_return_label)
3637 LABEL_NUSES (real_return_label)++;
3638 if (real_simple_return_label)
3639 LABEL_NUSES (real_simple_return_label)++;
3641 /* Clear the list of insns to fill so we can use it. */
3642 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3644 for (insn = first; insn; insn = NEXT_INSN (insn))
3645 {
3646 int flags;
3647 rtx kind, real_label;
3649 /* Only look at filled JUMP_INSNs that go to the end of function
3650 label. */
3651 if (!NONJUMP_INSN_P (insn))
3652 continue;
3654 if (GET_CODE (PATTERN (insn)) != SEQUENCE)
3655 continue;
3657 rtx_sequence *pat = as_a <rtx_sequence *> (PATTERN (insn));
3659 if (!jump_to_label_p (pat->insn (0)))
3660 continue;
3662 if (JUMP_LABEL (pat->insn (0)) == function_return_label)
3663 {
3664 kind = ret_rtx;
3665 real_label = real_return_label;
3666 }
3667 else if (JUMP_LABEL (pat->insn (0)) == function_simple_return_label)
3668 {
3669 kind = simple_return_rtx;
3670 real_label = real_simple_return_label;
3671 }
3672 else
3673 continue;
3675 jump_insn = pat->insn (0);
3677 /* If we can't make the jump into a RETURN, try to redirect it to the best
3678 RETURN and go on to the next insn. */
3679 if (!reorg_redirect_jump (jump_insn, kind))
3680 {
3681 /* Make sure redirecting the jump will not invalidate the delay
3682 slot insns. */
3683 if (redirect_with_delay_slots_safe_p (jump_insn, real_label, insn))
3684 reorg_redirect_jump (jump_insn, real_label);
3685 continue;
3686 }
3688 /* See if this RETURN can accept the insns currently in its delay slot.
3689 It can if it has at least as many slots and the contents
3690 of each are valid. */
3692 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3693 slots = num_delay_slots (jump_insn);
3694 if (slots >= XVECLEN (pat, 0) - 1)
3695 {
3696 for (i = 1; i < XVECLEN (pat, 0); i++)
3697 if (! (
3698 #ifdef ANNUL_IFFALSE_SLOTS
3699 (INSN_ANNULLED_BRANCH_P (jump_insn)
3700 && INSN_FROM_TARGET_P (pat->insn (i)))
3701 ? eligible_for_annul_false (jump_insn, i - 1,
3702 pat->insn (i), flags) :
3703 #endif
3704 #ifdef ANNUL_IFTRUE_SLOTS
3705 (INSN_ANNULLED_BRANCH_P (jump_insn)
3706 && ! INSN_FROM_TARGET_P (pat->insn (i)))
3707 ? eligible_for_annul_true (jump_insn, i - 1,
3708 pat->insn (i), flags) :
3709 #endif
3710 eligible_for_delay (jump_insn, i - 1,
3711 pat->insn (i), flags)))
3712 break;
3713 }
3714 else
3715 i = 0;
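/* At this point I equals XVECLEN (pat, 0) exactly when every
   delay-slot insn was found eligible for the new RETURN; the test
   below uses that as the "nothing left to fix up" signal, since
   reorg_redirect_jump above has already converted the jump.  */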
3717 if (i == XVECLEN (pat, 0))
3718 continue;
3720 /* We have to do something with this insn. If it is an unconditional
3721 RETURN, delete the SEQUENCE and output the individual insns,
3722 followed by the RETURN. Then set things up so we try to find
3723 insns for its delay slots, if it needs some. */
3724 if (ANY_RETURN_P (PATTERN (jump_insn)))
3725 {
3726 rtx_insn *prev = PREV_INSN (insn);
3728 delete_related_insns (insn);
3729 for (i = 1; i < XVECLEN (pat, 0); i++)
3730 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3732 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3733 emit_barrier_after (insn);
3735 if (slots)
3736 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3737 }
3738 else
3739 /* It is probably more efficient to keep this with its current
3740 delay slot as a branch to a RETURN. */
3741 reorg_redirect_jump (jump_insn, real_label);
3742 }
3744 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3745 new delay slots we have created. */
3746 if (real_return_label != NULL_RTX && --LABEL_NUSES (real_return_label) == 0)
3747 delete_related_insns (real_return_label);
3748 if (real_simple_return_label != NULL_RTX
3749 && --LABEL_NUSES (real_simple_return_label) == 0)
3750 delete_related_insns (real_simple_return_label);
3752 fill_simple_delay_slots (1);
3753 fill_simple_delay_slots (0);
3754 }
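/* The argument to fill_simple_delay_slots is its non_jumps_p flag, so
   the two calls above handle non-jump insns first and then jumps.  */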
3756 /* Try to find insns to place in delay slots. */
3758 static void
3759 dbr_schedule (rtx_insn *first)
3760 {
3761 rtx_insn *insn, *next, *epilogue_insn = 0;
3762 int i;
3763 bool need_return_insns;
3765 /* If the current function has no insns other than the prologue and
3766 epilogue, then do not try to fill any delay slots. */
3767 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
3768 return;
3770 /* Find the highest INSN_UID and allocate and initialize our map from
3771 INSN_UID's to position in code. */
3772 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
3773 {
3774 if (INSN_UID (insn) > max_uid)
3775 max_uid = INSN_UID (insn);
3776 if (NOTE_P (insn)
3777 && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
3778 epilogue_insn = insn;
3779 }
3781 uid_to_ruid = XNEWVEC (int, max_uid + 1);
3782 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
3783 uid_to_ruid[INSN_UID (insn)] = i;
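/* uid_to_ruid maps each INSN_UID to the insn's ordinal position in
   the insn stream, so later "does A come before B?" questions reduce
   to comparing two integers.  */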
3785 /* Initialize the list of insns that need filling. */
3786 if (unfilled_firstobj == 0)
3787 {
3788 gcc_obstack_init (&unfilled_slots_obstack);
3789 unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
3790 }
3792 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
3793 {
3794 rtx target;
3796 /* Skip vector tables. We can't get attributes for them. */
3797 if (JUMP_TABLE_DATA_P (insn))
3798 continue;
3800 if (JUMP_P (insn))
3801 INSN_ANNULLED_BRANCH_P (insn) = 0;
3802 INSN_FROM_TARGET_P (insn) = 0;
3804 if (num_delay_slots (insn) > 0)
3805 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3807 /* Ensure all jumps go to the last of a set of consecutive labels. */
3808 if (JUMP_P (insn)
3809 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3810 && !ANY_RETURN_P (JUMP_LABEL (insn))
3811 && ((target = skip_consecutive_labels (JUMP_LABEL (insn)))
3812 != JUMP_LABEL (insn)))
3813 redirect_jump (insn, target, 1);
3814 }
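/* For example, with consecutive labels

   L1:
   L2:  insn

   a conditional jump to L1 is redirected to L2 above, giving all such
   jumps one canonical target label.  */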
3816 init_resource_info (epilogue_insn);
3818 /* Show we haven't computed an end-of-function label yet. */
3819 function_return_label = function_simple_return_label = NULL;
3821 /* Initialize the statistics for this function. */
3822 memset (num_insns_needing_delays, 0, sizeof num_insns_needing_delays);
3823 memset (num_filled_delays, 0, sizeof num_filled_delays);
3825 /* Now do the delay slot filling. Try everything twice in case earlier
3826 changes make more slots fillable. */
3828 for (reorg_pass_number = 0;
3829 reorg_pass_number < MAX_REORG_PASSES;
3830 reorg_pass_number++)
3831 {
3832 fill_simple_delay_slots (1);
3833 fill_simple_delay_slots (0);
3834 fill_eager_delay_slots ();
3835 relax_delay_slots (first);
3836 }
3838 /* If we made an end of function label, indicate that it is now
3839 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
3840 If it is now unused, delete it. */
3841 if (function_return_label && --LABEL_NUSES (function_return_label) == 0)
3842 delete_related_insns (function_return_label);
3843 if (function_simple_return_label
3844 && --LABEL_NUSES (function_simple_return_label) == 0)
3845 delete_related_insns (function_simple_return_label);
3847 need_return_insns = false;
3848 #ifdef HAVE_return
3849 need_return_insns |= HAVE_return && function_return_label != 0;
3850 #endif
3851 #ifdef HAVE_simple_return
3852 need_return_insns |= HAVE_simple_return && function_simple_return_label != 0;
3853 #endif
3854 if (need_return_insns)
3855 make_return_insns (first);
3857 /* Delete any USE insns made by update_block; subsequent passes don't need
3858 them or know how to deal with them. */
3859 for (insn = first; insn; insn = next)
3860 {
3861 next = NEXT_INSN (insn);
3863 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE
3864 && INSN_P (XEXP (PATTERN (insn), 0)))
3865 next = delete_related_insns (insn);
3866 }
3868 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3870 /* It is not clear why the line below is needed, but it does seem to be. */
3871 unfilled_firstobj = XOBNEWVAR (&unfilled_slots_obstack, rtx, 0);
3873 if (dump_file)
3874 {
3875 int i, j, need_comma;
3876 int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
3877 int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];
3879 for (reorg_pass_number = 0;
3880 reorg_pass_number < MAX_REORG_PASSES;
3881 reorg_pass_number++)
3882 {
3883 fprintf (dump_file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
3884 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
3885 {
3886 need_comma = 0;
3887 fprintf (dump_file, ";; Reorg function #%d\n", i);
3889 fprintf (dump_file, ";; %d insns needing delay slots\n;; ",
3890 num_insns_needing_delays[i][reorg_pass_number]);
3892 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3893 if (num_filled_delays[i][j][reorg_pass_number])
3894 {
3895 if (need_comma)
3896 fprintf (dump_file, ", ");
3897 need_comma = 1;
3898 fprintf (dump_file, "%d got %d delays",
3899 num_filled_delays[i][j][reorg_pass_number], j);
3900 }
3901 fprintf (dump_file, "\n");
3902 }
3903 }
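/* A dump line produced above might read, e.g.,
   ";; 5 insns needing delay slots" followed by "3 got 1 delays",
   meaning three of the candidates had one slot filled (numbers
   illustrative only).  */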
3904 memset (total_delay_slots, 0, sizeof total_delay_slots);
3905 memset (total_annul_slots, 0, sizeof total_annul_slots);
3906 for (insn = first; insn; insn = NEXT_INSN (insn))
3907 {
3908 if (! insn->deleted ()
3909 && NONJUMP_INSN_P (insn)
3910 && GET_CODE (PATTERN (insn)) != USE
3911 && GET_CODE (PATTERN (insn)) != CLOBBER)
3912 {
3913 if (GET_CODE (PATTERN (insn)) == SEQUENCE)
3914 {
3915 rtx control;
3916 j = XVECLEN (PATTERN (insn), 0) - 1;
3917 if (j > MAX_DELAY_HISTOGRAM)
3918 j = MAX_DELAY_HISTOGRAM;
3919 control = XVECEXP (PATTERN (insn), 0, 0);
3920 if (JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control))
3921 total_annul_slots[j]++;
3922 else
3923 total_delay_slots[j]++;
3924 }
3925 else if (num_delay_slots (insn) > 0)
3926 total_delay_slots[0]++;
3927 }
3928 }
3929 fprintf (dump_file, ";; Reorg totals: ");
3930 need_comma = 0;
3931 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3932 {
3933 if (total_delay_slots[j])
3934 {
3935 if (need_comma)
3936 fprintf (dump_file, ", ");
3937 need_comma = 1;
3938 fprintf (dump_file, "%d got %d delays", total_delay_slots[j], j);
3939 }
3940 }
3941 fprintf (dump_file, "\n");
3942 #if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
3943 fprintf (dump_file, ";; Reorg annuls: ");
3944 need_comma = 0;
3945 for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
3946 {
3947 if (total_annul_slots[j])
3948 {
3949 if (need_comma)
3950 fprintf (dump_file, ", ");
3951 need_comma = 1;
3952 fprintf (dump_file, "%d got %d delays", total_annul_slots[j], j);
3953 }
3954 }
3955 fprintf (dump_file, "\n");
3956 #endif
3957 fprintf (dump_file, "\n");
3958 }
3960 if (!sibling_labels.is_empty ())
3961 {
3962 update_alignments (sibling_labels);
3963 sibling_labels.release ();
3964 }
3966 free_resource_info ();
3967 free (uid_to_ruid);
3968 crtl->dbr_scheduled_p = true;
3969 }
3970 #endif /* DELAY_SLOTS */
3972 /* Run delay slot optimization. */
3973 static unsigned int
3974 rest_of_handle_delay_slots (void)
3975 {
3976 #ifdef DELAY_SLOTS
3977 dbr_schedule (get_insns ());
3978 #endif
3979 return 0;
3980 }
3982 namespace {
3984 const pass_data pass_data_delay_slots =
3985 {
3986 RTL_PASS, /* type */
3987 "dbr", /* name */
3988 OPTGROUP_NONE, /* optinfo_flags */
3989 TV_DBR_SCHED, /* tv_id */
3990 0, /* properties_required */
3991 0, /* properties_provided */
3992 0, /* properties_destroyed */
3993 0, /* todo_flags_start */
3994 0, /* todo_flags_finish */
3995 };
3997 class pass_delay_slots : public rtl_opt_pass
3998 {
3999 public:
4000 pass_delay_slots (gcc::context *ctxt)
4001 : rtl_opt_pass (pass_data_delay_slots, ctxt)
4002 {}
4004 /* opt_pass methods: */
4005 virtual bool gate (function *);
4006 virtual unsigned int execute (function *)
4007 {
4008 return rest_of_handle_delay_slots ();
4009 }
4011 }; // class pass_delay_slots
4013 bool
4014 pass_delay_slots::gate (function *)
4015 {
4016 #ifdef DELAY_SLOTS
4017 /* At -O0 dataflow info isn't updated after RA. */
4018 return optimize > 0 && flag_delayed_branch && !crtl->dbr_scheduled_p;
4019 #else
4020 return 0;
4021 #endif
4022 }
4024 } // anon namespace
4026 rtl_opt_pass *
4027 make_pass_delay_slots (gcc::context *ctxt)
4028 {
4029 return new pass_delay_slots (ctxt);
4030 }
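/* Like the other pass factories, this is called from the pass manager
   (the pass is listed in passes.def) to create the delay-branch
   scheduling pass object.  */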
4032 /* Machine dependent reorg pass. */
4034 namespace {
4036 const pass_data pass_data_machine_reorg =
4037 {
4038 RTL_PASS, /* type */
4039 "mach", /* name */
4040 OPTGROUP_NONE, /* optinfo_flags */
4041 TV_MACH_DEP, /* tv_id */
4042 0, /* properties_required */
4043 0, /* properties_provided */
4044 0, /* properties_destroyed */
4045 0, /* todo_flags_start */
4046 0, /* todo_flags_finish */
4047 };
4049 class pass_machine_reorg : public rtl_opt_pass
4050 {
4051 public:
4052 pass_machine_reorg (gcc::context *ctxt)
4053 : rtl_opt_pass (pass_data_machine_reorg, ctxt)
4054 {}
4056 /* opt_pass methods: */
4057 virtual bool gate (function *)
4058 {
4059 return targetm.machine_dependent_reorg != 0;
4060 }
4062 virtual unsigned int execute (function *)
4063 {
4064 targetm.machine_dependent_reorg ();
4065 return 0;
4066 }
4068 }; // class pass_machine_reorg
4070 } // anon namespace
4072 rtl_opt_pass *
4073 make_pass_machine_reorg (gcc::context *ctxt)
4074 {
4075 return new pass_machine_reorg (ctxt);
4076 }