/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   The TMS320C3x and C4x have three branch delay slots.  When the three
   slots are filled, the branch penalty is zero.  Most insns can fill the
   delay slots except jump insns.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
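
/* A minimal illustration (generic RISC assembly, not from any particular
   target) of what delay slot filling accomplishes.  The `add' below does
   not feed the comparison, so `fill_simple_delay_slots' can move it from
   before the branch into the slot:

	before reorg:			after reorg:
	    add   r3,r3,1		    cmp   r2,0
	    cmp   r2,0			    beq   L3
	    beq   L3			    add   r3,r3,1	; delay slot
	    nop

   The nop disappears and the branch penalty is hidden.  */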
#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tm_p.h"
#include "expr.h"
#include "function.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "insn-flags.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"
#include "resource.h"
#ifdef DELAY_SLOTS

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base	\
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next	\
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
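
/* An illustrative sketch (not part of the pass) of how this list is
   walked.  The base and end pointers must be re-fetched through the
   macros on every iteration, since growing the obstack may move it:

	rtx *slot;

	for (slot = unfilled_slots_base; slot < unfilled_slots_next; slot++)
	  if (*slot != 0)
	    ... try to fill the delay slots of *slot ...
 */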
/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
static int stop_search_p		PARAMS ((rtx, int));
static int resource_conflicts_p		PARAMS ((struct resources *,
						 struct resources *));
static int insn_references_resource_p	PARAMS ((rtx, struct resources *, int));
static int insn_sets_resource_p		PARAMS ((rtx, struct resources *, int));
static rtx find_end_label		PARAMS ((void));
static rtx emit_delay_sequence		PARAMS ((rtx, rtx, int));
static rtx add_to_delay_list		PARAMS ((rtx, rtx));
static rtx delete_from_delay_slot	PARAMS ((rtx));
static void delete_scheduled_jump	PARAMS ((rtx));
static void note_delay_statistics	PARAMS ((int, int));
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
static rtx optimize_skip		PARAMS ((rtx));
#endif
static int get_jump_flags		PARAMS ((rtx, rtx));
static int rare_destination		PARAMS ((rtx));
static int mostly_true_jump		PARAMS ((rtx, rtx));
static rtx get_branch_condition		PARAMS ((rtx, rtx));
static int condition_dominates_p	PARAMS ((rtx, rtx));
static int redirect_with_delay_slots_safe_p PARAMS ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PARAMS ((rtx, rtx, rtx));
static int check_annul_list_true_false	PARAMS ((int, rtx));
static rtx steal_delay_list_from_target PARAMS ((rtx, rtx, rtx, rtx,
						 struct resources *,
						 struct resources *,
						 struct resources *,
						 int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PARAMS ((rtx, rtx, rtx, rtx,
						      struct resources *,
						      struct resources *,
						      struct resources *,
						      int, int *, int *));
static void try_merge_delay_insns	PARAMS ((rtx, rtx));
static rtx redundant_insn		PARAMS ((rtx, rtx, rtx));
static int own_thread_p			PARAMS ((rtx, rtx, int));
static void update_block		PARAMS ((rtx, rtx));
static int reorg_redirect_jump		PARAMS ((rtx, rtx));
static void update_reg_dead_notes	PARAMS ((rtx, rtx));
static void fix_reg_dead_note		PARAMS ((rtx, rtx));
static void update_reg_unused_notes	PARAMS ((rtx, rtx));
static void fill_simple_delay_slots	PARAMS ((int));
static rtx fill_slots_from_thread	PARAMS ((rtx, rtx, rtx, rtx, int, int,
						 int, int, int *, rtx));
static void fill_eager_delay_slots	PARAMS ((void));
static void relax_delay_slots		PARAMS ((rtx));
#ifdef HAVE_return
static void make_return_insns		PARAMS ((rtx));
#endif
/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     rtx insn;
     int labels_p;
{
  if (insn == 0)
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      abort ();
    }
}
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.   CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }
  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);
#ifdef HAVE_return
      if (HAVE_return)
	{
	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);
	  emit_barrier ();
	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
#endif
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
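
/* For illustration, a branch with two filled slots becomes a single insn
   whose pattern looks roughly like

	(sequence [
	    (jump_insn ...)	;; the insn that required delay slots
	    (insn ...)		;; first delay slot
	    (insn ...)		;; second delay slot
	 ])

   which is why the rtvec below holds LENGTH + 1 elements.  */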
static rtx
emit_delay_sequence (insn, list, length)
     rtx insn;
     rtx list;
     int length;
{
  register int i = 1;
  register rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn != last)
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn != first)
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Note the calls to set_new_first_and_last_insn must occur after
     SEQ_INSN has been completely spliced into the insn stream.

     Otherwise CUR_INSN_UID will get set to an incorrect value because
     set_new_first_and_last_insn will not find SEQ_INSN in the chain.  */
  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  if (i != length + 1)
    abort ();

  return seq_insn;
}
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */
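
/* For example, adding C to the list (A B) yields

	(insn_list A (insn_list B (insn_list C nil)))

   that is, execution order A, B, C; the recursive call below walks to
   the tail and appends there.  */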
static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      clear_hashed_info_for_insn (insn);
      return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
/* Delete INSN from the delay slot of the insn that it is in, which may
   produce an insn with no delay slots.  Return the new insn.  */

static rtx
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);

  return trial;
}
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_insn (insn);
}
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
   use an annulling branch and put that insn in the delay slot.
   Use either a branch that annuls when the condition is true or
   invert the test with a branch that annuls when the condition is
   false.  This saves insns, since otherwise we must copy an insn
   from the L1 target.

	(orig)		(skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
	insn3		insn3	      L1':
					insn3

   2.  When a conditional branch skips over only one instruction,
   and after that, it unconditionally branches somewhere else,
   perform the similar optimization.  This saves executing the
   second branch in the case where the inverted condition is true.

	Bcc.n L1	Bcc',a L2
	insn		insn
      L1:	      L1:
	Bra L2		Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx
optimize_skip (insn)
     register rtx insn;
{
  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn))
       && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
      || (next_trial != 0
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Unconditional branches return no direction information and
   are predicted as very likely taken.  */
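
/* For instance (illustrative; the ATTR_FLAG_* bits come from
   insn-attr.h), a conditional branch to a label later in the stream that
   is predicted taken would yield ATTR_FLAG_forward | ATTR_FLAG_likely.  */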
static int
get_jump_flags (insn, label)
     rtx insn, label;
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If insn is a conditional branch, call mostly_true_jump to
     determine the branch prediction.

     Unconditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  abort ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if INSN will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */
static int
rare_destination (insn)
     rtx insn;
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;
	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;

	default:
	  break;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */
static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn;
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If branch probabilities are available, then use that number since it
     always gives a correct answer.  */
  if (flag_branch_probabilities)
    {
      rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
      if (note)
	{
	  int prob = XINT (note, 0);

	  if (prob >= REG_BR_PROB_BASE * 9 / 10)
	    return 2;
	  else if (prob >= REG_BR_PROB_BASE / 2)
	    return 1;
	  else if (prob >= REG_BR_PROB_BASE / 10)
	    return 0;
	  else
	    return -1;
	}
    }
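
  /* Illustrative thresholds, assuming the usual REG_BR_PROB_BASE of 10000:
     a REG_BR_PROB note of 9000 or more yields 2, 5000-8999 yields 1,
     1000-4999 yields 0, and anything below 1000 yields -1.  */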
  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
    return -1;

  if (target_label)
    {
      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  return 2;

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
	  return 1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 1;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
	return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
	return 1;
      break;

    default:
      break;
    }

  /* Predict that backward branches are usually taken and forward branches
     usually are not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */
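
/* For reference (illustrative only), the two conditional shapes handled
   below are

	(set (pc) (if_then_else COND (label_ref TARGET) (pc)))

   which yields COND itself, and the inverted form

	(set (pc) (if_then_else COND (pc) (label_ref TARGET)))

   for which a reversed comparison is constructed.  */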
static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
			   GET_MODE (XEXP (src, 0)),
			   XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

  return 0;
}
/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */
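
/* For example, (lt x 0) dominates a branch on (le x 0): whenever x < 0 we
   also have x <= 0, so the branch is certainly taken and
   comparison_dominates_p returns non-zero for the pair LT, LE.  */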
static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}
/* DELAY_LIST is a list of insns that have already been placed into delay
   slots.  See if all of them have the same annulling status as ANNUL_TRUE_P.
   If not, return 0; otherwise return 1.  */

static int
check_annul_list_true_false (annul_true_p, delay_list)
     int annul_true_p;
     rtx delay_list;
{
  rtx temp;

  if (delay_list)
    {
      for (temp = delay_list; temp; temp = XEXP (temp, 1))
	{
	  rtx trial = XEXP (temp, 0);

	  if ((annul_true_p && INSN_FROM_TARGET_P (trial))
	      || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
	    return 0;
	}
    }

  return 1;
}
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */
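
/* As an illustration, suppose INSN is a conditional branch to L1, and the
   insn at L1 is an unconditional branch to L2 whose delay slots are
   already filled.  If INSN's condition dominates, the slot insns of the
   branch at L1 may be copied into INSN's own slots, and *PNEW_THREAD is
   then set to the first active insn at L2 so that INSN can later be
   redirected past the intermediate branch.  */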
static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
			      pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int used_annul = 0;
  int i;
  struct resources cc_set;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.

     We can not steal the delay list if one of the instructions in the
     current delay_list modifies the condition codes and the jump in the
     sequence is a conditional jump.  We can not do this because we can
     not change the direction of the jump because the condition codes
     will affect the direction of the jump in the sequence.  */

  CLEAR_RESOURCE (&cc_set);
  for (temp = delay_list; temp; temp = XEXP (temp, 1))
    {
      rtx trial = XEXP (temp, 0);

      mark_set_resources (trial, &cc_set, 0, 1);
      if (insn_references_resource_p (XVECEXP (seq, 0, 0), &cc_set, 0))
	return delay_list;
    }

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul || (delay_list == NULL && new_delay_list == NULL))
	     && (must_annul = 1,
		 check_annul_list_true_false (0, delay_list)
		 && check_annul_list_true_false (0, new_delay_list)
		 && eligible_for_annul_false (insn, total_slots_filled,
					      trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  if (used_annul)
    *pannul_p = 1;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */
static rtx
steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
{
  int i;
  int flags;
  int must_annul = *pannul_p;
  int used_annul = 0;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (must_annul || delay_list == NULL) && (must_annul = 1,
	     check_annul_list_true_false (1, delay_list)
	     && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  if (must_annul)
	    used_annul = 1;
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  if (used_annul)
    *pannul_p = 1;
  return delay_list;
}
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */
static void
try_merge_delay_insns (insn, thread)
     rtx insn, thread;
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     INSN's delay slot.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    for (i = 1; i < num_slots; i++)
      if (XVECEXP (PATTERN (insn), 0, i))
	mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (GET_CODE (trial) == INSN
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_insn (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	}

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, 1);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  rtx new;

		  update_block (dtrial, thread);
		  new = delete_from_delay_slot (dtrial);
		  if (INSN_DELETED_P (thread))
		    thread = new;
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		/* Here SImode (instead of the usual VOIDmode) marks the
		   list entry as coming from a delay slot, so the cleanup
		   loop below knows to use delete_from_delay_slot rather
		   than delete_insn on it.  */
		merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
						  merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	  else
	    {
	      /* Keep track of the set/referenced resources for the delay
		 slots of any trial insns we encounter.  */
	      mark_set_resources (dtrial, &set, 0, 1);
	      mark_referenced_resources (dtrial, &needed, 1);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not do so any longer.  In that case delete
     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each
     insn in the delay list so that we know that it isn't only being used at
     the target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      rtx new;

	      update_block (XEXP (merged_insns, 0), thread);
	      new = delete_from_delay_slot (XEXP (merged_insns, 0));
	      if (INSN_DELETED_P (thread))
		thread = new;
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_insn (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
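
/* Illustrative case: several branches all target the same label L, and
   the first insn at L (say a register copy) has already been placed in
   the delay slot of an earlier branch on the path to TARGET.  When that
   same insn is considered for TARGET's slot, the backward scan finds the
   identical pattern already executed on the way here, so filling the
   slot with another copy would be redundant.  */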
1612 static rtx
1613 redundant_insn (insn, target, delay_list)
1614 rtx insn;
1615 rtx target;
1616 rtx delay_list;
1618 rtx target_main = target;
1619 rtx ipat = PATTERN (insn);
1620 rtx trial, pat;
1621 struct resources needed, set;
1622 int i;
1624 /* If INSN has any REG_UNUSED notes, it can't match anything since we
1625 are allowed to not actually assign to such a register. */
1626 if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
1627 return 0;
1629 /* Scan backwards looking for a match. */
1630 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
1632 if (GET_CODE (trial) == CODE_LABEL)
1633 return 0;
1635 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
1636 continue;
1638 pat = PATTERN (trial);
1639 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1640 continue;
1642 if (GET_CODE (pat) == SEQUENCE)
1644 /* Stop for a CALL and its delay slots because it is difficult to
1645 track its resource needs correctly. */
1646 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1647 return 0;
1649 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1650 slots because it is difficult to track its resource needs
1651 correctly. */
1653 #ifdef INSN_SETS_ARE_DELAYED
1654 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1655 return 0;
1656 #endif
1658 #ifdef INSN_REFERENCES_ARE_DELAYED
1659 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1660 return 0;
1661 #endif
1663 /* See if any of the insns in the delay slot match, updating
1664 resource requirements as we go. */
1665 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1666 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1667 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
1668 && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
1669 break;
1671 /* If found a match, exit this loop early. */
1672 if (i > 0)
1673 break;
1676 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
1677 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
1678 break;
1681 /* If we didn't find an insn that matches, return 0. */
1682 if (trial == 0)
1683 return 0;
1685 /* See what resources this insn sets and needs. If they overlap, or
1686 if this insn references CC0, it can't be redundant. */
1688 CLEAR_RESOURCE (&needed);
1689 CLEAR_RESOURCE (&set);
1690 mark_set_resources (insn, &set, 0, 1);
1691 mark_referenced_resources (insn, &needed, 1);
1693 /* If TARGET is a SEQUENCE, get the main insn. */
1694 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1695 target_main = XVECEXP (PATTERN (target), 0, 0);
1697 if (resource_conflicts_p (&needed, &set)
1698 #ifdef HAVE_cc0
1699 || reg_mentioned_p (cc0_rtx, ipat)
1700 #endif
1701 /* The insn requiring the delay may not set anything needed or set by
1702 INSN. */
1703 || insn_sets_resource_p (target_main, &needed, 1)
1704 || insn_sets_resource_p (target_main, &set, 1))
1705 return 0;
1707 /* Insns we pass may not set either NEEDED or SET, so merge them for
1708 simpler tests. */
1709 needed.memory |= set.memory;
1710 needed.unch_memory |= set.unch_memory;
1711 IOR_HARD_REG_SET (needed.regs, set.regs);
1713 /* This insn isn't redundant if it conflicts with an insn that either is
1714 or will be in a delay slot of TARGET. */
1716 while (delay_list)
1718 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
1719 return 0;
1720 delay_list = XEXP (delay_list, 1);
1723 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1724 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1725 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
1726 return 0;
1728 /* Scan backwards until we reach a label or an insn that uses something
1729 INSN sets or sets something INSN uses or sets. */
1731 for (trial = PREV_INSN (target);
1732 trial && GET_CODE (trial) != CODE_LABEL;
1733 trial = PREV_INSN (trial))
1735 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
1736 && GET_CODE (trial) != JUMP_INSN)
1737 continue;
1739 pat = PATTERN (trial);
1740 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1741 continue;
1743 if (GET_CODE (pat) == SEQUENCE)
1745 /* If this is a CALL_INSN and its delay slots, it is hard to track
1746 the resource needs properly, so give up. */
1747 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1748 return 0;
1750 /* If this is an INSN or JUMP_INSN with delayed effects, it
1751 is hard to track the resource needs properly, so give up. */
1753 #ifdef INSN_SETS_ARE_DELAYED
1754 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1755 return 0;
1756 #endif
1758 #ifdef INSN_REFERENCES_ARE_DELAYED
1759 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1760 return 0;
1761 #endif
1763 /* See if any of the insns in the delay slot match, updating
1764 resource requirements as we go. */
1765 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1767 rtx candidate = XVECEXP (pat, 0, i);
1769 /* If an insn will be annulled if the branch is false, it isn't
1770 considered as a possible duplicate insn. */
1771 if (rtx_equal_p (PATTERN (candidate), ipat)
1772 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1773 && INSN_FROM_TARGET_P (candidate)))
1775 /* Show that this insn will be used in the sequel. */
1776 INSN_FROM_TARGET_P (candidate) = 0;
1777 return candidate;
1780 /* Unless this is an annulled insn from the target of a branch,
1781 we must stop if it sets anything needed or set by INSN. */
1782 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1783 || ! INSN_FROM_TARGET_P (candidate))
1784 && insn_sets_resource_p (candidate, &needed, 1))
1785 return 0;
1789 /* If the insn requiring the delay slot conflicts with INSN, we
1790 must stop. */
1791 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
1792 return 0;
1794 else
1796 /* See if TRIAL is the same as INSN. */
1797 pat = PATTERN (trial);
1798 if (rtx_equal_p (pat, ipat))
1799 return trial;
1801 /* Can't go any further if TRIAL conflicts with INSN. */
1802 if (insn_sets_resource_p (trial, &needed, 1))
1803 return 0;
1807 return 0;
1810 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
1811 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1812 is non-zero, we are allowed to fall into this thread; otherwise, we are
1813 not.
1815 If LABEL is used more than once, or we pass a label other than LABEL before
1816 finding an active insn, we do not own this thread. */
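/* For illustration only (labels and insns hypothetical): we own THREAD in

       barrier
   L1:                       <- LABEL, used exactly once
       insn A                <- THREAD

   since the branch being scanned is the only way to reach insn A.  If L1
   had other users, or another label preceded insn A, removing insns from
   THREAD could break those other paths.  */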
1818 static int
1819 own_thread_p (thread, label, allow_fallthrough)
1820 rtx thread;
1821 rtx label;
1822 int allow_fallthrough;
1824 rtx active_insn;
1825 rtx insn;
1827 /* We don't own the function end. */
1828 if (thread == 0)
1829 return 0;
1831 /* Get the first active insn, or THREAD, if it is an active insn. */
1832 active_insn = next_active_insn (PREV_INSN (thread));
1834 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
1835 if (GET_CODE (insn) == CODE_LABEL
1836 && (insn != label || LABEL_NUSES (insn) != 1))
1837 return 0;
1839 if (allow_fallthrough)
1840 return 1;
1842 /* Ensure that we reach a BARRIER before any insn or label. */
1843 for (insn = prev_nonnote_insn (thread);
1844 insn == 0 || GET_CODE (insn) != BARRIER;
1845 insn = prev_nonnote_insn (insn))
1846 if (insn == 0
1847 || GET_CODE (insn) == CODE_LABEL
1848 || (GET_CODE (insn) == INSN
1849 && GET_CODE (PATTERN (insn)) != USE
1850 && GET_CODE (PATTERN (insn)) != CLOBBER))
1851 return 0;
1853 return 1;
1856 /* Called when INSN is being moved from a location near the target of a jump.
1857 We leave a marker of the form (use (INSN)) immediately in front
1858 of WHERE for mark_target_live_regs. These markers will be deleted when
1859 reorg finishes.
1861 We used to try to update the live status of registers if WHERE is at
1862 the start of a basic block, but that can't work since we may remove a
1863 BARRIER in relax_delay_slots. */
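/* Schematically, moving a hypothetical insn 123 leaves behind

       (insn (use (insn 123 ...)))

   so that mark_target_live_regs still sees the registers referenced by
   insn 123 as live at this point.  */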
1865 static void
1866 update_block (insn, where)
1867 rtx insn;
1868 rtx where;
1870 /* Ignore if this was in a delay slot and it came from the target of
1871 a branch. */
1872 if (INSN_FROM_TARGET_P (insn))
1873 return;
1875 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
1877 /* INSN might be making a value live in a block where it didn't use to
1878 be. So recompute liveness information for this block. */
1880 incr_ticks_for_insn (insn);
1883 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
1884 the basic block containing the jump. */
1886 static int
1887 reorg_redirect_jump (jump, nlabel)
1888 rtx jump;
1889 rtx nlabel;
1891 incr_ticks_for_insn (jump);
1892 return redirect_jump (jump, nlabel);
1895 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1896 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1897 that reference values used in INSN. If we find one, then we move the
1898 REG_DEAD note to INSN.
1900 This is needed to handle the case where a later insn (after INSN) has a
1901 REG_DEAD note for a register used by INSN, and this later insn subsequently
1902 gets moved before a CODE_LABEL because it is a redundant insn. In this
1903 case, mark_target_live_regs may be confused into thinking the register
1904 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
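/* A rough sketch of the hazard (register and insn names hypothetical):
   INSN uses r4, some insn P between INSN and DELAYED_INSN carries
   (REG_DEAD r4), and P is later moved above a CODE_LABEL as a redundant
   insn.  The stale note would then make mark_target_live_regs think r4
   is dead at the label; moving the note onto INSN keeps the liveness
   data consistent.  */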
1906 static void
1907 update_reg_dead_notes (insn, delayed_insn)
1908 rtx insn, delayed_insn;
1910 rtx p, link, next;
1912 for (p = next_nonnote_insn (insn); p != delayed_insn;
1913 p = next_nonnote_insn (p))
1914 for (link = REG_NOTES (p); link; link = next)
1916 next = XEXP (link, 1);
1918 if (REG_NOTE_KIND (link) != REG_DEAD
1919 || GET_CODE (XEXP (link, 0)) != REG)
1920 continue;
1922 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1924 /* Move the REG_DEAD note from P to INSN. */
1925 remove_note (p, link);
1926 XEXP (link, 1) = REG_NOTES (insn);
1927 REG_NOTES (insn) = link;
1932 /* Called when an insn redundant with start_insn is deleted. If there
1933 is a REG_DEAD note for the target of start_insn between start_insn
1934 and stop_insn, then the REG_DEAD note needs to be deleted since the
1935 value no longer dies there.
1937 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1938 confused into thinking the register is dead. */
1940 static void
1941 fix_reg_dead_note (start_insn, stop_insn)
1942 rtx start_insn, stop_insn;
1944 rtx p, link, next;
1946 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1947 p = next_nonnote_insn (p))
1948 for (link = REG_NOTES (p); link; link = next)
1950 next = XEXP (link, 1);
1952 if (REG_NOTE_KIND (link) != REG_DEAD
1953 || GET_CODE (XEXP (link, 0)) != REG)
1954 continue;
1956 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1958 remove_note (p, link);
1959 return;
1964 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1966 This handles the case of udivmodXi4 instructions which optimize their
1967 output depending on whether any REG_UNUSED notes are present.
1968 We must make sure that INSN calculates as many results as REDUNDANT_INSN
1969 does. */
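/* A purely illustrative case: INSN and REDUNDANT_INSN are both divmod
   insns; INSN carries (REG_UNUSED r5) on its remainder output but
   REDUNDANT_INSN does not.  Since REDUNDANT_INSN's consumers now rely
   on INSN to produce r5, the REG_UNUSED note on INSN must be removed so
   that the remainder is not optimized away.  */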
1971 static void
1972 update_reg_unused_notes (insn, redundant_insn)
1973 rtx insn, redundant_insn;
1975 rtx link, next;
1977 for (link = REG_NOTES (insn); link; link = next)
1979 next = XEXP (link, 1);
1981 if (REG_NOTE_KIND (link) != REG_UNUSED
1982 || GET_CODE (XEXP (link, 0)) != REG)
1983 continue;
1985 if (! find_regno_note (redundant_insn, REG_UNUSED,
1986 REGNO (XEXP (link, 0))))
1987 remove_note (insn, link);
1991 /* Scan a function looking for insns that need a delay slot and find insns to
1992 put into the delay slot.
1994 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
1995 as calls). We do these first since we don't want jump insns (that are
1996 easier to fill) to get the only insns that could be used for non-jump insns.
1997 When it is zero, only try to fill JUMP_INSNs.
1999 When slots are filled in this manner, the insns (including the
2000 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2001 it is possible to tell whether a delay slot has really been filled
2002 or not. `final' knows how to deal with this, by communicating
2003 through FINAL_SEQUENCE. */
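/* Schematically (insn numbers hypothetical), a call with one filled
   slot is repackaged as

       (insn 12 (sequence [ (call_insn 10 ...)
                            (insn 11 ...) ]))

   while a call with an unfilled slot remains a bare CALL_INSN.  */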
2005 static void
2006 fill_simple_delay_slots (non_jumps_p)
2007 int non_jumps_p;
2009 register rtx insn, pat, trial, next_trial;
2010 register int i;
2011 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2012 struct resources needed, set;
2013 int slots_to_fill, slots_filled;
2014 rtx delay_list;
2016 for (i = 0; i < num_unfilled_slots; i++)
2018 int flags;
2019 /* Get the next insn to fill. If it has already had any slots assigned,
2020 we can't do anything with it. Maybe we'll improve this later. */
2022 insn = unfilled_slots_base[i];
2023 if (insn == 0
2024 || INSN_DELETED_P (insn)
2025 || (GET_CODE (insn) == INSN
2026 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2027 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2028 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2029 continue;
2031 /* It may have been that this insn used to need delay slots, but
2032 now doesn't; ignore in that case. This can happen, for example,
2033 on the HP PA RISC, where the number of delay slots depends on
2034 what insns are nearby. */
2035 slots_to_fill = num_delay_slots (insn);
2037 /* Some machine descriptions have defined instructions to have
2038 delay slots only in certain circumstances which may depend on
2039 nearby insns (which change due to reorg's actions).
2041 For example, the PA port normally has delay slots for unconditional
2042 jumps.
2044 However, the PA port claims such jumps do not have a delay slot
2045 if they are immediate successors of certain CALL_INSNs. This
2046 allows the port to favor filling the delay slot of the call with
2047 the unconditional jump. */
2048 if (slots_to_fill == 0)
2049 continue;
2051 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2052 says how many. After initialization, first try optimizing
2054 call _foo call _foo
2055 nop add %o7,.-L1,%o7
2056 b,a L1
2059 If this case applies, the delay slot of the call is filled with
2060 the unconditional jump. This is done first to avoid having the
2061 delay slot of the call filled in the backward scan. Also, since
2062 the unconditional jump is likely to also have a delay slot, that
2063 insn must exist when it is subsequently scanned.
2065 This is tried on each insn with delay slots as some machines
2066 have insns which perform calls, but are not represented as
2067 CALL_INSNs. */
2069 slots_filled = 0;
2070 delay_list = 0;
2072 if (GET_CODE (insn) == JUMP_INSN)
2073 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2074 else
2075 flags = get_jump_flags (insn, NULL_RTX);
2077 if ((trial = next_active_insn (insn))
2078 && GET_CODE (trial) == JUMP_INSN
2079 && simplejump_p (trial)
2080 && eligible_for_delay (insn, slots_filled, trial, flags)
2081 && no_labels_between_p (insn, trial))
2083 rtx *tmp;
2084 slots_filled++;
2085 delay_list = add_to_delay_list (trial, delay_list);
2087 /* TRIAL may have had its delay slot filled, then unfilled. When
2088 the delay slot is unfilled, TRIAL is placed back on the unfilled
2089 slots obstack. Unfortunately, it is placed on the end of the
2090 obstack, not in its original location. Therefore, we must search
2091 from entry i + 1 to the end of the unfilled slots obstack to
2092 try and find TRIAL. */
2093 tmp = &unfilled_slots_base[i + 1];
2094 while (*tmp != trial && tmp != unfilled_slots_next)
2095 tmp++;
2097 /* Remove the unconditional jump from consideration for delay slot
2098 filling and unthread it. */
2099 if (*tmp == trial)
2100 *tmp = 0;
2102 rtx next = NEXT_INSN (trial);
2103 rtx prev = PREV_INSN (trial);
2104 if (prev)
2105 NEXT_INSN (prev) = next;
2106 if (next)
2107 PREV_INSN (next) = prev;
2111 /* Now, scan backwards from the insn to search for a potential
2112 delay-slot candidate. Stop searching when a label or jump is hit.
2114 For each candidate, if it is to go into the delay slot (moved
2115 forward in execution sequence), it must not need or set any resources
2116 that were set by later insns and must not set any resources that
2117 are needed by those insns.
2119 The delay slot insn itself sets resources unless it is a call
2120 (in which case the called routine, not the insn itself, is doing
2121 the setting). */
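/* E.g. (registers hypothetical)

       add r2,r3,r4
       call _foo             <- needs one delay slot

   may become

       call _foo
       add r2,r3,r4          (in the delay slot)

   provided the add neither uses nor sets anything the call sets, and
   sets nothing the call needs, such as argument registers.  */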
2123 if (slots_filled < slots_to_fill)
2125 CLEAR_RESOURCE (&needed);
2126 CLEAR_RESOURCE (&set);
2127 mark_set_resources (insn, &set, 0, 0);
2128 mark_referenced_resources (insn, &needed, 0);
2130 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2131 trial = next_trial)
2133 next_trial = prev_nonnote_insn (trial);
2135 /* This must be an INSN or CALL_INSN. */
2136 pat = PATTERN (trial);
2138 /* USE and CLOBBER at this level are just for flow; ignore them. */
2139 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2140 continue;
2142 /* Check for resource conflict first, to avoid unnecessary
2143 splitting. */
2144 if (! insn_references_resource_p (trial, &set, 1)
2145 && ! insn_sets_resource_p (trial, &set, 1)
2146 && ! insn_sets_resource_p (trial, &needed, 1)
2147 #ifdef HAVE_cc0
2148 /* Can't separate set of cc0 from its use. */
2149 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2150 #endif
2153 trial = try_split (pat, trial, 1);
2154 next_trial = prev_nonnote_insn (trial);
2155 if (eligible_for_delay (insn, slots_filled, trial, flags))
2157 /* In this case, we are searching backward, so if we
2158 find insns to put on the delay list, we want
2159 to put them at the head, rather than the
2160 tail, of the list. */
2162 update_reg_dead_notes (trial, insn);
2163 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2164 trial, delay_list);
2165 update_block (trial, trial);
2166 delete_insn (trial);
2167 if (slots_to_fill == ++slots_filled)
2168 break;
2169 continue;
2173 mark_set_resources (trial, &set, 0, 1);
2174 mark_referenced_resources (trial, &needed, 1);
2178 /* If all needed slots haven't been filled, we come here. */
2180 /* Try to optimize the case of jumping around a single insn. */
2181 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2182 if (slots_filled != slots_to_fill
2183 && delay_list == 0
2184 && GET_CODE (insn) == JUMP_INSN
2185 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2187 delay_list = optimize_skip (insn);
2188 if (delay_list)
2189 slots_filled += 1;
2191 #endif
2193 /* Try to get insns from beyond the insn needing the delay slot.
2194 These insns can neither set nor reference resources set in insns being
2195 skipped, cannot set resources in the insn being skipped, and, if this
2196 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2197 call might not return).
2199 There used to be code which continued past the target label if
2200 we saw all uses of the target label. This code did not work,
2201 because it failed to account for some instructions which were
2202 both annulled and marked as from the target. This can happen as a
2203 result of optimize_skip. Since this code was redundant with
2204 fill_eager_delay_slots anyway, it was just deleted. */
2206 if (slots_filled != slots_to_fill
2207 && (GET_CODE (insn) != JUMP_INSN
2208 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2209 && ! simplejump_p (insn)
2210 && JUMP_LABEL (insn) != 0)))
2212 rtx target = 0;
2213 int maybe_never = 0;
2214 struct resources needed_at_jump;
2216 CLEAR_RESOURCE (&needed);
2217 CLEAR_RESOURCE (&set);
2219 if (GET_CODE (insn) == CALL_INSN)
2221 mark_set_resources (insn, &set, 0, 1);
2222 mark_referenced_resources (insn, &needed, 1);
2223 maybe_never = 1;
2225 else
2227 mark_set_resources (insn, &set, 0, 1);
2228 mark_referenced_resources (insn, &needed, 1);
2229 if (GET_CODE (insn) == JUMP_INSN)
2230 target = JUMP_LABEL (insn);
2233 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2235 rtx pat, trial_delay;
2237 next_trial = next_nonnote_insn (trial);
2239 if (GET_CODE (trial) == CODE_LABEL
2240 || GET_CODE (trial) == BARRIER)
2241 break;
2243 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2244 pat = PATTERN (trial);
2246 /* Stand-alone USE and CLOBBER are just for flow. */
2247 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2248 continue;
2250 /* If this already has filled delay slots, get the insn needing
2251 the delay slots. */
2252 if (GET_CODE (pat) == SEQUENCE)
2253 trial_delay = XVECEXP (pat, 0, 0);
2254 else
2255 trial_delay = trial;
2257 /* If this is a jump insn to our target, indicate that we have
2258 seen another jump to it. If we aren't handling a conditional
2259 jump, stop our search. Otherwise, compute the needs at its
2260 target and add them to NEEDED. */
2261 if (GET_CODE (trial_delay) == JUMP_INSN)
2263 if (target == 0)
2264 break;
2265 else if (JUMP_LABEL (trial_delay) != target)
2267 rtx ninsn =
2268 next_active_insn (JUMP_LABEL (trial_delay));
2270 mark_target_live_regs (get_insns (), ninsn,
2271 &needed_at_jump);
2272 needed.memory |= needed_at_jump.memory;
2273 needed.unch_memory |= needed_at_jump.unch_memory;
2274 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
2278 /* See if we have a resource problem before we try to
2279 split. */
2280 if (target == 0
2281 && GET_CODE (pat) != SEQUENCE
2282 && ! insn_references_resource_p (trial, &set, 1)
2283 && ! insn_sets_resource_p (trial, &set, 1)
2284 && ! insn_sets_resource_p (trial, &needed, 1)
2285 #ifdef HAVE_cc0
2286 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2287 #endif
2288 && ! (maybe_never && may_trap_p (pat))
2289 && (trial = try_split (pat, trial, 0))
2290 && eligible_for_delay (insn, slots_filled, trial, flags))
2292 next_trial = next_nonnote_insn (trial);
2293 delay_list = add_to_delay_list (trial, delay_list);
2295 #ifdef HAVE_cc0
2296 if (reg_mentioned_p (cc0_rtx, pat))
2297 link_cc0_insns (trial);
2298 #endif
2300 delete_insn (trial);
2301 if (slots_to_fill == ++slots_filled)
2302 break;
2303 continue;
2306 mark_set_resources (trial, &set, 0, 1);
2307 mark_referenced_resources (trial, &needed, 1);
2309 /* Ensure we don't put insns between the setting of cc and the
2310 comparison by moving a setting of cc into an earlier delay
2311 slot since these insns could clobber the condition code. */
2312 set.cc = 1;
2314 /* If this is a call or jump, we might not get here. */
2315 if (GET_CODE (trial_delay) == CALL_INSN
2316 || GET_CODE (trial_delay) == JUMP_INSN)
2317 maybe_never = 1;
2320 /* If there are slots left to fill and our search was stopped by an
2321 unconditional branch, try the insn at the branch target. We can
2322 redirect the branch if it works.
2324 Don't do this if the insn at the branch target is a branch. */
2325 if (slots_to_fill != slots_filled
2326 && trial
2327 && GET_CODE (trial) == JUMP_INSN
2328 && simplejump_p (trial)
2329 && (target == 0 || JUMP_LABEL (trial) == target)
2330 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2331 && ! (GET_CODE (next_trial) == INSN
2332 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2333 && GET_CODE (next_trial) != JUMP_INSN
2334 && ! insn_references_resource_p (next_trial, &set, 1)
2335 && ! insn_sets_resource_p (next_trial, &set, 1)
2336 && ! insn_sets_resource_p (next_trial, &needed, 1)
2337 #ifdef HAVE_cc0
2338 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2339 #endif
2340 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2341 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2342 && eligible_for_delay (insn, slots_filled, next_trial, flags))
2344 rtx new_label = next_active_insn (next_trial);
2346 if (new_label != 0)
2347 new_label = get_label_before (new_label);
2348 else
2349 new_label = find_end_label ();
2351 delay_list
2352 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2353 slots_filled++;
2354 reorg_redirect_jump (trial, new_label);
2356 /* If we merged because we both jumped to the same place,
2357 redirect the original insn also. */
2358 if (target)
2359 reorg_redirect_jump (insn, new_label);
2363 /* If this is an unconditional jump, then try to get insns from the
2364 target of the jump. */
2365 if (GET_CODE (insn) == JUMP_INSN
2366 && simplejump_p (insn)
2367 && slots_filled != slots_to_fill)
2368 delay_list
2369 = fill_slots_from_thread (insn, const_true_rtx,
2370 next_active_insn (JUMP_LABEL (insn)),
2371 NULL, 1, 1,
2372 own_thread_p (JUMP_LABEL (insn),
2373 JUMP_LABEL (insn), 0),
2374 slots_to_fill, &slots_filled,
2375 delay_list);
2377 if (delay_list)
2378 unfilled_slots_base[i]
2379 = emit_delay_sequence (insn, delay_list, slots_filled);
2381 if (slots_to_fill == slots_filled)
2382 unfilled_slots_base[i] = 0;
2384 note_delay_statistics (slots_filled, 0);
2387 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2388 /* See if the epilogue needs any delay slots. Try to fill them if so.
2389 The only thing we can do is scan backwards from the end of the
2390 function. If we did this in a previous pass, it is incorrect to do it
2391 again. */
2392 if (current_function_epilogue_delay_list)
2393 return;
2395 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2396 if (slots_to_fill == 0)
2397 return;
2399 slots_filled = 0;
2400 CLEAR_RESOURCE (&set);
2402 /* The frame pointer and stack pointer are needed at the beginning of
2403 the epilogue, so instructions setting them can not be put in the
2404 epilogue delay slot. However, everything else needed at function
2405 end is safe, so we don't want to use end_of_function_needs here. */
2406 CLEAR_RESOURCE (&needed);
2407 if (frame_pointer_needed)
2409 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2410 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2411 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2412 #endif
2413 #ifdef EXIT_IGNORE_STACK
2414 if (! EXIT_IGNORE_STACK
2415 || current_function_sp_is_unchanging)
2416 #endif
2417 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2419 else
2420 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2422 #ifdef EPILOGUE_USES
2423 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2425 if (EPILOGUE_USES (i))
2426 SET_HARD_REG_BIT (needed.regs, i);
2428 #endif
2430 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2431 trial = PREV_INSN (trial))
2433 if (GET_CODE (trial) == NOTE)
2434 continue;
2435 pat = PATTERN (trial);
2436 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2437 continue;
2439 if (! insn_references_resource_p (trial, &set, 1)
2440 && ! insn_sets_resource_p (trial, &needed, 1)
2441 && ! insn_sets_resource_p (trial, &set, 1)
2442 #ifdef HAVE_cc0
2443 /* Don't want to mess with cc0 here. */
2444 && ! reg_mentioned_p (cc0_rtx, pat)
2445 #endif
2448 trial = try_split (pat, trial, 1);
2449 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2451 /* Here as well we are searching backward, so put the
2452 insns we find on the head of the list. */
2454 current_function_epilogue_delay_list
2455 = gen_rtx_INSN_LIST (VOIDmode, trial,
2456 current_function_epilogue_delay_list);
2457 mark_end_of_function_resources (trial, 1);
2458 update_block (trial, trial);
2459 delete_insn (trial);
2461 /* Clear deleted bit so final.c will output the insn. */
2462 INSN_DELETED_P (trial) = 0;
2464 if (slots_to_fill == ++slots_filled)
2465 break;
2466 continue;
2470 mark_set_resources (trial, &set, 0, 1);
2471 mark_referenced_resources (trial, &needed, 1);
2474 note_delay_statistics (slots_filled, 0);
2475 #endif
2478 /* Try to find insns to place in delay slots.
2480 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2481 or is an unconditional branch if CONDITION is const_true_rtx.
2482 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2484 THREAD is a flow-of-control, either the insns to be executed if the
2485 branch is true or if the branch is false, THREAD_IF_TRUE says which.
2487 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2488 to see if any potential delay slot insns set things needed there.
2490 LIKELY is non-zero if it is extremely likely that the branch will be
2491 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2492 end of a loop back up to the top.
2494 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2495 thread. I.e., it is the fallthrough code of our jump or the target of the
2496 jump when we are the only jump going there.
2498 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2499 case, we can only take insns from the head of the thread for our delay
2500 slot. We then adjust the jump to point after the insns we have taken. */
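/* As a rough example (labels hypothetical), for

       beq r0,L1             <- INSN, THREAD_IF_TRUE nonzero
       insn A                <- OPPOSITE_THREAD (the fallthrough)
       ...
   L1: insn B                <- THREAD

   we try to move insn B into the delay slot of the beq.  If we do not
   own L1, insn B is copied rather than deleted and the branch is
   redirected to a point past it.  */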
2502 static rtx
2503 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
2504 thread_if_true, own_thread,
2505 slots_to_fill, pslots_filled, delay_list)
2506 rtx insn;
2507 rtx condition;
2508 rtx thread, opposite_thread;
2509 int likely;
2510 int thread_if_true;
2511 int own_thread;
2512 int slots_to_fill, *pslots_filled;
2513 rtx delay_list;
2515 rtx new_thread;
2516 struct resources opposite_needed, set, needed;
2517 rtx trial;
2518 int lose = 0;
2519 int must_annul = 0;
2520 int flags;
2522 /* Validate our arguments. */
2523 if ((condition == const_true_rtx && ! thread_if_true)
2524 || (! own_thread && ! thread_if_true))
2525 abort ();
2527 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2529 /* If our thread is the end of the subroutine, we can't get any delay
2530 insns from that. */
2531 if (thread == 0)
2532 return delay_list;
2534 /* If this is an unconditional branch, nothing is needed at the
2535 opposite thread. Otherwise, compute what is needed there. */
2536 if (condition == const_true_rtx)
2537 CLEAR_RESOURCE (&opposite_needed);
2538 else
2539 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2541 /* If the insn at THREAD can be split, do it here to avoid having to
2542 update THREAD and NEW_THREAD if it is done in the loop below. Also
2543 initialize NEW_THREAD. */
2545 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2547 /* Scan insns at THREAD. We are looking for an insn that can be removed
2548 from THREAD (it neither sets nor references resources that were set
2549 ahead of it and it doesn't set anything needed by the insns ahead of
2550 it) and that either can be placed in an annulling insn or isn't
2551 needed at OPPOSITE_THREAD. */
2553 CLEAR_RESOURCE (&needed);
2554 CLEAR_RESOURCE (&set);
2556 /* If we do not own this thread, we must stop as soon as we find
2557 something that we can't put in a delay slot, since all we can do
2558 is branch into THREAD at a later point. Therefore, labels stop
2559 the search if this is not the `true' thread. */
2561 for (trial = thread;
2562 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2563 trial = next_nonnote_insn (trial))
2565 rtx pat, old_trial;
2567 /* If we have passed a label, we no longer own this thread. */
2568 if (GET_CODE (trial) == CODE_LABEL)
2570 own_thread = 0;
2571 continue;
2574 pat = PATTERN (trial);
2575 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2576 continue;
2578 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2579 don't separate or copy insns that set and use CC0. */
2580 if (! insn_references_resource_p (trial, &set, 1)
2581 && ! insn_sets_resource_p (trial, &set, 1)
2582 && ! insn_sets_resource_p (trial, &needed, 1)
2583 #ifdef HAVE_cc0
2584 && ! (reg_mentioned_p (cc0_rtx, pat)
2585 && (! own_thread || ! sets_cc0_p (pat)))
2586 #endif
2589 rtx prior_insn;
2591 /* If TRIAL is redundant with some insn before INSN, we don't
2592 actually need to add it to the delay list; we can merely pretend
2593 we did. */
2594 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2596 fix_reg_dead_note (prior_insn, insn);
2597 if (own_thread)
2599 update_block (trial, thread);
2600 if (trial == thread)
2602 thread = next_active_insn (thread);
2603 if (new_thread == trial)
2604 new_thread = thread;
2607 delete_insn (trial);
2609 else
2611 update_reg_unused_notes (prior_insn, trial);
2612 new_thread = next_active_insn (trial);
2615 continue;
2618 /* There are two ways we can win: If TRIAL doesn't set anything
2619 needed at the opposite thread and can't trap, or if it can
2620 go into an annulled delay slot. */
2621 if (!must_annul
2622 && (condition == const_true_rtx
2623 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2624 && ! may_trap_p (pat))))
2626 old_trial = trial;
2627 trial = try_split (pat, trial, 0);
2628 if (new_thread == old_trial)
2629 new_thread = trial;
2630 if (thread == old_trial)
2631 thread = trial;
2632 pat = PATTERN (trial);
2633 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2634 goto winner;
2636 else if (0
2637 #ifdef ANNUL_IFTRUE_SLOTS
2638 || ! thread_if_true
2639 #endif
2640 #ifdef ANNUL_IFFALSE_SLOTS
2641 || thread_if_true
2642 #endif
2645 old_trial = trial;
2646 trial = try_split (pat, trial, 0);
2647 if (new_thread == old_trial)
2648 new_thread = trial;
2649 if (thread == old_trial)
2650 thread = trial;
2651 pat = PATTERN (trial);
2652 if ((must_annul || delay_list == NULL) && (thread_if_true
2653 ? check_annul_list_true_false (0, delay_list)
2654 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2655 : check_annul_list_true_false (1, delay_list)
2656 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2658 rtx temp;
2660 must_annul = 1;
2661 winner:
2663 #ifdef HAVE_cc0
2664 if (reg_mentioned_p (cc0_rtx, pat))
2665 link_cc0_insns (trial);
2666 #endif
2668 /* If we own this thread, delete the insn. If this is the
2669 destination of a branch, show that a basic block status
2670 may have been updated. In any case, mark the new
2671 starting point of this thread. */
2672 if (own_thread)
2674 update_block (trial, thread);
2675 if (trial == thread)
2677 thread = next_active_insn (thread);
2678 if (new_thread == trial)
2679 new_thread = thread;
2681 delete_insn (trial);
2683 else
2684 new_thread = next_active_insn (trial);
2686 temp = own_thread ? trial : copy_rtx (trial);
2687 if (thread_if_true)
2688 INSN_FROM_TARGET_P (temp) = 1;
2690 delay_list = add_to_delay_list (temp, delay_list);
2692 if (slots_to_fill == ++(*pslots_filled))
2694 /* Even though we have filled all the slots, we
2695 may be branching to a location that has a
2696 redundant insn. Skip any if so. */
2697 while (new_thread && ! own_thread
2698 && ! insn_sets_resource_p (new_thread, &set, 1)
2699 && ! insn_sets_resource_p (new_thread, &needed, 1)
2700 && ! insn_references_resource_p (new_thread,
2701 &set, 1)
2702 && (prior_insn
2703 = redundant_insn (new_thread, insn,
2704 delay_list)))
2706 /* We know we do not own the thread, so no need
2707 to call update_block and delete_insn. */
2708 fix_reg_dead_note (prior_insn, insn);
2709 update_reg_unused_notes (prior_insn, new_thread);
2710 new_thread = next_active_insn (new_thread);
2712 break;
2715 continue;
2720 /* This insn can't go into a delay slot. */
2721 lose = 1;
2722 mark_set_resources (trial, &set, 0, 1);
2723 mark_referenced_resources (trial, &needed, 1);
2725 /* Ensure we don't put insns between the setting of cc and the comparison
2726 by moving a setting of cc into an earlier delay slot since these insns
2727 could clobber the condition code. */
2728 set.cc = 1;
2730 /* If this insn is a register-register copy and the next insn has
2731 a use of our destination, change it to use our source. That way,
2732 it will become a candidate for our delay slot the next time
2733 through this loop. This case occurs commonly in loops that
2734 scan a list.
2736 We could check for more complex cases than those tested below,
2737 but it doesn't seem worth it. It might also be a good idea to try
2738 to swap the two insns. That might do better.
2740 We can't do this if the next insn modifies our destination, because
2741 that would make the replacement into the insn invalid. We also can't
2742 do this if it modifies our source, because it might be an earlyclobber
2743 operand. This latter test also prevents updating the contents of
2744 a PRE_INC. */
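/* An illustrative case (registers hypothetical):

       mov r2,r1             <- TRIAL, a reg-reg copy
       add r3,r2,r4          <- next insn reads the copy's destination

   is rewritten to

       mov r2,r1
       add r3,r1,r4

   so the add no longer depends on TRIAL's result and may itself become
   a delay-slot candidate the next time through this loop.  */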
2746 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
2747 && GET_CODE (SET_SRC (pat)) == REG
2748 && GET_CODE (SET_DEST (pat)) == REG)
2750 rtx next = next_nonnote_insn (trial);
2752 if (next && GET_CODE (next) == INSN
2753 && GET_CODE (PATTERN (next)) != USE
2754 && ! reg_set_p (SET_DEST (pat), next)
2755 && ! reg_set_p (SET_SRC (pat), next)
2756 && reg_referenced_p (SET_DEST (pat), PATTERN (next))
2757 && ! modified_in_p (SET_DEST (pat), next))
2758 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2762 /* If we stopped on a branch insn that has delay slots, see if we can
2763 steal some of the insns in those slots. */
2764 if (trial && GET_CODE (trial) == INSN
2765 && GET_CODE (PATTERN (trial)) == SEQUENCE
2766 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
2768 /* If this is the `true' thread, we will want to follow the jump,
2769 so we can only do this if we have taken everything up to here. */
2770 if (thread_if_true && trial == new_thread)
2771 delay_list
2772 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2773 delay_list, &set, &needed,
2774 &opposite_needed, slots_to_fill,
2775 pslots_filled, &must_annul,
2776 &new_thread);
2777 else if (! thread_if_true)
2778 delay_list
2779 = steal_delay_list_from_fallthrough (insn, condition,
2780 PATTERN (trial),
2781 delay_list, &set, &needed,
2782 &opposite_needed, slots_to_fill,
2783 pslots_filled, &must_annul);
2786 /* If we haven't found anything for this delay slot and it is very
2787 likely that the branch will be taken, see if the insn at our target
2788 increments or decrements a register with an increment that does not
2789 depend on the destination register. If so, try to place the opposite
2790 arithmetic insn after the jump insn and put the arithmetic insn in the
2791 delay slot. If we can't do this, return. */
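/* A sketch of the transformation (registers and labels hypothetical):

   L1:  add r2,r2,1          <- insn at the branch target
        ...
        bne r0,L1            <- INSN, very likely taken

   becomes

        bne r0,L1'           with  add r2,r2,1  in the delay slot
        sub r2,r2,1          <- compensating insn on the fallthrough
   L1': ...

   The add now executes whether or not the branch is taken, so the
   rarely-used fallthrough path must undo it.  */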
2792 if (delay_list == 0 && likely && new_thread
2793 && GET_CODE (new_thread) == INSN
2794 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2795 && asm_noperands (PATTERN (new_thread)) < 0)
2797 rtx pat = PATTERN (new_thread);
2798 rtx dest;
2799 rtx src;
2801 trial = new_thread;
2802 pat = PATTERN (trial);
2804 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
2805 || ! eligible_for_delay (insn, 0, trial, flags))
2806 return 0;
2808 dest = SET_DEST (pat), src = SET_SRC (pat);
2809 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2810 && rtx_equal_p (XEXP (src, 0), dest)
2811 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
2813 rtx other = XEXP (src, 1);
2814 rtx new_arith;
2815 rtx ninsn;
2817 /* If this is a constant adjustment, use the same code with
2818 the negated constant. Otherwise, reverse the sense of the
2819 arithmetic. */
2820 if (GET_CODE (other) == CONST_INT)
2821 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2822 negate_rtx (GET_MODE (src), other));
2823 else
2824 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2825 GET_MODE (src), dest, other);
2827 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2828 insn);
2830 if (recog_memoized (ninsn) < 0
2831 || (extract_insn (ninsn), ! constrain_operands (1)))
2833 delete_insn (ninsn);
2834 return 0;
2837 if (own_thread)
2839 update_block (trial, thread);
2840 if (trial == thread)
2842 thread = next_active_insn (thread);
2843 if (new_thread == trial)
2844 new_thread = thread;
2846 delete_insn (trial);
2848 else
2849 new_thread = next_active_insn (trial);
2851 ninsn = own_thread ? trial : copy_rtx (trial);
2852 if (thread_if_true)
2853 INSN_FROM_TARGET_P (ninsn) = 1;
2855 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2856 (*pslots_filled)++;
2860 if (delay_list && must_annul)
2861 INSN_ANNULLED_BRANCH_P (insn) = 1;
2863 /* If we are to branch into the middle of this thread, find an appropriate
2864 label or make a new one if none, and redirect INSN to it. If we hit the
2865 end of the function, use the end-of-function label. */
2866 if (new_thread != thread)
2868 rtx label;
2870 if (! thread_if_true)
2871 abort ();
2873 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
2874 && (simplejump_p (new_thread)
2875 || GET_CODE (PATTERN (new_thread)) == RETURN)
2876 && redirect_with_delay_list_safe_p (insn,
2877 JUMP_LABEL (new_thread),
2878 delay_list))
2879 new_thread = follow_jumps (JUMP_LABEL (new_thread));
2881 if (new_thread == 0)
2882 label = find_end_label ();
2883 else if (GET_CODE (new_thread) == CODE_LABEL)
2884 label = new_thread;
2885 else
2886 label = get_label_before (new_thread);
2888 reorg_redirect_jump (insn, label);
2891 return delay_list;
2894 /* Make another attempt to find insns to place in delay slots.
2896 We previously looked for insns located in front of the delay insn
2897 and, for non-jump delay insns, located behind the delay insn.
2899 Here we only try to schedule jump insns and try to move insns from either
2900 the target or the following insns into the delay slot. If annulling is
2901 supported, we will be likely to do this. Otherwise, we can do this only
2902 if safe. */
2904 static void
2905 fill_eager_delay_slots ()
2907 register rtx insn;
2908 register int i;
2909 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2911 for (i = 0; i < num_unfilled_slots; i++)
2913 rtx condition;
2914 rtx target_label, insn_at_target, fallthrough_insn;
2915 rtx delay_list = 0;
2916 int own_target;
2917 int own_fallthrough;
2918 int prediction, slots_to_fill, slots_filled;
2920 insn = unfilled_slots_base[i];
2921 if (insn == 0
2922 || INSN_DELETED_P (insn)
2923 || GET_CODE (insn) != JUMP_INSN
2924 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
2925 continue;
2927 slots_to_fill = num_delay_slots (insn);
2928 /* Some machine descriptions have defined instructions to have
2929 delay slots only in certain circumstances which may depend on
2930 nearby insns (which change due to reorg's actions).
2932 For example, the PA port normally has delay slots for unconditional
2933 jumps.
2935 However, the PA port claims such jumps do not have a delay slot
2936 if they are immediate successors of certain CALL_INSNs. This
2937 allows the port to favor filling the delay slot of the call with
2938 the unconditional jump. */
2939 if (slots_to_fill == 0)
2940 continue;
2942 slots_filled = 0;
2943 target_label = JUMP_LABEL (insn);
2944 condition = get_branch_condition (insn, target_label);
2946 if (condition == 0)
2947 continue;
2949 /* Get the next active fallthrough and target insns and see if we own
2950 them. Then see whether the branch is likely true. We don't need
2951 to do a lot of this for unconditional branches. */
2953 insn_at_target = next_active_insn (target_label);
2954 own_target = own_thread_p (target_label, target_label, 0);
2956 if (condition == const_true_rtx)
2958 own_fallthrough = 0;
2959 fallthrough_insn = 0;
2960 prediction = 2;
2962 else
2964 fallthrough_insn = next_active_insn (insn);
2965 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
2966 prediction = mostly_true_jump (insn, condition);
2969 /* If this insn is expected to branch, first try to get insns from our
2970 target, then our fallthrough insns. If it is not expected to branch,
2971 try the other order. */
2973 if (prediction > 0)
2975 delay_list
2976 = fill_slots_from_thread (insn, condition, insn_at_target,
2977 fallthrough_insn, prediction == 2, 1,
2978 own_target,
2979 slots_to_fill, &slots_filled, delay_list);
2981 if (delay_list == 0 && own_fallthrough)
2983 /* Even though we didn't find anything for delay slots,
2984 we might have found a redundant insn which we deleted
2985 from the thread that was filled. So we have to recompute
2986 the next insn at the target. */
2987 target_label = JUMP_LABEL (insn);
2988 insn_at_target = next_active_insn (target_label);
2990 delay_list
2991 = fill_slots_from_thread (insn, condition, fallthrough_insn,
2992 insn_at_target, 0, 0,
2993 own_fallthrough,
2994 slots_to_fill, &slots_filled,
2995 delay_list);
2998 else
3000 if (own_fallthrough)
3001 delay_list
3002 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3003 insn_at_target, 0, 0,
3004 own_fallthrough,
3005 slots_to_fill, &slots_filled,
3006 delay_list);
3008 if (delay_list == 0)
3009 delay_list
3010 = fill_slots_from_thread (insn, condition, insn_at_target,
3011 next_active_insn (insn), 0, 1,
3012 own_target,
3013 slots_to_fill, &slots_filled,
3014 delay_list);
3017 if (delay_list)
3018 unfilled_slots_base[i]
3019 = emit_delay_sequence (insn, delay_list, slots_filled);
3021 if (slots_to_fill == slots_filled)
3022 unfilled_slots_base[i] = 0;
3024 note_delay_statistics (slots_filled, 1);
3028 /* Once we have tried two ways to fill a delay slot, make a pass over the
3029 code to try to improve the results and to do such things as more jump
3030 threading. */
3032 static void
3033 relax_delay_slots (first)
3034 rtx first;
3036 register rtx insn, next, pat;
3037 register rtx trial, delay_insn, target_label;
3039 /* Look at every JUMP_INSN and see if we can improve it. */
3040 for (insn = first; insn; insn = next)
3042 rtx other;
3044 next = next_active_insn (insn);
3046 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3047 the next insn, or jumps to a label that is not the last of a
3048 group of consecutive labels. */
3049 if (GET_CODE (insn) == JUMP_INSN
3050 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3051 && (target_label = JUMP_LABEL (insn)) != 0)
3053 target_label = follow_jumps (target_label);
3054 target_label = prev_label (next_active_insn (target_label));
3056 if (target_label == 0)
3057 target_label = find_end_label ();
3059 if (next_active_insn (target_label) == next
3060 && ! condjump_in_parallel_p (insn))
3062 delete_jump (insn);
3063 continue;
3066 if (target_label != JUMP_LABEL (insn))
3067 reorg_redirect_jump (insn, target_label);
3069 /* See if this jump branches around an unconditional jump.
3070 If so, invert this jump and point it to the target of the
3071 second jump. */
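/* I.e. (labels hypothetical)

        beq r0,L1
        b   L2
   L1:  ...

   becomes

        bne r0,L2
   L1:  ...                                                            */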
3072 if (next && GET_CODE (next) == JUMP_INSN
3073 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3074 && next_active_insn (target_label) == next_active_insn (next)
3075 && no_labels_between_p (insn, next))
3077 rtx label = JUMP_LABEL (next);
3079 /* Be careful how we do this to avoid deleting code or
3080 labels that are momentarily dead. See similar optimization
3081 in jump.c.
3083 We also need to ensure we properly handle the case when
3084 invert_jump fails. */
3086 ++LABEL_NUSES (target_label);
3087 if (label)
3088 ++LABEL_NUSES (label);
3090 if (invert_jump (insn, label))
3092 delete_insn (next);
3093 next = insn;
3096 if (label)
3097 --LABEL_NUSES (label);
3099 if (--LABEL_NUSES (target_label) == 0)
3100 delete_insn (target_label);
3102 continue;
3106 /* If this is an unconditional jump and the previous insn is a
3107 conditional jump, try reversing the condition of the previous
3108 insn and swapping our targets. The next pass might be able to
3109 fill the slots.
3111 Don't do this if we expect the conditional branch to be true, because
3112 we would then be making the more common case longer. */
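/* E.g. (labels hypothetical; the beq is mostly not taken)

        beq r0,L1            <- OTHER
        b   L2               <- INSN
        ...

   becomes

        bne r0,L2
        b   L1
        ...

   leaving a fresh unconditional jump whose slots the next pass may be
   able to fill.  */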
3114 if (GET_CODE (insn) == JUMP_INSN
3115 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3116 && (other = prev_active_insn (insn)) != 0
3117 && (condjump_p (other) || condjump_in_parallel_p (other))
3118 && no_labels_between_p (other, insn)
3119 && 0 > mostly_true_jump (other,
3120 get_branch_condition (other,
3121 JUMP_LABEL (other))))
3123 rtx other_target = JUMP_LABEL (other);
3124 target_label = JUMP_LABEL (insn);
3126 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3127 as we move the label. */
3128 if (other_target)
3129 ++LABEL_NUSES (other_target);
3131 if (invert_jump (other, target_label))
3132 reorg_redirect_jump (insn, other_target);
3134 if (other_target)
3135 --LABEL_NUSES (other_target);
3138 /* Now look only at cases where we have filled a delay slot. */
3139 if (GET_CODE (insn) != INSN
3140 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3141 continue;
3143 pat = PATTERN (insn);
3144 delay_insn = XVECEXP (pat, 0, 0);
3146 /* See if the first insn in the delay slot is redundant with some
3147 previous insn. Remove it from the delay slot if so; then set up
3148 to reprocess this insn. */
3149 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3151 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3152 next = prev_active_insn (next);
3153 continue;
3156 /* See if we have a RETURN insn with a filled delay slot followed
3157 by a RETURN insn with an unfilled delay slot. If so, we can delete
3158 the first RETURN (but not its delay insn). This gives the same
3159 effect in fewer instructions.
3161 Only do so if optimizing for size since this results in slower, but
3162 smaller code. */
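/* Schematically,

       (sequence [ return ; insn A ])
       return

   becomes

       insn A
       return

   one insn shorter, though insn A no longer hides in a delay slot.  */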
3163 if (optimize_size
3164 && GET_CODE (PATTERN (delay_insn)) == RETURN
3165 && next
3166 && GET_CODE (next) == JUMP_INSN
3167 && GET_CODE (PATTERN (next)) == RETURN)
3169 int i;
3171 /* Delete the RETURN and just execute the delay list insns.
3173 We do this by deleting the INSN containing the SEQUENCE, then
3174 re-emitting the insns separately, and then deleting the RETURN.
3175 This allows the count of the jump target to be properly
3176 decremented. */
3178 /* Clear the from target bit, since these insns are no longer
3179 in delay slots. */
3180 for (i = 0; i < XVECLEN (pat, 0); i++)
3181 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3183 trial = PREV_INSN (insn);
3184 delete_insn (insn);
3185 emit_insn_after (pat, trial);
3186 delete_scheduled_jump (delay_insn);
3187 continue;
3190 /* Now look only at the cases where we have a filled JUMP_INSN. */
3191 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3192 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3193 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3194 continue;
3196 target_label = JUMP_LABEL (delay_insn);
3198 if (target_label)
3200 /* If this jump goes to another unconditional jump, thread it, but
3201 don't convert a jump into a RETURN here. */
3202 trial = follow_jumps (target_label);
3203 /* We use next_real_insn instead of next_active_insn, so that
3204 the special USE insns emitted by reorg won't be ignored.
3205 If they are ignored, then they will get deleted if target_label
3206 is now unreachable, and that would cause mark_target_live_regs
3207 to fail. */
3208 trial = prev_label (next_real_insn (trial));
3209 if (trial == 0 && target_label != 0)
3210 trial = find_end_label ();
3212 if (trial != target_label
3213 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3215 reorg_redirect_jump (delay_insn, trial);
3216 target_label = trial;
3219 /* If the first insn at TARGET_LABEL is redundant with a previous
3220 insn, redirect the jump to the following insn and process it again. */
3221 trial = next_active_insn (target_label);
3222 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3223 && redundant_insn (trial, insn, 0))
3225 rtx tmp;
3227 /* Figure out where to emit the special USE insn so we don't
3228 later incorrectly compute register live/death info. */
3229 tmp = next_active_insn (trial);
3230 if (tmp == 0)
3231 tmp = find_end_label ();
3233 /* Insert the special USE insn and update dataflow info. */
3234 update_block (trial, tmp);
3236 /* Now emit a label before the special USE insn, and
3237 redirect our jump to the new label. */
3238 target_label = get_label_before (PREV_INSN (tmp));
3239 reorg_redirect_jump (delay_insn, target_label);
3240 next = insn;
3241 continue;
3244 /* Similarly, if it is an unconditional jump with one insn in its
3245 delay list and that insn is redundant, thread the jump. */
3246 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3247 && XVECLEN (PATTERN (trial), 0) == 2
3248 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3249 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3250 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3251 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3253 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3254 if (target_label == 0)
3255 target_label = find_end_label ();
3257 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
3258 insn))
3260 reorg_redirect_jump (delay_insn, target_label);
3261 next = insn;
3262 continue;
3267 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3268 && prev_active_insn (target_label) == insn
3269 && ! condjump_in_parallel_p (delay_insn)
3270 #ifdef HAVE_cc0
3271 /* If the last insn in the delay slot sets CC0 for some insn,
3272 various code assumes that it is in a delay slot. We could
3273 put it back where it belonged and delete the register notes,
3274 but it doesn't seem worthwhile in this uncommon case. */
3275 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3276 REG_CC_USER, NULL_RTX)
3277 #endif
3280 int i;
3282 /* All this insn does is execute its delay list and jump to the
3283 following insn. So delete the jump and just execute the delay
3284 list insns.
3286 We do this by deleting the INSN containing the SEQUENCE, then
3287 re-emitting the insns separately, and then deleting the jump.
3288 This allows the count of the jump target to be properly
3289 decremented. */
3291 /* Clear the from target bit, since these insns are no longer
3292 in delay slots. */
3293 for (i = 0; i < XVECLEN (pat, 0); i++)
3294 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3296 trial = PREV_INSN (insn);
3297 delete_insn (insn);
3298 emit_insn_after (pat, trial);
3299 delete_scheduled_jump (delay_insn);
3300 continue;
3303 /* See if this is an unconditional jump around a single insn which is
3304 identical to the one in its delay slot. In this case, we can just
3305 delete the branch and the insn in its delay slot. */
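/* I.e. (labels hypothetical)

       (sequence [ b L1 ; insn A ])
       insn A'               <- identical to insn A
   L1:  ...

   The branch and its slot copy are deleted; falling through insn A'
   into L1 has exactly the same effect.  */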
3306 if (next && GET_CODE (next) == INSN
3307 && prev_label (next_active_insn (next)) == target_label
3308 && simplejump_p (insn)
3309 && XVECLEN (pat, 0) == 2
3310 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3312 delete_insn (insn);
3313 continue;
3316 /* See if this jump (with its delay slots) branches around another
3317 jump (without delay slots). If so, invert this jump and point
3318 it to the target of the second jump. We cannot do this for
3319 annulled jumps, though. Again, don't convert a jump to a RETURN
3320 here. */
3321 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3322 && next && GET_CODE (next) == JUMP_INSN
3323 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3324 && next_active_insn (target_label) == next_active_insn (next)
3325 && no_labels_between_p (insn, next))
3327 rtx label = JUMP_LABEL (next);
3328 rtx old_label = JUMP_LABEL (delay_insn);
3330 if (label == 0)
3331 label = find_end_label ();
3333 if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3335 /* Be careful how we do this to avoid deleting code or labels
3336 that are momentarily dead. See similar optimization in
3337 jump.c */
3338 if (old_label)
3339 ++LABEL_NUSES (old_label);
3341 if (invert_jump (delay_insn, label))
3343 int i;
3345 /* Must update the INSN_FROM_TARGET_P bits now that
3346 the branch is reversed, so that mark_target_live_regs
3347 will handle the delay slot insn correctly. */
3348 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3350 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3351 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3354 delete_insn (next);
3355 next = insn;
3358 if (old_label && --LABEL_NUSES (old_label) == 0)
3359 delete_insn (old_label);
3360 continue;
3364 /* If we own the thread opposite the way this insn branches, see if we
3365 can merge its delay slots with following insns. */
3366 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3367 && own_thread_p (NEXT_INSN (insn), 0, 1))
3368 try_merge_delay_insns (insn, next);
3369 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3370 && own_thread_p (target_label, target_label, 0))
3371 try_merge_delay_insns (insn, next_active_insn (target_label));
3373 /* If we get here, we haven't deleted INSN. But we may have deleted
3374 NEXT, so recompute it. */
3375 next = next_active_insn (insn);
3379 #ifdef HAVE_return
3381 /* Look for filled jumps to the end of function label. We can try to convert
3382 them into RETURN insns if the insns in the delay slot are valid for the
3383 RETURN as well. */
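/* Schematically, a filled

       (sequence [ b end_of_function_label ; insn A ])

   becomes

       (sequence [ return ; insn A ])

   when insn A is also valid in the RETURN's delay slot; otherwise the
   jump is redirected to branch to an existing RETURN instead.  */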
3385 static void
3386 make_return_insns (first)
3387 rtx first;
3389 rtx insn, jump_insn, pat;
3390 rtx real_return_label = end_of_function_label;
3391 int slots, i;
3393 /* See if there is a RETURN insn in the function other than the one we
3394 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3395 into a RETURN to jump to it. */
3396 for (insn = first; insn; insn = NEXT_INSN (insn))
3397 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
3399 real_return_label = get_label_before (insn);
3400 break;
3403 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3404 was equal to END_OF_FUNCTION_LABEL. */
3405 LABEL_NUSES (real_return_label)++;
3407 /* Clear the list of insns to fill so we can use it. */
3408 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3410 for (insn = first; insn; insn = NEXT_INSN (insn))
3412 int flags;
3414 /* Only look at filled JUMP_INSNs that go to the end of function
3415 label. */
3416 if (GET_CODE (insn) != INSN
3417 || GET_CODE (PATTERN (insn)) != SEQUENCE
3418 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3419 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3420 continue;
3422 pat = PATTERN (insn);
3423 jump_insn = XVECEXP (pat, 0, 0);
3425 /* If we can't make the jump into a RETURN, try to redirect it to the best
3426 RETURN and go on to the next insn. */
3427 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
3429 /* Make sure redirecting the jump will not invalidate the delay
3430 slot insns. */
3431 if (redirect_with_delay_slots_safe_p (jump_insn,
3432 real_return_label,
3433 insn))
3434 reorg_redirect_jump (jump_insn, real_return_label);
3435 continue;
3438 /* See if this RETURN can accept the insns currently in its delay slot.
3439 It can if it has at least as many slots and the contents of each
3440 are valid. */
3442 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3443 slots = num_delay_slots (jump_insn);
3444 if (slots >= XVECLEN (pat, 0) - 1)
3446 for (i = 1; i < XVECLEN (pat, 0); i++)
3447 if (! (
3448 #ifdef ANNUL_IFFALSE_SLOTS
3449 (INSN_ANNULLED_BRANCH_P (jump_insn)
3450 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3451 ? eligible_for_annul_false (jump_insn, i - 1,
3452 XVECEXP (pat, 0, i), flags) :
3453 #endif
3454 #ifdef ANNUL_IFTRUE_SLOTS
3455 (INSN_ANNULLED_BRANCH_P (jump_insn)
3456 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3457 ? eligible_for_annul_true (jump_insn, i - 1,
3458 XVECEXP (pat, 0, i), flags) :
3459 #endif
3460 eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
3461 break;
3463 else
3464 i = 0;
3466 if (i == XVECLEN (pat, 0))
3467 continue;
3469 /* We have to do something with this insn. If it is an unconditional
3470 RETURN, delete the SEQUENCE and output the individual insns,
3471 followed by the RETURN. Then set things up so we try to find
3472 insns for its delay slots, if it needs some. */
3473 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3475 rtx prev = PREV_INSN (insn);
3477 delete_insn (insn);
3478 for (i = 1; i < XVECLEN (pat, 0); i++)
3479 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3481 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3482 emit_barrier_after (insn);
3484 if (slots)
3485 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3487 else
3488 /* It is probably more efficient to keep this with its current
3489 delay slot as a branch to a RETURN. */
3490 reorg_redirect_jump (jump_insn, real_return_label);
3493 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3494 new delay slots we have created. */
3495 if (--LABEL_NUSES (real_return_label) == 0)
3496 delete_insn (real_return_label);
3498 fill_simple_delay_slots (1);
3499 fill_simple_delay_slots (0);
3501 #endif

/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int i;
#if 0
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
#endif

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
        max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
        epilogue_insn = insn;
    }

  uid_to_ruid = (int *) xmalloc ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
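
  /* Usage sketch (illustrative, based on how this map is consumed
     elsewhere in this pass, e.g. by get_jump_flags): the ruid values
     give a cheap ordering test on insns,

         if (uid_to_ruid[INSN_UID (a)] < uid_to_ruid[INSN_UID (b)])
           ... A appears before B in the insn stream ...

     which is how forward and backward branches are told apart.  */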

  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }
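
  /* This is the standard obstack idiom: obstack_alloc with size 0
     returns the current free pointer, so UNFILLED_FIRSTOBJ marks the
     base of the growing array of insn pointers, obstack_ptr_grow
     appends to it, and obstack_free (&unfilled_slots_obstack,
     unfilled_firstobj) truncates back to the mark.  */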

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
        continue;

      if (num_delay_slots (insn) > 0)
        obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && JUMP_LABEL (insn) != 0
          && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
              != JUMP_LABEL (insn)))
        redirect_jump (insn, target);
    }
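
  /* An illustrative case for the redirection above: given consecutive
     labels

         L1:
         L2:    insn ...

     a conditional jump to L1 is redirected to L2, so that later
     JUMP_LABEL equality tests see one canonical label.  */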

  init_resource_info (epilogue_insn);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }
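
  /* The ordering above matters: the simple fills are the cheapest and
     least speculative, fill_eager_delay_slots then steals insns from a
     likely branch target or fall-through path, and relax_delay_slots
     re-threads and cleans up jumps afterward.  MAX_REORG_PASSES is
     defined as 2 earlier in this file (an assumption here; the
     definition is outside this excerpt), so each strategy gets a second
     chance at slots the first round opened up.  */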

  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
          && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
        next = delete_insn (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);

  if (file)
    {
      register int i, j, need_comma;
      int total_delay_slots[MAX_DELAY_HISTOGRAM + 1];
      int total_annul_slots[MAX_DELAY_HISTOGRAM + 1];

      for (reorg_pass_number = 0;
           reorg_pass_number < MAX_REORG_PASSES;
           reorg_pass_number++)
        {
          fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
          for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
            {
              need_comma = 0;
              fprintf (file, ";; Reorg function #%d\n", i);

              fprintf (file, ";; %d insns needing delay slots\n;; ",
                       num_insns_needing_delays[i][reorg_pass_number]);

              for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
                if (num_filled_delays[i][j][reorg_pass_number])
                  {
                    if (need_comma)
                      fprintf (file, ", ");
                    need_comma = 1;
                    fprintf (file, "%d got %d delays",
                             num_filled_delays[i][j][reorg_pass_number], j);
                  }
              fprintf (file, "\n");
            }
        }
      bzero ((char *) total_delay_slots, sizeof total_delay_slots);
      bzero ((char *) total_annul_slots, sizeof total_annul_slots);
      for (insn = first; insn; insn = NEXT_INSN (insn))
        {
          if (! INSN_DELETED_P (insn)
              && GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) != USE
              && GET_CODE (PATTERN (insn)) != CLOBBER)
            {
              if (GET_CODE (PATTERN (insn)) == SEQUENCE)
                {
                  j = XVECLEN (PATTERN (insn), 0) - 1;
                  if (j > MAX_DELAY_HISTOGRAM)
                    j = MAX_DELAY_HISTOGRAM;
                  if (INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (insn), 0, 0)))
                    total_annul_slots[j]++;
                  else
                    total_delay_slots[j]++;
                }
              else if (num_delay_slots (insn) > 0)
                total_delay_slots[0]++;
            }
        }
      fprintf (file, ";; Reorg totals: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
        {
          if (total_delay_slots[j])
            {
              if (need_comma)
                fprintf (file, ", ");
              need_comma = 1;
              fprintf (file, "%d got %d delays", total_delay_slots[j], j);
            }
        }
      fprintf (file, "\n");
#if defined (ANNUL_IFTRUE_SLOTS) || defined (ANNUL_IFFALSE_SLOTS)
      fprintf (file, ";; Reorg annuls: ");
      need_comma = 0;
      for (j = 0; j < MAX_DELAY_HISTOGRAM + 1; j++)
        {
          if (total_annul_slots[j])
            {
              if (need_comma)
                fprintf (file, ", ");
              need_comma = 1;
              fprintf (file, "%d got %d delays", total_annul_slots[j], j);
            }
        }
      fprintf (file, "\n");
#endif
      fprintf (file, "\n");
    }
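
  /* Sample of the dump emitted above, with illustrative counts:

         ;; Reorg pass #1:
         ;; Reorg function #0
         ;; 12 insns needing delay slots
         ;; 9 got 1 delays
         ...
         ;; Reorg totals: 3 got 0 delays, 9 got 1 delays

     "N got M delays" counts insns that ended up with M filled slots;
     the 0 bucket in the totals holds insns whose slots stayed empty.  */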

  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now because up to this point the destinations of
     JUMPs can still be moved around and changed, but beyond this point
     they cannot.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (GET_CODE (insn) == INSN)
        {
          rtx pat = PATTERN (insn);

          if (GET_CODE (pat) == SEQUENCE)
            insn = XVECEXP (pat, 0, 0);
        }
      if (GET_CODE (insn) != JUMP_INSN)
        continue;

      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
                                            GEN_INT (pred_flags),
                                            REG_NOTES (insn));
    }
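
  /* A sketch of how a backend might consume the note added above,
     assuming the usual ATTR_FLAG_* bits from insn-attr.h (the exact
     set available is target/generator dependent):

         rtx note = find_reg_note (insn, REG_BR_PRED, NULL_RTX);
         if (note && (INTVAL (XEXP (note, 0)) & ATTR_FLAG_likely))
           ... set the "predict taken" bit in the branch encoding ...

     since PRED_FLAGS packs the direction and likelihood flags computed
     by get_jump_flags.  */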

  free_resource_info ();
  free (uid_to_ruid);
}
#endif /* DELAY_SLOTS */