1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
3 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
4 Hacked by Michael Tiemann (tiemann@cygnus.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 /* Instruction reorganization pass.
25 This pass runs after register allocation and final jump
26 optimization. It should be the last pass to run before peephole.
27 It serves primarily to fill delay slots of insns, typically branch
28 and call insns. Other insns typically involve more complicated
29 interactions of data dependencies and resource constraints, and
30 are better handled by scheduling before register allocation (by the
31 function `schedule_insns').
33 The Branch Penalty is the number of extra cycles that are needed to
34 execute a branch insn. On an ideal machine, branches take a single
35 cycle, and the Branch Penalty is 0. Several RISC machines approach
36 branch delays differently:
38 The MIPS and AMD 29000 have a single branch delay slot. Most insns
39 (except other branches) can be used to fill this slot. When the
40 slot is filled, two insns execute in two cycles, reducing the
41 branch penalty to zero.
43 The Motorola 88000 conditionally exposes its branch delay slot,
44 so code is shorter when it is turned off, but will run faster
45 when useful insns are scheduled there.
47 The IBM ROMP has two forms of branch and call insns, both with and
48 without a delay slot. Much like the 88k, insns not using the delay
49 slot can be shorter (2 bytes vs. 4 bytes), but will run slower.
51 The SPARC always has a branch delay slot, but its effects can be
52 annulled when the branch is not taken. This means that failing to
53 find other sources of insns, we can hoist an insn from the branch
54 target that would only be safe to execute knowing that the branch
55 is taken.
57 The HP-PA always has a branch delay slot. For unconditional branches
58 its effects can be annulled when the branch is taken. The effects
59 of the delay slot in a conditional branch can be nullified for forward
60 taken branches, or for untaken backward branches. This means
61 we can hoist insns from the fall-through path for forward branches or
62 steal insns from the target of backward branches.
64 The TMS320C3x and C4x have three branch delay slots. When the three
65 slots are filled, the branch penalty is zero. Most insns can fill the
66 delay slots except jump insns.
68 Three techniques for filling delay slots have been implemented so far:
70 (1) `fill_simple_delay_slots' is the simplest, most efficient way
71 to fill delay slots. This pass first looks for insns which come
72 from before the branch and which are safe to execute after the
73 branch. Then it searches after the insn requiring delay slots or,
74 in the case of a branch, for insns that are after the point at
75 which the branch merges into the fallthrough code, if such a point
76 exists. When such insns are found, the branch penalty decreases
77 and no code expansion takes place.
79 (2) `fill_eager_delay_slots' is more complicated: it is used for
80 scheduling conditional jumps, or for scheduling jumps which cannot
81 be filled using (1). A machine need not have annulled jumps to use
82 this strategy, but it helps (by keeping more options open).
83 `fill_eager_delay_slots' tries to guess the direction the branch
84 will go; if it guesses right 100% of the time, it can reduce the
85 branch penalty as much as `fill_simple_delay_slots' does. If it
86 guesses wrong 100% of the time, it might as well schedule nops (or
87 on the m88k, unexpose the branch slot). When
88 `fill_eager_delay_slots' takes insns from the fall-through path of
89 the jump, usually there is no code expansion; when it takes insns
90 from the branch target, there is code expansion if it is not the
91 only way to reach that target.
93 (3) `relax_delay_slots' uses a set of rules to simplify code that
94 has been reorganized by (1) and (2). It finds cases where
95 a conditional test can be eliminated, jumps can be threaded, extra
96 insns can be eliminated, etc. It is the job of (1) and (2) to do a
97 good job of scheduling locally; `relax_delay_slots' takes care of
98 making the various individual schedules work well together. It is
99 especially tuned to handle the control flow interactions of branch
100 insns. It does nothing for insns with delay slots that do not
101 branch.
103 On machines that use CC0, we are very conservative. We will not make
104 a copy of an insn involving CC0 since we want to maintain a 1-1
105 correspondence between the insn that sets CC0 and the insn that uses it. The insns are
106 allowed to be separated by placing an insn that sets CC0 (but not an insn
107 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
108 delay slot. In that case, we point each insn at the other with REG_CC_USER
109 and REG_CC_SETTER notes. Note that these restrictions affect very few
110 machines because most RISC machines with delay slots will not use CC0
111 (the RT is the only known exception at this point).
113 Not yet implemented:
115 The Acorn Risc Machine can conditionally execute most insns, so
116 it is profitable to move single insns into a position to execute
117 based on the condition code of the previous insn.
119 The HP-PA can conditionally nullify insns, providing a similar
120 effect to the ARM, differing mostly in which insn is "in charge". */
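/* Illustrative example (not from the original sources): on a machine with a
   single, non-annulled branch delay slot, `fill_simple_delay_slots' can move
   an independent insn from before an unconditional branch into the slot.
   If the slot were left unfilled, a nop would have to be emitted at output:

	slot unfilled			slot filled

	add	%r1, %r2, %r3		b	L1
	b	L1			 add	%r1, %r2, %r3	! delay slot
	nop
   L1:					L1:

   The add executes whether or not the branch is taken, so the move is safe
   and the branch penalty disappears.  */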
122 #include "config.h"
123 #include "system.h"
124 #include "toplev.h"
125 #include "rtl.h"
126 #include "expr.h"
127 #include "function.h"
128 #include "insn-config.h"
129 #include "conditions.h"
130 #include "hard-reg-set.h"
131 #include "basic-block.h"
132 #include "regs.h"
133 #include "insn-flags.h"
134 #include "recog.h"
135 #include "flags.h"
136 #include "output.h"
137 #include "obstack.h"
138 #include "insn-attr.h"
139 #include "resource.h"
142 #ifdef DELAY_SLOTS
144 #define obstack_chunk_alloc xmalloc
145 #define obstack_chunk_free free
147 #ifndef ANNUL_IFTRUE_SLOTS
148 #define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
149 #endif
150 #ifndef ANNUL_IFFALSE_SLOTS
151 #define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
152 #endif
154 /* Insns which have delay slots that have not yet been filled. */
156 static struct obstack unfilled_slots_obstack;
157 static rtx *unfilled_firstobj;
159 /* Define macros to refer to the first and last slot containing unfilled
160 insns. These are used because the list may move and its address
161 should be recomputed at each use. */
163 #define unfilled_slots_base \
164 ((rtx *) obstack_base (&unfilled_slots_obstack))
166 #define unfilled_slots_next \
167 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
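/* Illustrative sketch (not part of this file): the filling passes walk the
   list of unfilled insns roughly as below, re-reading the base on every
   iteration because growing the obstack may move the array.  */
#if 0
  int i;

  for (i = 0; unfilled_slots_base + i < unfilled_slots_next; i++)
    {
      rtx insn = unfilled_slots_base[i];

      if (insn == 0 || INSN_DELETED_P (insn))
	continue;

      /* ... try to fill the delay slots of INSN ... */
    }
#endif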
169 /* Points to the label before the end of the function. */
170 static rtx end_of_function_label;
172 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
173 not always monotonically increase. */
174 static int *uid_to_ruid;
176 /* Highest valid index in `uid_to_ruid'. */
177 static int max_uid;
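/* Illustrative sketch (not part of this file): the mapping can be built by
   numbering the insns in chain order; the real initialization is done by the
   driver of this pass.  */
#if 0
  int i;
  rtx insn;

  max_uid = get_max_uid ();
  uid_to_ruid = (int *) xmalloc ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = get_insns (); insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
#endif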
179 static int stop_search_p PROTO((rtx, int));
180 static int resource_conflicts_p PROTO((struct resources *,
181 struct resources *));
182 static int insn_references_resource_p PROTO((rtx, struct resources *, int));
183 static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
184 static rtx find_end_label PROTO((void));
185 static rtx emit_delay_sequence PROTO((rtx, rtx, int));
186 static rtx add_to_delay_list PROTO((rtx, rtx));
187 static rtx delete_from_delay_slot PROTO((rtx));
188 static void delete_scheduled_jump PROTO((rtx));
189 static void note_delay_statistics PROTO((int, int));
190 static rtx optimize_skip PROTO((rtx));
191 static int get_jump_flags PROTO((rtx, rtx));
192 static int rare_destination PROTO((rtx));
193 static int mostly_true_jump PROTO((rtx, rtx));
194 static rtx get_branch_condition PROTO((rtx, rtx));
195 static int condition_dominates_p PROTO((rtx, rtx));
196 static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
197 static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
198 static int check_annul_list_true_false PROTO ((int, rtx));
199 static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
200 struct resources *,
201 struct resources *,
202 struct resources *,
203 int, int *, int *, rtx *));
204 static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
205 struct resources *,
206 struct resources *,
207 struct resources *,
208 int, int *, int *));
209 static void try_merge_delay_insns PROTO((rtx, rtx));
210 static rtx redundant_insn PROTO((rtx, rtx, rtx));
211 static int own_thread_p PROTO((rtx, rtx, int));
212 static void update_block PROTO((rtx, rtx));
213 static int reorg_redirect_jump PROTO((rtx, rtx));
214 static void update_reg_dead_notes PROTO((rtx, rtx));
215 static void fix_reg_dead_note PROTO((rtx, rtx));
216 static void update_reg_unused_notes PROTO((rtx, rtx));
217 static void fill_simple_delay_slots PROTO((int));
218 static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
219 int, int, int *, rtx));
220 static void fill_eager_delay_slots PROTO((void));
221 static void relax_delay_slots PROTO((rtx));
222 static void make_return_insns PROTO((rtx));
224 /* Return TRUE if this insn should stop the search for insn to fill delay
225 slots. LABELS_P indicates that labels should terminate the search.
226 In all cases, jumps terminate the search. */
228 static int
229 stop_search_p (insn, labels_p)
230 rtx insn;
231 int labels_p;
233 if (insn == 0)
234 return 1;
236 switch (GET_CODE (insn))
238 case NOTE:
239 case CALL_INSN:
240 return 0;
242 case CODE_LABEL:
243 return labels_p;
245 case JUMP_INSN:
246 case BARRIER:
247 return 1;
249 case INSN:
250 /* OK unless it contains a delay slot or is an `asm' insn of some type.
251 We don't know anything about these. */
252 return (GET_CODE (PATTERN (insn)) == SEQUENCE
253 || GET_CODE (PATTERN (insn)) == ASM_INPUT
254 || asm_noperands (PATTERN (insn)) >= 0);
256 default:
257 abort ();
261 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
262 resource set contains a volatile memory reference. Otherwise, return FALSE. */
264 static int
265 resource_conflicts_p (res1, res2)
266 struct resources *res1, *res2;
268 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
269 || (res1->unch_memory && res2->unch_memory)
270 || res1->volatil || res2->volatil)
271 return 1;
273 #ifdef HARD_REG_SET
274 return (res1->regs & res2->regs) != HARD_CONST (0);
275 #else
277 int i;
279 for (i = 0; i < HARD_REG_SET_LONGS; i++)
280 if ((res1->regs[i] & res2->regs[i]) != 0)
281 return 1;
282 return 0;
284 #endif
287 /* Return TRUE if any resource marked in RES, a `struct resources', is
288 referenced by INSN. If INCLUDE_DELAYED_EFFECTS is set, also count the
289 resources used by the routine that INSN calls.
291 We compute this by computing all the resources referenced by INSN and
292 seeing if this conflicts with RES. It might be faster to directly check
293 ourselves, and this is the way it used to work, but it means duplicating
294 a large block of complex code. */
296 static int
297 insn_references_resource_p (insn, res, include_delayed_effects)
298 register rtx insn;
299 register struct resources *res;
300 int include_delayed_effects;
302 struct resources insn_res;
304 CLEAR_RESOURCE (&insn_res);
305 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
306 return resource_conflicts_p (&insn_res, res);
309 /* Return TRUE if INSN modifies resources that are marked in RES.
310 INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
311 included. CC0 is only modified if it is explicitly set; see comments
312 in front of mark_set_resources for details. */
314 static int
315 insn_sets_resource_p (insn, res, include_delayed_effects)
316 register rtx insn;
317 register struct resources *res;
318 int include_delayed_effects;
320 struct resources insn_sets;
322 CLEAR_RESOURCE (&insn_sets);
323 mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
324 return resource_conflicts_p (&insn_sets, res);
327 /* Find a label at the end of the function or before a RETURN. If there is
328 none, make one. */
330 static rtx
331 find_end_label ()
333 rtx insn;
335 /* If we found one previously, return it. */
336 if (end_of_function_label)
337 return end_of_function_label;
339 /* Otherwise, see if there is a label at the end of the function. If there
340 is, it must be that RETURN insns aren't needed, so that is our return
341 label and we don't have to do anything else. */
343 insn = get_last_insn ();
344 while (GET_CODE (insn) == NOTE
345 || (GET_CODE (insn) == INSN
346 && (GET_CODE (PATTERN (insn)) == USE
347 || GET_CODE (PATTERN (insn)) == CLOBBER)))
348 insn = PREV_INSN (insn);
350 /* When a target threads its epilogue we might already have a
351 suitable return insn. If so put a label before it for the
352 end_of_function_label. */
353 if (GET_CODE (insn) == BARRIER
354 && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
355 && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
357 rtx temp = PREV_INSN (PREV_INSN (insn));
358 end_of_function_label = gen_label_rtx ();
359 LABEL_NUSES (end_of_function_label) = 0;
361 /* Put the label before any USE insns that may precede the RETURN insn. */
362 while (GET_CODE (temp) == USE)
363 temp = PREV_INSN (temp);
365 emit_label_after (end_of_function_label, temp);
368 else if (GET_CODE (insn) == CODE_LABEL)
369 end_of_function_label = insn;
370 else
372 /* Otherwise, make a new label and emit a RETURN and BARRIER,
373 if needed. */
374 end_of_function_label = gen_label_rtx ();
375 LABEL_NUSES (end_of_function_label) = 0;
376 emit_label (end_of_function_label);
377 #ifdef HAVE_return
378 if (HAVE_return)
380 /* The return we make may have delay slots too. */
381 rtx insn = gen_return ();
382 insn = emit_jump_insn (insn);
383 emit_barrier ();
384 if (num_delay_slots (insn) > 0)
385 obstack_ptr_grow (&unfilled_slots_obstack, insn);
387 #endif
390 /* Show one additional use for this label so it won't go away until
391 we are done. */
392 ++LABEL_NUSES (end_of_function_label);
394 return end_of_function_label;
397 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
398 the pattern of INSN with the SEQUENCE.
400 Chain the insns so that NEXT_INSN of each insn in the sequence points to
401 the next and NEXT_INSN of the last insn in the sequence points to
402 the first insn after the sequence. Similarly for PREV_INSN. This makes
403 it easier to scan all insns.
405 Returns the SEQUENCE that replaces INSN. */
407 static rtx
408 emit_delay_sequence (insn, list, length)
409 rtx insn;
410 rtx list;
411 int length;
413 register int i = 1;
414 register rtx li;
415 int had_barrier = 0;
417 /* Allocate the rtvec to hold the insns and the SEQUENCE. */
418 rtvec seqv = rtvec_alloc (length + 1);
419 rtx seq = gen_rtx_SEQUENCE (VOIDmode, seqv);
420 rtx seq_insn = make_insn_raw (seq);
421 rtx first = get_insns ();
422 rtx last = get_last_insn ();
424 /* Make a copy of the insn having delay slots. */
425 rtx delay_insn = copy_rtx (insn);
427 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
428 confuse further processing. Update LAST in case it was the last insn.
429 We will put the BARRIER back in later. */
430 if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
432 delete_insn (NEXT_INSN (insn));
433 last = get_last_insn ();
434 had_barrier = 1;
437 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
438 NEXT_INSN (seq_insn) = NEXT_INSN (insn);
439 PREV_INSN (seq_insn) = PREV_INSN (insn);
441 if (insn != last)
442 PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;
444 if (insn != first)
445 NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;
447 /* Note the calls to set_new_first_and_last_insn must occur after
448 SEQ_INSN has been completely spliced into the insn stream.
450 Otherwise CUR_INSN_UID will get set to an incorrect value because
451 set_new_first_and_last_insn will not find SEQ_INSN in the chain. */
452 if (insn == last)
453 set_new_first_and_last_insn (first, seq_insn);
455 if (insn == first)
456 set_new_first_and_last_insn (seq_insn, last);
458 /* Build our SEQUENCE and rebuild the insn chain. */
459 XVECEXP (seq, 0, 0) = delay_insn;
460 INSN_DELETED_P (delay_insn) = 0;
461 PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
463 for (li = list; li; li = XEXP (li, 1), i++)
465 rtx tem = XEXP (li, 0);
466 rtx note;
468 /* Show that this copy of the insn isn't deleted. */
469 INSN_DELETED_P (tem) = 0;
471 XVECEXP (seq, 0, i) = tem;
472 PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
473 NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;
475 /* Remove any REG_DEAD notes because we can't rely on them now
476 that the insn has been moved. */
477 for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
478 if (REG_NOTE_KIND (note) == REG_DEAD)
479 XEXP (note, 0) = const0_rtx;
482 NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
484 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
485 last insn in that SEQUENCE to point to us. Similarly, update the PREV_INSN
486 pointer on the first insn of the following SEQUENCE, if there is one. */
488 if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
489 && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
490 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
491 XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
492 = seq_insn;
494 if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
495 && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
496 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
498 /* If there used to be a BARRIER, put it back. */
499 if (had_barrier)
500 emit_barrier_after (seq_insn);
502 if (i != length + 1)
503 abort ();
505 return seq_insn;
508 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
509 be in the order in which the insns are to be executed. */
511 static rtx
512 add_to_delay_list (insn, delay_list)
513 rtx insn;
514 rtx delay_list;
516 /* If we have an empty list, just make a new list element. If
517 INSN has its block number recorded, clear it since we may
518 be moving the insn to a new block. */
520 if (delay_list == 0)
522 clear_hashed_info_for_insn (insn);
523 return gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
526 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
527 list. */
528 XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
530 return delay_list;
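/* Illustrative usage (not part of this file): because each insn is appended
   at the end, a delay list stays in execution order.  FIRST_SLOT_INSN and
   SECOND_SLOT_INSN are hypothetical names.  */
#if 0
  rtx delay_list = 0;

  delay_list = add_to_delay_list (first_slot_insn, delay_list);
  delay_list = add_to_delay_list (second_slot_insn, delay_list);
  /* XEXP (delay_list, 0) is FIRST_SLOT_INSN, the insn that executes first.  */
#endif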
533 /* Delete INSN from the delay slot of the insn that it is in, which may
534 produce an insn with no delay slots. Return the new insn. */
536 static rtx
537 delete_from_delay_slot (insn)
538 rtx insn;
540 rtx trial, seq_insn, seq, prev;
541 rtx delay_list = 0;
542 int i;
544 /* We first must find the insn containing the SEQUENCE with INSN in its
545 delay slot. Do this by finding an insn, TRIAL, where
546 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
548 for (trial = insn;
549 PREV_INSN (NEXT_INSN (trial)) == trial;
550 trial = NEXT_INSN (trial))
553 seq_insn = PREV_INSN (NEXT_INSN (trial));
554 seq = PATTERN (seq_insn);
556 /* Create a delay list consisting of all the insns other than the one
557 we are deleting (unless we were the only one). */
558 if (XVECLEN (seq, 0) > 2)
559 for (i = 1; i < XVECLEN (seq, 0); i++)
560 if (XVECEXP (seq, 0, i) != insn)
561 delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);
563 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
564 list, and rebuild the delay list if non-empty. */
565 prev = PREV_INSN (seq_insn);
566 trial = XVECEXP (seq, 0, 0);
567 delete_insn (seq_insn);
568 add_insn_after (trial, prev);
570 if (GET_CODE (trial) == JUMP_INSN
571 && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
572 emit_barrier_after (trial);
574 /* If there are any delay insns, re-emit them. Otherwise clear the
575 annul flag. */
576 if (delay_list)
577 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2);
578 else
579 INSN_ANNULLED_BRANCH_P (trial) = 0;
581 INSN_FROM_TARGET_P (insn) = 0;
583 /* Show we need to fill this insn again. */
584 obstack_ptr_grow (&unfilled_slots_obstack, trial);
586 return trial;
589 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
590 the insn that sets CC0 for it and delete it too. */
592 static void
593 delete_scheduled_jump (insn)
594 rtx insn;
596 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
597 delete the insn that sets the condition code, but it is hard to find it.
598 Since this case is rare anyway, don't bother trying; there would likely
599 be other insns that became dead anyway, which we wouldn't know to
600 delete. */
602 #ifdef HAVE_cc0
603 if (reg_mentioned_p (cc0_rtx, insn))
605 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
607 /* If a reg-note was found, it points to an insn to set CC0. This
608 insn is in the delay list of some other insn. So delete it from
609 the delay list it was in. */
610 if (note)
612 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
613 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
614 delete_from_delay_slot (XEXP (note, 0));
616 else
618 /* The insn setting CC0 is our previous insn, but it may be in
619 a delay slot. It will be the last insn in the delay slot, if
620 it is. */
621 rtx trial = previous_insn (insn);
622 if (GET_CODE (trial) == NOTE)
623 trial = prev_nonnote_insn (trial);
624 if (sets_cc0_p (PATTERN (trial)) != 1
625 || FIND_REG_INC_NOTE (trial, 0))
626 return;
627 if (PREV_INSN (NEXT_INSN (trial)) == trial)
628 delete_insn (trial);
629 else
630 delete_from_delay_slot (trial);
633 #endif
635 delete_insn (insn);
638 /* Counters for delay-slot filling. */
640 #define NUM_REORG_FUNCTIONS 2
641 #define MAX_DELAY_HISTOGRAM 3
642 #define MAX_REORG_PASSES 2
644 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
646 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
648 static int reorg_pass_number;
650 static void
651 note_delay_statistics (slots_filled, index)
652 int slots_filled, index;
654 num_insns_needing_delays[index][reorg_pass_number]++;
655 if (slots_filled > MAX_DELAY_HISTOGRAM)
656 slots_filled = MAX_DELAY_HISTOGRAM;
657 num_filled_delays[index][slots_filled][reorg_pass_number]++;
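/* Illustrative sketch (not part of this file): the counters collected above
   can be reported at the end of the pass.  FILE, I and J are hypothetical
   locals of whatever routine does the dumping.  */
#if 0
  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
    {
      fprintf (file, ";; Reorg function %d: %d insns needing delay slots\n",
	       i, num_insns_needing_delays[i][reorg_pass_number]);
      for (j = 0; j <= MAX_DELAY_HISTOGRAM; j++)
	fprintf (file, ";;   %d insns with %d filled delay slots\n",
		 num_filled_delays[i][j][reorg_pass_number], j);
    }
#endif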
660 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
662 /* Optimize the following cases:
664 1. When a conditional branch skips over only one instruction,
665 use an annulling branch and put that insn in the delay slot.
666 Use either a branch that annuls when the condition is true, or
667 invert the test with a branch that annuls when the condition is
668 false. This saves insns, since otherwise we must copy an insn
669 from the L1 target.
671 (orig) (skip) (otherwise)
672 Bcc.n L1 Bcc',a L1 Bcc,a L1'
673 insn insn insn2
674 L1: L1: L1:
675 insn2 insn2 insn2
676 insn3 insn3 L1':
677 insn3
679 2. When a conditional branch skips over only one instruction,
680 and after that, it unconditionally branches somewhere else,
681 perform a similar optimization. This saves executing the
682 second branch in the case where the inverted condition is true.
684 Bcc.n L1 Bcc',a L2
685 insn insn
686 L1: L1:
687 Bra L2 Bra L2
689 INSN is a JUMP_INSN.
691 This should be expanded to skip over N insns, where N is the number
692 of delay slots required. */
694 static rtx
695 optimize_skip (insn)
696 register rtx insn;
698 register rtx trial = next_nonnote_insn (insn);
699 rtx next_trial = next_active_insn (trial);
700 rtx delay_list = 0;
701 rtx target_label;
702 int flags;
704 flags = get_jump_flags (insn, JUMP_LABEL (insn));
706 if (trial == 0
707 || GET_CODE (trial) != INSN
708 || GET_CODE (PATTERN (trial)) == SEQUENCE
709 || recog_memoized (trial) < 0
710 || (! eligible_for_annul_false (insn, 0, trial, flags)
711 && ! eligible_for_annul_true (insn, 0, trial, flags)))
712 return 0;
714 /* There are two cases where we are just executing one insn (we assume
715 here that a branch requires only one insn; this should be generalized
716 at some point): Where the branch goes around a single insn or where
717 we have one insn followed by a branch to the same label we branch to.
718 In both of these cases, inverting the jump and annulling the delay
719 slot give the same effect in fewer insns. */
720 if ((next_trial == next_active_insn (JUMP_LABEL (insn))
721 && ! (next_trial == 0 && current_function_epilogue_delay_list != 0))
722 || (next_trial != 0
723 && GET_CODE (next_trial) == JUMP_INSN
724 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
725 && (simplejump_p (next_trial)
726 || GET_CODE (PATTERN (next_trial)) == RETURN)))
728 if (eligible_for_annul_false (insn, 0, trial, flags))
730 if (invert_jump (insn, JUMP_LABEL (insn)))
731 INSN_FROM_TARGET_P (trial) = 1;
732 else if (! eligible_for_annul_true (insn, 0, trial, flags))
733 return 0;
736 delay_list = add_to_delay_list (trial, NULL_RTX);
737 next_trial = next_active_insn (trial);
738 update_block (trial, trial);
739 delete_insn (trial);
741 /* Also, if we are targeting an unconditional
742 branch, thread our jump to the target of that branch. Don't
743 change this into a RETURN here, because it may not accept what
744 we have in the delay slot. We'll fix this up later. */
745 if (next_trial && GET_CODE (next_trial) == JUMP_INSN
746 && (simplejump_p (next_trial)
747 || GET_CODE (PATTERN (next_trial)) == RETURN))
749 target_label = JUMP_LABEL (next_trial);
750 if (target_label == 0)
751 target_label = find_end_label ();
753 /* Recompute the flags based on TARGET_LABEL since threading
754 the jump to TARGET_LABEL may change the direction of the
755 jump (which may change the circumstances in which the
756 delay slot is nullified). */
757 flags = get_jump_flags (insn, target_label);
758 if (eligible_for_annul_true (insn, 0, trial, flags))
759 reorg_redirect_jump (insn, target_label);
762 INSN_ANNULLED_BRANCH_P (insn) = 1;
765 return delay_list;
767 #endif
770 /* Encode and return branch direction and prediction information for
771 INSN assuming it will jump to LABEL.
773 Unconditional branches return no direction information and
774 are predicted as very likely taken. */
776 static int
777 get_jump_flags (insn, label)
778 rtx insn, label;
780 int flags;
782 /* get_jump_flags can be passed any insn with delay slots; these may
783 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
784 direction information, and only if they are conditional jumps.
786 If LABEL is zero, then there is no way to determine the branch
787 direction. */
788 if (GET_CODE (insn) == JUMP_INSN
789 && (condjump_p (insn) || condjump_in_parallel_p (insn))
790 && INSN_UID (insn) <= max_uid
791 && label != 0
792 && INSN_UID (label) <= max_uid)
793 flags
794 = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
795 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
796 /* No valid direction information. */
797 else
798 flags = 0;
800 /* If INSN is a conditional branch, call mostly_true_jump to
801 determine the branch prediction.
803 Unconditional branches are predicted as very likely taken. */
804 if (GET_CODE (insn) == JUMP_INSN
805 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
807 int prediction;
809 prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
810 switch (prediction)
812 case 2:
813 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
814 break;
815 case 1:
816 flags |= ATTR_FLAG_likely;
817 break;
818 case 0:
819 flags |= ATTR_FLAG_unlikely;
820 break;
821 case -1:
822 flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
823 break;
825 default:
826 abort();
829 else
830 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
832 return flags;
835 /* Return 1 if INSN is a destination that will be branched to rarely (the
836 return point of a function); return 2 if INSN will be branched to very
837 rarely (a call to a function that doesn't return). Otherwise,
838 return 0. */
840 static int
841 rare_destination (insn)
842 rtx insn;
844 int jump_count = 0;
845 rtx next;
847 for (; insn; insn = next)
849 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
850 insn = XVECEXP (PATTERN (insn), 0, 0);
852 next = NEXT_INSN (insn);
854 switch (GET_CODE (insn))
856 case CODE_LABEL:
857 return 0;
858 case BARRIER:
859 /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
860 don't scan past JUMP_INSNs, so any barrier we find here must
861 have been after a CALL_INSN and hence mean the call doesn't
862 return. */
863 return 2;
864 case JUMP_INSN:
865 if (GET_CODE (PATTERN (insn)) == RETURN)
866 return 1;
867 else if (simplejump_p (insn)
868 && jump_count++ < 10)
869 next = JUMP_LABEL (insn);
870 else
871 return 0;
873 default:
874 break;
878 /* If we got here it means we hit the end of the function. So this
879 is an unlikely destination. */
881 return 1;
884 /* Return truth value of the statement that this branch
885 is mostly taken. If we think that the branch is extremely likely
886 to be taken, we return 2. If the branch is slightly more likely to be
887 taken, return 1. If the branch is slightly less likely to be taken,
888 return 0 and if the branch is highly unlikely to be taken, return -1.
890 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
892 static int
893 mostly_true_jump (jump_insn, condition)
894 rtx jump_insn, condition;
896 rtx target_label = JUMP_LABEL (jump_insn);
897 rtx insn;
898 int rare_dest = rare_destination (target_label);
899 int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
901 /* If branch probabilities are available, then use that number since it
902 always gives a correct answer. */
903 if (flag_branch_probabilities)
905 rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
906 if (note)
908 int prob = XINT (note, 0);
910 if (prob >= REG_BR_PROB_BASE * 9 / 10)
911 return 2;
912 else if (prob >= REG_BR_PROB_BASE / 2)
913 return 1;
914 else if (prob >= REG_BR_PROB_BASE / 10)
915 return 0;
916 else
917 return -1;
921 /* If this is a branch outside a loop, it is highly unlikely. */
922 if (GET_CODE (PATTERN (jump_insn)) == SET
923 && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
924 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
925 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
926 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
927 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
928 return -1;
930 if (target_label)
932 /* If this is the test of a loop, it is very likely true. We scan
933 backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
934 before the next real insn, we assume the branch is to the top of
935 the loop. */
936 for (insn = PREV_INSN (target_label);
937 insn && GET_CODE (insn) == NOTE;
938 insn = PREV_INSN (insn))
939 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
940 return 2;
942 /* If this is a jump to the test of a loop, it is likely true. We scan
943 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
944 before the next real insn, we assume the branch is to the loop branch
945 test. */
946 for (insn = NEXT_INSN (target_label);
947 insn && GET_CODE (insn) == NOTE;
948 insn = NEXT_INSN (insn))
949 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
950 return 1;
953 /* Look at the relative rarities of the fallthrough and destination. If
954 they differ, we can predict the branch that way. */
956 switch (rare_fallthrough - rare_dest)
958 case -2:
959 return -1;
960 case -1:
961 return 0;
962 case 0:
963 break;
964 case 1:
965 return 1;
966 case 2:
967 return 2;
970 /* If we couldn't figure out what this jump was, assume it won't be
971 taken. This should be rare. */
972 if (condition == 0)
973 return 0;
975 /* EQ tests are usually false and NE tests are usually true. Also,
976 most quantities are positive, so we can make the appropriate guesses
977 about signed comparisons against zero. */
978 switch (GET_CODE (condition))
980 case CONST_INT:
981 /* Unconditional branch. */
982 return 1;
983 case EQ:
984 return 0;
985 case NE:
986 return 1;
987 case LE:
988 case LT:
989 if (XEXP (condition, 1) == const0_rtx)
990 return 0;
991 break;
992 case GE:
993 case GT:
994 if (XEXP (condition, 1) == const0_rtx)
995 return 1;
996 break;
998 default:
999 break;
1002 /* Predict that backward branches are usually taken, forward branches usually not. If
1003 we don't know whether this is forward or backward, assume the branch
1004 will be taken, since most are. */
1005 return (target_label == 0 || INSN_UID (jump_insn) > max_uid
1006 || INSN_UID (target_label) > max_uid
1007 || (uid_to_ruid[INSN_UID (jump_insn)]
1008 > uid_to_ruid[INSN_UID (target_label)]));
1011 /* Return the condition under which INSN will branch to TARGET. If TARGET
1012 is zero, return the condition under which INSN will return. If INSN is
1013 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1014 type of jump, or it doesn't go to TARGET, return 0. */
1016 static rtx
1017 get_branch_condition (insn, target)
1018 rtx insn;
1019 rtx target;
1021 rtx pat = PATTERN (insn);
1022 rtx src;
1024 if (condjump_in_parallel_p (insn))
1025 pat = XVECEXP (pat, 0, 0);
1027 if (GET_CODE (pat) == RETURN)
1028 return target == 0 ? const_true_rtx : 0;
1030 else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
1031 return 0;
1033 src = SET_SRC (pat);
1034 if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
1035 return const_true_rtx;
1037 else if (GET_CODE (src) == IF_THEN_ELSE
1038 && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
1039 || (GET_CODE (XEXP (src, 1)) == LABEL_REF
1040 && XEXP (XEXP (src, 1), 0) == target))
1041 && XEXP (src, 2) == pc_rtx)
1042 return XEXP (src, 0);
1044 else if (GET_CODE (src) == IF_THEN_ELSE
1045 && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
1046 || (GET_CODE (XEXP (src, 2)) == LABEL_REF
1047 && XEXP (XEXP (src, 2), 0) == target))
1048 && XEXP (src, 1) == pc_rtx)
1049 return gen_rtx_fmt_ee (reverse_condition (GET_CODE (XEXP (src, 0))),
1050 GET_MODE (XEXP (src, 0)),
1051 XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
1053 return 0;
1056 /* Return non-zero if CONDITION is more strict than the condition of
1057 INSN, i.e., if INSN will always branch if CONDITION is true. */
1059 static int
1060 condition_dominates_p (condition, insn)
1061 rtx condition;
1062 rtx insn;
1064 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
1065 enum rtx_code code = GET_CODE (condition);
1066 enum rtx_code other_code;
1068 if (rtx_equal_p (condition, other_condition)
1069 || other_condition == const_true_rtx)
1070 return 1;
1072 else if (condition == const_true_rtx || other_condition == 0)
1073 return 0;
1075 other_code = GET_CODE (other_condition);
1076 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
1077 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
1078 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
1079 return 0;
1081 return comparison_dominates_p (code, other_code);
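/* Illustrative example (not from the original sources): if CONDITION is
   (lt x 0) and INSN branches on (le x 0) with the same operands, then
   whenever CONDITION is true the branch is certainly taken, so this
   function returns non-zero via comparison_dominates_p (LT, LE).  */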
1084 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1085 any insns already in the delay slot of JUMP. */
1087 static int
1088 redirect_with_delay_slots_safe_p (jump, newlabel, seq)
1089 rtx jump, newlabel, seq;
1091 int flags, i;
1092 rtx pat = PATTERN (seq);
1094 /* Make sure all the delay slots of this jump would still
1095 be valid after threading the jump. If they are still
1096 valid, then return non-zero. */
1098 flags = get_jump_flags (jump, newlabel);
1099 for (i = 1; i < XVECLEN (pat, 0); i++)
1100 if (! (
1101 #ifdef ANNUL_IFFALSE_SLOTS
1102 (INSN_ANNULLED_BRANCH_P (jump)
1103 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1104 ? eligible_for_annul_false (jump, i - 1,
1105 XVECEXP (pat, 0, i), flags) :
1106 #endif
1107 #ifdef ANNUL_IFTRUE_SLOTS
1108 (INSN_ANNULLED_BRANCH_P (jump)
1109 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1110 ? eligible_for_annul_true (jump, i - 1,
1111 XVECEXP (pat, 0, i), flags) :
1112 #endif
1113 eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
1114 break;
1116 return (i == XVECLEN (pat, 0));
1119 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1120 any insns we wish to place in the delay slot of JUMP. */
1122 static int
1123 redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
1124 rtx jump, newlabel, delay_list;
1126 int flags, i;
1127 rtx li;
1129 /* Make sure all the insns in DELAY_LIST would still be
1130 valid after threading the jump. If they are still
1131 valid, then return non-zero. */
1133 flags = get_jump_flags (jump, newlabel);
1134 for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
1135 if (! (
1136 #ifdef ANNUL_IFFALSE_SLOTS
1137 (INSN_ANNULLED_BRANCH_P (jump)
1138 && INSN_FROM_TARGET_P (XEXP (li, 0)))
1139 ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
1140 #endif
1141 #ifdef ANNUL_IFTRUE_SLOTS
1142 (INSN_ANNULLED_BRANCH_P (jump)
1143 && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
1144 ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
1145 #endif
1146 eligible_for_delay (jump, i, XEXP (li, 0), flags)))
1147 break;
1149 return (li == NULL);
1152 /* DELAY_LIST is a list of insns that have already been placed into delay
1153 slots. See if all of them have the same annulling status as ANNUL_TRUE_P.
1154 If not, return 0; otherwise return 1. */
1156 static int
1157 check_annul_list_true_false (annul_true_p, delay_list)
1158 int annul_true_p;
1159 rtx delay_list;
1161 rtx temp;
1163 if (delay_list)
1165 for (temp = delay_list; temp; temp = XEXP (temp, 1))
1167 rtx trial = XEXP (temp, 0);
1169 if ((annul_true_p && INSN_FROM_TARGET_P (trial))
1170 || (!annul_true_p && !INSN_FROM_TARGET_P (trial)))
1171 return 0;
1175 return 1;
1179 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1180 the condition tested by INSN is CONDITION and the resources shown in
1181 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1182 from SEQ's delay list, in addition to whatever insns it may execute
1183 (in DELAY_LIST). SETS and NEEDED denote resources already set and
1184 needed while searching for delay slot insns. Return the concatenated
1185 delay list if possible, otherwise, return 0.
1187 SLOTS_TO_FILL is the total number of slots required by INSN, and
1188 PSLOTS_FILLED points to the number filled so far (also the number of
1189 insns in DELAY_LIST). It is updated with the number that have been
1190 filled from the SEQUENCE, if any.
1192 PANNUL_P points to a non-zero value if we already know that we need
1193 to annul INSN. If this routine determines that annulling is needed,
1194 it may set that value non-zero.
1196 PNEW_THREAD points to a location that is to receive the place at which
1197 execution should continue. */
1199 static rtx
1200 steal_delay_list_from_target (insn, condition, seq, delay_list,
1201 sets, needed, other_needed,
1202 slots_to_fill, pslots_filled, pannul_p,
1203 pnew_thread)
1204 rtx insn, condition;
1205 rtx seq;
1206 rtx delay_list;
1207 struct resources *sets, *needed, *other_needed;
1208 int slots_to_fill;
1209 int *pslots_filled;
1210 int *pannul_p;
1211 rtx *pnew_thread;
1213 rtx temp;
1214 int slots_remaining = slots_to_fill - *pslots_filled;
1215 int total_slots_filled = *pslots_filled;
1216 rtx new_delay_list = 0;
1217 int must_annul = *pannul_p;
1218 int used_annul = 0;
1219 int i;
1220 struct resources cc_set;
1222 /* We can't do anything if there are more delay slots in SEQ than we
1223 can handle, or if we don't know that it will be a taken branch.
1224 We know that it will be a taken branch if it is either an unconditional
1225 branch or a conditional branch with a stricter branch condition.
1227 Also, exit if the branch has more than one set, since then it is computing
1228 other results that can't be ignored, e.g. the HPPA mov&branch instruction.
1229 ??? It may be possible to move other sets into INSN in addition to
1230 moving the instructions in the delay slots.
1232 We can not steal the delay list if one of the instructions in the
1233 current delay_list modifies the condition codes and the jump in the
1234 sequence is a conditional jump. We can not do this because we can
1235 not change the direction of the jump because the condition codes
1236 will affect the direction of the jump in the sequence. */
1238 CLEAR_RESOURCE (&cc_set);
1239 for (temp = delay_list; temp; temp = XEXP (temp, 1))
1241 rtx trial = XEXP (temp, 0);
1243 mark_set_resources (trial, &cc_set, 0, 1);
1244 if (insn_references_resource_p (XVECEXP (seq , 0, 0), &cc_set, 0))
1245 return delay_list;
1248 if (XVECLEN (seq, 0) - 1 > slots_remaining
1249 || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
1250 || ! single_set (XVECEXP (seq, 0, 0)))
1251 return delay_list;
1253 for (i = 1; i < XVECLEN (seq, 0); i++)
1255 rtx trial = XVECEXP (seq, 0, i);
1256 int flags;
1258 if (insn_references_resource_p (trial, sets, 0)
1259 || insn_sets_resource_p (trial, needed, 0)
1260 || insn_sets_resource_p (trial, sets, 0)
1261 #ifdef HAVE_cc0
1262 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1263 delay list. */
1264 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1265 #endif
1266 /* If TRIAL is from the fallthrough code of an annulled branch insn
1267 in SEQ, we cannot use it. */
1268 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
1269 && ! INSN_FROM_TARGET_P (trial)))
1270 return delay_list;
1272 /* If this insn was already done (usually in a previous delay slot),
1273 pretend we put it in our delay slot. */
1274 if (redundant_insn (trial, insn, new_delay_list))
1275 continue;
1277 /* We will end up re-vectoring this branch, so compute flags
1278 based on jumping to the new label. */
1279 flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
1281 if (! must_annul
1282 && ((condition == const_true_rtx
1283 || (! insn_sets_resource_p (trial, other_needed, 0)
1284 && ! may_trap_p (PATTERN (trial)))))
1285 ? eligible_for_delay (insn, total_slots_filled, trial, flags)
1286 : (must_annul || (delay_list == NULL && new_delay_list == NULL))
1287 && (must_annul = 1,
1288 check_annul_list_true_false (0, delay_list)
1289 && check_annul_list_true_false (0, new_delay_list)
1290 && eligible_for_annul_false (insn, total_slots_filled,
1291 trial, flags)))
1293 if (must_annul)
1294 used_annul = 1;
1295 temp = copy_rtx (trial);
1296 INSN_FROM_TARGET_P (temp) = 1;
1297 new_delay_list = add_to_delay_list (temp, new_delay_list);
1298 total_slots_filled++;
1300 if (--slots_remaining == 0)
1301 break;
1303 else
1304 return delay_list;
1307 /* Show the place to which we will be branching. */
1308 *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
1310 /* Add any new insns to the delay list and update the count of the
1311 number of slots filled. */
1312 *pslots_filled = total_slots_filled;
1313 if (used_annul)
1314 *pannul_p = 1;
1316 if (delay_list == 0)
1317 return new_delay_list;
1319 for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
1320 delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
1322 return delay_list;
1325 /* Similar to steal_delay_list_from_target except that SEQ is on the
1326 fallthrough path of INSN. Here we only do something if the delay insn
1327 of SEQ is an unconditional branch. In that case we steal its delay slot
1328 for INSN since unconditional branches are much easier to fill. */
1330 static rtx
1331 steal_delay_list_from_fallthrough (insn, condition, seq,
1332 delay_list, sets, needed, other_needed,
1333 slots_to_fill, pslots_filled, pannul_p)
1334 rtx insn, condition;
1335 rtx seq;
1336 rtx delay_list;
1337 struct resources *sets, *needed, *other_needed;
1338 int slots_to_fill;
1339 int *pslots_filled;
1340 int *pannul_p;
1342 int i;
1343 int flags;
1344 int must_annul = *pannul_p;
1345 int used_annul = 0;
1347 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1349 /* We can't do anything if SEQ's delay insn isn't an
1350 unconditional branch. */
1352 if (! simplejump_p (XVECEXP (seq, 0, 0))
1353 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
1354 return delay_list;
1356 for (i = 1; i < XVECLEN (seq, 0); i++)
1358 rtx trial = XVECEXP (seq, 0, i);
1360 /* If TRIAL sets CC0, stealing it will move it too far from the use
1361 of CC0. */
1362 if (insn_references_resource_p (trial, sets, 0)
1363 || insn_sets_resource_p (trial, needed, 0)
1364 || insn_sets_resource_p (trial, sets, 0)
1365 #ifdef HAVE_cc0
1366 || sets_cc0_p (PATTERN (trial))
1367 #endif
1370 break;
1372 /* If this insn was already done, we don't need it. */
1373 if (redundant_insn (trial, insn, delay_list))
1375 delete_from_delay_slot (trial);
1376 continue;
1379 if (! must_annul
1380 && ((condition == const_true_rtx
1381 || (! insn_sets_resource_p (trial, other_needed, 0)
1382 && ! may_trap_p (PATTERN (trial)))))
1383 ? eligible_for_delay (insn, *pslots_filled, trial, flags)
1384 : (must_annul || delay_list == NULL) && (must_annul = 1,
1385 check_annul_list_true_false (1, delay_list)
1386 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
1388 if (must_annul)
1389 used_annul = 1;
1390 delete_from_delay_slot (trial);
1391 delay_list = add_to_delay_list (trial, delay_list);
1393 if (++(*pslots_filled) == slots_to_fill)
1394 break;
1396 else
1397 break;
1400 if (used_annul)
1401 *pannul_p = 1;
1402 return delay_list;
1406 /* Try merging insns starting at THREAD which match exactly the insns in
1407 INSN's delay list.
1409 If all insns were matched and the insn was previously annulling, the
1410 annul bit will be cleared.
1412 For each insn that is merged, if the branch is or will be non-annulling,
1413 we delete the merged insn. */
1415 static void
1416 try_merge_delay_insns (insn, thread)
1417 rtx insn, thread;
1419 rtx trial, next_trial;
1420 rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
1421 int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
1422 int slot_number = 1;
1423 int num_slots = XVECLEN (PATTERN (insn), 0);
1424 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1425 struct resources set, needed;
1426 rtx merged_insns = 0;
1427 int i;
1428 int flags;
1430 flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
1432 CLEAR_RESOURCE (&needed);
1433 CLEAR_RESOURCE (&set);
1435 /* If this is not an annulling branch, take into account anything needed in
1436 INSN's delay slot. This prevents two increments from being incorrectly
1437 folded into one. If we are annulling, this would be the correct
1438 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1439 will essentially disable this optimization. This method is somewhat of
1440 a kludge, but I don't see a better way.) */
1441 if (! annul_p)
1442 for (i = 1 ; i < num_slots ; i++)
1443 if (XVECEXP (PATTERN (insn), 0, i))
1444 mark_referenced_resources (XVECEXP (PATTERN (insn), 0, i), &needed, 1);
1446 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1448 rtx pat = PATTERN (trial);
1449 rtx oldtrial = trial;
1451 next_trial = next_nonnote_insn (trial);
1453 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1454 if (GET_CODE (trial) == INSN
1455 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1456 continue;
1458 if (GET_CODE (next_to_match) == GET_CODE (trial)
1459 #ifdef HAVE_cc0
1460 /* We can't share an insn that sets cc0. */
1461 && ! sets_cc0_p (pat)
1462 #endif
1463 && ! insn_references_resource_p (trial, &set, 1)
1464 && ! insn_sets_resource_p (trial, &set, 1)
1465 && ! insn_sets_resource_p (trial, &needed, 1)
1466 && (trial = try_split (pat, trial, 0)) != 0
1467 /* Update next_trial, in case try_split succeeded. */
1468 && (next_trial = next_nonnote_insn (trial))
1469 /* Likewise THREAD. */
1470 && (thread = oldtrial == thread ? trial : thread)
1471 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1472 /* Have to test this condition if annul condition is different
1473 from (and less restrictive than) non-annulling one. */
1474 && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
1477 if (! annul_p)
1479 update_block (trial, thread);
1480 if (trial == thread)
1481 thread = next_active_insn (thread);
1483 delete_insn (trial);
1484 INSN_FROM_TARGET_P (next_to_match) = 0;
1486 else
1487 merged_insns = gen_rtx_INSN_LIST (VOIDmode, trial, merged_insns);
1489 if (++slot_number == num_slots)
1490 break;
1492 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1495 mark_set_resources (trial, &set, 0, 1);
1496 mark_referenced_resources (trial, &needed, 1);
1499 /* See if we stopped on a filled insn. If we did, try to see if its
1500 delay slots match. */
1501 if (slot_number != num_slots
1502 && trial && GET_CODE (trial) == INSN
1503 && GET_CODE (PATTERN (trial)) == SEQUENCE
1504 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
1506 rtx pat = PATTERN (trial);
1507 rtx filled_insn = XVECEXP (pat, 0, 0);
1509 /* Account for resources set/needed by the filled insn. */
1510 mark_set_resources (filled_insn, &set, 0, 1);
1511 mark_referenced_resources (filled_insn, &needed, 1);
1513 for (i = 1; i < XVECLEN (pat, 0); i++)
1515 rtx dtrial = XVECEXP (pat, 0, i);
1517 if (! insn_references_resource_p (dtrial, &set, 1)
1518 && ! insn_sets_resource_p (dtrial, &set, 1)
1519 && ! insn_sets_resource_p (dtrial, &needed, 1)
1520 #ifdef HAVE_cc0
1521 && ! sets_cc0_p (PATTERN (dtrial))
1522 #endif
1523 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1524 && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
1526 if (! annul_p)
1528 rtx new;
1530 update_block (dtrial, thread);
1531 new = delete_from_delay_slot (dtrial);
1532 if (INSN_DELETED_P (thread))
1533 thread = new;
1534 INSN_FROM_TARGET_P (next_to_match) = 0;
1536 else
1537 merged_insns = gen_rtx_INSN_LIST (SImode, dtrial,
1538 merged_insns);
1540 if (++slot_number == num_slots)
1541 break;
1543 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1545 else
1547 /* Keep track of the set/referenced resources for the delay
1548 slots of any trial insns we encounter. */
1549 mark_set_resources (dtrial, &set, 0, 1);
1550 mark_referenced_resources (dtrial, &needed, 1);
1555 /* If all insns in the delay slot have been matched and we were previously
1556 annulling the branch, we need not do so any more. In that case delete all the
1557 merged insns. Also clear the INSN_FROM_TARGET_P bit of each insn in
1558 the delay list so that we know that it isn't only being used at the
1559 target. */
1560 if (slot_number == num_slots && annul_p)
1562 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
1564 if (GET_MODE (merged_insns) == SImode)
1566 rtx new;
1568 update_block (XEXP (merged_insns, 0), thread);
1569 new = delete_from_delay_slot (XEXP (merged_insns, 0));
1570 if (INSN_DELETED_P (thread))
1571 thread = new;
1573 else
1575 update_block (XEXP (merged_insns, 0), thread);
1576 delete_insn (XEXP (merged_insns, 0));
1580 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
1582 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1583 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
1587 /* See if INSN is redundant with an insn in front of TARGET. Often this
1588 is called when INSN is a candidate for a delay slot of TARGET.
1589 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1590 of INSN. Often INSN will be redundant with an insn in a delay slot of
1591 some previous insn. This happens when we have a series of branches to the
1592 same label; in that case the first insn at the target might want to go
1593 into each of the delay slots.
1595 If we are not careful, this routine can take up a significant fraction
1596 of the total compilation time (4%), but only wins rarely. Hence we
1597 speed this routine up by making two passes. The first pass goes back
1598 until it hits a label and sees if it finds an insn with an identical
1599 pattern. Only in this (relatively rare) event does it check for
1600 data conflicts.
1602 We do not split insns we encounter. This could cause us not to find a
1603 redundant insn, but the cost of splitting seems greater than the possible
1604 gain in rare cases. */
1606 static rtx
1607 redundant_insn (insn, target, delay_list)
1608 rtx insn;
1609 rtx target;
1610 rtx delay_list;
1612 rtx target_main = target;
1613 rtx ipat = PATTERN (insn);
1614 rtx trial, pat;
1615 struct resources needed, set;
1616 int i;
1618 /* If INSN has any REG_UNUSED notes, it can't match anything since we
1619 are allowed to not actually assign to such a register. */
1620 if (find_reg_note (insn, REG_UNUSED, NULL_RTX) != 0)
1621 return 0;
1623 /* Scan backwards looking for a match. */
1624 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
1626 if (GET_CODE (trial) == CODE_LABEL)
1627 return 0;
1629 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
1630 continue;
1632 pat = PATTERN (trial);
1633 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1634 continue;
1636 if (GET_CODE (pat) == SEQUENCE)
1638 /* Stop for a CALL and its delay slots because it is difficult to
1639 track its resource needs correctly. */
1640 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1641 return 0;
1643 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
1644 slots because it is difficult to track its resource needs
1645 correctly. */
1647 #ifdef INSN_SETS_ARE_DELAYED
1648 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1649 return 0;
1650 #endif
1652 #ifdef INSN_REFERENCES_ARE_DELAYED
1653 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1654 return 0;
1655 #endif
1657 /* See if any of the insns in the delay slot match, updating
1658 resource requirements as we go. */
1659 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1660 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
1661 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat)
1662 && ! find_reg_note (XVECEXP (pat, 0, i), REG_UNUSED, NULL_RTX))
1663 break;
1665 /* If found a match, exit this loop early. */
1666 if (i > 0)
1667 break;
1670 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat)
1671 && ! find_reg_note (trial, REG_UNUSED, NULL_RTX))
1672 break;
1675 /* If we didn't find an insn that matches, return 0. */
1676 if (trial == 0)
1677 return 0;
1679 /* See what resources this insn sets and needs. If they overlap, or
1680 if this insn references CC0, it can't be redundant. */
1682 CLEAR_RESOURCE (&needed);
1683 CLEAR_RESOURCE (&set);
1684 mark_set_resources (insn, &set, 0, 1);
1685 mark_referenced_resources (insn, &needed, 1);
1687 /* If TARGET is a SEQUENCE, get the main insn. */
1688 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1689 target_main = XVECEXP (PATTERN (target), 0, 0);
1691 if (resource_conflicts_p (&needed, &set)
1692 #ifdef HAVE_cc0
1693 || reg_mentioned_p (cc0_rtx, ipat)
1694 #endif
1695 /* The insn requiring the delay may not set anything needed or set by
1696 INSN. */
1697 || insn_sets_resource_p (target_main, &needed, 1)
1698 || insn_sets_resource_p (target_main, &set, 1))
1699 return 0;
1701 /* Insns we pass may not set either NEEDED or SET, so merge them for
1702 simpler tests. */
1703 needed.memory |= set.memory;
1704 needed.unch_memory |= set.unch_memory;
1705 IOR_HARD_REG_SET (needed.regs, set.regs);
1707 /* This insn isn't redundant if it conflicts with an insn that either is
1708 or will be in a delay slot of TARGET. */
1710 while (delay_list)
1712 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
1713 return 0;
1714 delay_list = XEXP (delay_list, 1);
1717 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
1718 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
1719 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
1720 return 0;
1722 /* Scan backwards until we reach a label or an insn that uses something
1723 INSN sets or sets something INSN uses or sets. */
1725 for (trial = PREV_INSN (target);
1726 trial && GET_CODE (trial) != CODE_LABEL;
1727 trial = PREV_INSN (trial))
1729 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
1730 && GET_CODE (trial) != JUMP_INSN)
1731 continue;
1733 pat = PATTERN (trial);
1734 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
1735 continue;
1737 if (GET_CODE (pat) == SEQUENCE)
1739 /* If this is a CALL_INSN and its delay slots, it is hard to track
1740 the resource needs properly, so give up. */
1741 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
1742 return 0;
1744 /* If this is an INSN or JUMP_INSN with delayed effects, it
1745 is hard to track the resource needs properly, so give up. */
1747 #ifdef INSN_SETS_ARE_DELAYED
1748 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1749 return 0;
1750 #endif
1752 #ifdef INSN_REFERENCES_ARE_DELAYED
1753 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
1754 return 0;
1755 #endif
1757 /* See if any of the insns in the delay slot match, updating
1758 resource requirements as we go. */
1759 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
1761 rtx candidate = XVECEXP (pat, 0, i);
1763 /* If an insn will be annulled if the branch is false, it isn't
1764 considered as a possible duplicate insn. */
1765 if (rtx_equal_p (PATTERN (candidate), ipat)
1766 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1767 && INSN_FROM_TARGET_P (candidate)))
1769 /* Show that this insn will be used in the sequel. */
1770 INSN_FROM_TARGET_P (candidate) = 0;
1771 return candidate;
1774 /* Unless this is an annulled insn from the target of a branch,
1775 we must stop if it sets anything needed or set by INSN. */
1776 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
1777 || ! INSN_FROM_TARGET_P (candidate))
1778 && insn_sets_resource_p (candidate, &needed, 1))
1779 return 0;
1783 /* If the insn requiring the delay slot conflicts with INSN, we
1784 must stop. */
1785 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
1786 return 0;
1788 else
1790 /* See if TRIAL is the same as INSN. */
1791 pat = PATTERN (trial);
1792 if (rtx_equal_p (pat, ipat))
1793 return trial;
1795 /* Can't go any further if TRIAL conflicts with INSN. */
1796 if (insn_sets_resource_p (trial, &needed, 1))
1797 return 0;
1801 return 0;
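/* Illustration (a hypothetical case, not taken from any port): if TARGET is
   preceded by

       (insn ... (set (reg 7) (plus (reg 7) (const_int 4))))

   and INSN has exactly the same pattern, with nothing in between setting or
   using the resources involved, the scan above returns that earlier insn and
   the caller may treat INSN as already present.  */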
1804 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
1805 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
1806 is non-zero, we are allowed to fall into this thread; otherwise, we are
1807 not.
1809 If LABEL is used more than once or we pass a label other than LABEL before
1810 finding an active insn, we do not own this thread. */
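/* Illustration (labels hypothetical): for a branch to L1,

       beq  L1
       ...
   L1: add  ...

   the thread starting at L1 is owned only if L1 is the label passed in, it
   has no other uses, and no other label is hit before the first active insn.
   A thread that can also be reached by falling through (no BARRIER in front
   of it) is owned only when ALLOW_FALLTHROUGH is non-zero.  */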
1812 static int
1813 own_thread_p (thread, label, allow_fallthrough)
1814 rtx thread;
1815 rtx label;
1816 int allow_fallthrough;
1818 rtx active_insn;
1819 rtx insn;
1821 /* We don't own the function end. */
1822 if (thread == 0)
1823 return 0;
1825 /* Get the first active insn, or THREAD, if it is an active insn. */
1826 active_insn = next_active_insn (PREV_INSN (thread));
1828 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
1829 if (GET_CODE (insn) == CODE_LABEL
1830 && (insn != label || LABEL_NUSES (insn) != 1))
1831 return 0;
1833 if (allow_fallthrough)
1834 return 1;
1836 /* Ensure that we reach a BARRIER before any insn or label. */
1837 for (insn = prev_nonnote_insn (thread);
1838 insn == 0 || GET_CODE (insn) != BARRIER;
1839 insn = prev_nonnote_insn (insn))
1840 if (insn == 0
1841 || GET_CODE (insn) == CODE_LABEL
1842 || (GET_CODE (insn) == INSN
1843 && GET_CODE (PATTERN (insn)) != USE
1844 && GET_CODE (PATTERN (insn)) != CLOBBER))
1845 return 0;
1847 return 1;
1850 /* Called when INSN is being moved from a location near the target of a jump.
1851 We leave a marker of the form (use (INSN)) immediately in front
1852 of WHERE for mark_target_live_regs. These markers will be deleted when
1853 reorg finishes.
1855 We used to try to update the live status of registers if WHERE is at
1856 the start of a basic block, but that can't work since we may remove a
1857 BARRIER in relax_delay_slots. */
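/* Roughly, the marker emitted below looks like

       (insn (use (insn ...)))      placed just before WHERE

   (a sketch, not literal RTL output).  dbr_schedule removes all such USE
   markers again after the delay-slot passes have run.  */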
1859 static void
1860 update_block (insn, where)
1861 rtx insn;
1862 rtx where;
1864 /* Ignore if this was in a delay slot and it came from the target of
1865 a branch. */
1866 if (INSN_FROM_TARGET_P (insn))
1867 return;
1869 emit_insn_before (gen_rtx_USE (VOIDmode, insn), where);
1871 /* INSN might be making a value live in a block where it didn't use to
1872 be. So recompute liveness information for this block. */
1874 incr_ticks_for_insn (insn);
1877 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
1878 the basic block containing the jump. */
1880 static int
1881 reorg_redirect_jump (jump, nlabel)
1882 rtx jump;
1883 rtx nlabel;
1885 incr_ticks_for_insn (jump);
1886 return redirect_jump (jump, nlabel);
1889 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
1890 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
1891 that reference values used in INSN. If we find one, then we move the
1892 REG_DEAD note to INSN.
1894 This is needed to handle the case where a later insn (after INSN) has a
1895 REG_DEAD note for a register used by INSN, and this later insn subsequently
1896 gets moved before a CODE_LABEL because it is a redundant insn. In this
1897 case, mark_target_live_regs may be confused into thinking the register
1898 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
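/* A rough example (register numbers hypothetical):

       INSN:           (set (reg 9) (plus (reg 3) (reg 4)))
       later insn P:   ... with note (expr_list:REG_DEAD (reg 3) ...)

   where P lies between INSN and DELAYED_INSN.  Since INSN uses (reg 3), the
   REG_DEAD note is moved from P onto INSN so that the liveness seen by
   mark_target_live_regs stays consistent after INSN is moved forward.  */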
1900 static void
1901 update_reg_dead_notes (insn, delayed_insn)
1902 rtx insn, delayed_insn;
1904 rtx p, link, next;
1906 for (p = next_nonnote_insn (insn); p != delayed_insn;
1907 p = next_nonnote_insn (p))
1908 for (link = REG_NOTES (p); link; link = next)
1910 next = XEXP (link, 1);
1912 if (REG_NOTE_KIND (link) != REG_DEAD
1913 || GET_CODE (XEXP (link, 0)) != REG)
1914 continue;
1916 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
1918 /* Move the REG_DEAD note from P to INSN. */
1919 remove_note (p, link);
1920 XEXP (link, 1) = REG_NOTES (insn);
1921 REG_NOTES (insn) = link;
1926 /* Called when an insn redundant with start_insn is deleted. If there
1927 is a REG_DEAD note for the target of start_insn between start_insn
1928 and stop_insn, then the REG_DEAD note needs to be deleted since the
1929 value no longer dies there.
1931 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
1932 confused into thinking the register is dead. */
1934 static void
1935 fix_reg_dead_note (start_insn, stop_insn)
1936 rtx start_insn, stop_insn;
1938 rtx p, link, next;
1940 for (p = next_nonnote_insn (start_insn); p != stop_insn;
1941 p = next_nonnote_insn (p))
1942 for (link = REG_NOTES (p); link; link = next)
1944 next = XEXP (link, 1);
1946 if (REG_NOTE_KIND (link) != REG_DEAD
1947 || GET_CODE (XEXP (link, 0)) != REG)
1948 continue;
1950 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
1952 remove_note (p, link);
1953 return;
1958 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
1960 This handles the case of udivmodXi4 instructions which optimize their
1961 output depending on whether any REG_UNUSED notes are present.
1962 We must make sure that INSN calculates as many results as REDUNDANT_INSN
1963 does. */
1965 static void
1966 update_reg_unused_notes (insn, redundant_insn)
1967 rtx insn, redundant_insn;
1969 rtx link, next;
1971 for (link = REG_NOTES (insn); link; link = next)
1973 next = XEXP (link, 1);
1975 if (REG_NOTE_KIND (link) != REG_UNUSED
1976 || GET_CODE (XEXP (link, 0)) != REG)
1977 continue;
1979 if (! find_regno_note (redundant_insn, REG_UNUSED,
1980 REGNO (XEXP (link, 0))))
1981 remove_note (insn, link);
1985 /* Scan a function looking for insns that need a delay slot and find insns to
1986 put into the delay slot.
1988 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
1989 as calls). We do these first since we don't want jump insns (that are
1990 easier to fill) to get the only insns that could be used for non-jump insns.
1991 When it is zero, only try to fill JUMP_INSNs.
1993 When slots are filled in this manner, the insns (including the
1994 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
1995 it is possible to tell whether a delay slot has really been filled
1996 or not. `final' knows how to deal with this, by communicating
1997 through FINAL_SEQUENCE. */
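/* For reference, a filled insn ends up looking roughly like

       (insn (sequence [ (call_insn ...)        ; element 0: the delay insn
                         (insn ...) ... ]))     ; elements 1..N: the fillers

   (a sketch, not literal RTL).  The disabled fragment below is a minimal,
   hypothetical example of walking such a SEQUENCE with the accessors used
   throughout this file; it is not part of the pass itself.  */
#if 0
static int
example_count_filled_slots (seq_insn)
     rtx seq_insn;
{
  rtx pat = PATTERN (seq_insn);

  if (GET_CODE (pat) != SEQUENCE)
    return 0;

  /* Element 0 is the insn that needed the slots; the rest fill them.  */
  return XVECLEN (pat, 0) - 1;
}
#endif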
1999 static void
2000 fill_simple_delay_slots (non_jumps_p)
2001 int non_jumps_p;
2003 register rtx insn, pat, trial, next_trial;
2004 register int i;
2005 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2006 struct resources needed, set;
2007 int slots_to_fill, slots_filled;
2008 rtx delay_list;
2010 for (i = 0; i < num_unfilled_slots; i++)
2012 int flags;
2013 /* Get the next insn to fill. If it has already had any slots assigned,
2014 we can't do anything with it. Maybe we'll improve this later. */
2016 insn = unfilled_slots_base[i];
2017 if (insn == 0
2018 || INSN_DELETED_P (insn)
2019 || (GET_CODE (insn) == INSN
2020 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2021 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2022 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2023 continue;
2025 /* It may have been that this insn used to need delay slots, but
2026 now doesn't; ignore in that case. This can happen, for example,
2027 on the HP PA RISC, where the number of delay slots depends on
2028 what insns are nearby. */
2029 slots_to_fill = num_delay_slots (insn);
2031 Some machine descriptions have defined instructions to have
2032 delay slots only in certain circumstances which may depend on
2033 nearby insns (which change due to reorg's actions).
2035 For example, the PA port normally has delay slots for unconditional
2036 jumps.
2038 However, the PA port claims such jumps do not have a delay slot
2039 if they are immediate successors of certain CALL_INSNs. This
2040 allows the port to favor filling the delay slot of the call with
2041 the unconditional jump. */
2042 if (slots_to_fill == 0)
2043 continue;
2045 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
2046 says how many. After initialization, first try optimizing
2048 call _foo call _foo
2049 nop add %o7,.-L1,%o7
2050 b,a L1
2053 If this case applies, the delay slot of the call is filled with
2054 the unconditional jump. This is done first to avoid having the
2055 delay slot of the call filled in the backward scan. Also, since
2056 the unconditional jump is likely to also have a delay slot, that
2057 insn must exist when it is subsequently scanned.
2059 This is tried on each insn with delay slots as some machines
2060 have insns which perform calls, but are not represented as
2061 CALL_INSNs. */
2063 slots_filled = 0;
2064 delay_list = 0;
2066 if (GET_CODE (insn) == JUMP_INSN)
2067 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2068 else
2069 flags = get_jump_flags (insn, NULL_RTX);
2071 if ((trial = next_active_insn (insn))
2072 && GET_CODE (trial) == JUMP_INSN
2073 && simplejump_p (trial)
2074 && eligible_for_delay (insn, slots_filled, trial, flags)
2075 && no_labels_between_p (insn, trial))
2077 rtx *tmp;
2078 slots_filled++;
2079 delay_list = add_to_delay_list (trial, delay_list);
2081 /* TRIAL may have had its delay slot filled, then unfilled. When
2082 the delay slot is unfilled, TRIAL is placed back on the unfilled
2083 slots obstack. Unfortunately, it is placed on the end of the
2084 obstack, not in its original location. Therefore, we must search
2085 from entry i + 1 to the end of the unfilled slots obstack to
2086 try and find TRIAL. */
2087 tmp = &unfilled_slots_base[i + 1];
2088 while (*tmp != trial && tmp != unfilled_slots_next)
2089 tmp++;
2091 /* Remove the unconditional jump from consideration for delay slot
2092 filling and unthread it. */
2093 if (*tmp == trial)
2094 *tmp = 0;
2096 rtx next = NEXT_INSN (trial);
2097 rtx prev = PREV_INSN (trial);
2098 if (prev)
2099 NEXT_INSN (prev) = next;
2100 if (next)
2101 PREV_INSN (next) = prev;
2105 /* Now, scan backwards from the insn to search for a potential
2106 delay-slot candidate. Stop searching when a label or jump is hit.
2108 For each candidate, if it is to go into the delay slot (moved
2109 forward in execution sequence), it must not need or set any resources
2110 that were set by later insns and must not set any resources that
2111 are needed for those insns.
2113 The delay slot insn itself sets resources unless it is a call
2114 (in which case the called routine, not the insn itself, is doing
2115 the setting). */
2117 if (slots_filled < slots_to_fill)
2119 CLEAR_RESOURCE (&needed);
2120 CLEAR_RESOURCE (&set);
2121 mark_set_resources (insn, &set, 0, 0);
2122 mark_referenced_resources (insn, &needed, 0);
2124 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
2125 trial = next_trial)
2127 next_trial = prev_nonnote_insn (trial);
2129 /* This must be an INSN or CALL_INSN. */
2130 pat = PATTERN (trial);
2132 /* USE and CLOBBER at this level are just for flow; ignore them. */
2133 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2134 continue;
2136 /* Check for resource conflict first, to avoid unnecessary
2137 splitting. */
2138 if (! insn_references_resource_p (trial, &set, 1)
2139 && ! insn_sets_resource_p (trial, &set, 1)
2140 && ! insn_sets_resource_p (trial, &needed, 1)
2141 #ifdef HAVE_cc0
2142 /* Can't separate set of cc0 from its use. */
2143 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2144 #endif
2147 trial = try_split (pat, trial, 1);
2148 next_trial = prev_nonnote_insn (trial);
2149 if (eligible_for_delay (insn, slots_filled, trial, flags))
2151 /* In this case, we are searching backward, so if we
2152 find insns to put on the delay list, we want
2153 to put them at the head, rather than the
2154 tail, of the list. */
2156 update_reg_dead_notes (trial, insn);
2157 delay_list = gen_rtx_INSN_LIST (VOIDmode,
2158 trial, delay_list);
2159 update_block (trial, trial);
2160 delete_insn (trial);
2161 if (slots_to_fill == ++slots_filled)
2162 break;
2163 continue;
2167 mark_set_resources (trial, &set, 0, 1);
2168 mark_referenced_resources (trial, &needed, 1);
2172 /* If all needed slots haven't been filled, we come here. */
2174 /* Try to optimize case of jumping around a single insn. */
2175 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
2176 if (slots_filled != slots_to_fill
2177 && delay_list == 0
2178 && GET_CODE (insn) == JUMP_INSN
2179 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
2181 delay_list = optimize_skip (insn);
2182 if (delay_list)
2183 slots_filled += 1;
2185 #endif
2187 /* Try to get insns from beyond the insn needing the delay slot.
2188 These insns can neither set nor reference resources set in insns being
2189 skipped, cannot set resources in the insn being skipped, and, if this
2190 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
2191 call might not return).
2193 There used to be code which continued past the target label if
2194 we saw all uses of the target label. This code did not work,
2195 because it failed to account for some instructions which were
2196 both annulled and marked as from the target. This can happen as a
2197 result of optimize_skip. Since this code was redundant with
2198 fill_eager_delay_slots anyway, it was just deleted. */
2200 if (slots_filled != slots_to_fill
2201 && (GET_CODE (insn) != JUMP_INSN
2202 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
2203 && ! simplejump_p (insn)
2204 && JUMP_LABEL (insn) != 0)))
2206 rtx target = 0;
2207 int maybe_never = 0;
2208 struct resources needed_at_jump;
2210 CLEAR_RESOURCE (&needed);
2211 CLEAR_RESOURCE (&set);
2213 if (GET_CODE (insn) == CALL_INSN)
2215 mark_set_resources (insn, &set, 0, 1);
2216 mark_referenced_resources (insn, &needed, 1);
2217 maybe_never = 1;
2219 else
2221 mark_set_resources (insn, &set, 0, 1);
2222 mark_referenced_resources (insn, &needed, 1);
2223 if (GET_CODE (insn) == JUMP_INSN)
2224 target = JUMP_LABEL (insn);
2227 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
2229 rtx pat, trial_delay;
2231 next_trial = next_nonnote_insn (trial);
2233 if (GET_CODE (trial) == CODE_LABEL
2234 || GET_CODE (trial) == BARRIER)
2235 break;
2237 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
2238 pat = PATTERN (trial);
2240 /* Stand-alone USE and CLOBBER are just for flow. */
2241 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2242 continue;
2244 /* If this already has filled delay slots, get the insn needing
2245 the delay slots. */
2246 if (GET_CODE (pat) == SEQUENCE)
2247 trial_delay = XVECEXP (pat, 0, 0);
2248 else
2249 trial_delay = trial;
2251 /* If this is a jump insn to our target, indicate that we have
2252 seen another jump to it. If we aren't handling a conditional
2253 jump, stop our search. Otherwise, compute the needs at its
2254 target and add them to NEEDED. */
2255 if (GET_CODE (trial_delay) == JUMP_INSN)
2257 if (target == 0)
2258 break;
2259 else if (JUMP_LABEL (trial_delay) != target)
2261 rtx ninsn =
2262 next_active_insn (JUMP_LABEL (trial_delay));
2264 mark_target_live_regs (get_insns (), ninsn,
2265 &needed_at_jump);
2266 needed.memory |= needed_at_jump.memory;
2267 needed.unch_memory |= needed_at_jump.unch_memory;
2268 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
2272 /* See if we have a resource problem before we try to
2273 split. */
2274 if (target == 0
2275 && GET_CODE (pat) != SEQUENCE
2276 && ! insn_references_resource_p (trial, &set, 1)
2277 && ! insn_sets_resource_p (trial, &set, 1)
2278 && ! insn_sets_resource_p (trial, &needed, 1)
2279 #ifdef HAVE_cc0
2280 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
2281 #endif
2282 && ! (maybe_never && may_trap_p (pat))
2283 && (trial = try_split (pat, trial, 0))
2284 && eligible_for_delay (insn, slots_filled, trial, flags))
2286 next_trial = next_nonnote_insn (trial);
2287 delay_list = add_to_delay_list (trial, delay_list);
2289 #ifdef HAVE_cc0
2290 if (reg_mentioned_p (cc0_rtx, pat))
2291 link_cc0_insns (trial);
2292 #endif
2294 delete_insn (trial);
2295 if (slots_to_fill == ++slots_filled)
2296 break;
2297 continue;
2300 mark_set_resources (trial, &set, 0, 1);
2301 mark_referenced_resources (trial, &needed, 1);
2303 /* Ensure we don't put insns between the setting of cc and the
2304 comparison by moving a setting of cc into an earlier delay
2305 slot since these insns could clobber the condition code. */
2306 set.cc = 1;
2308 /* If this is a call or jump, we might not get here. */
2309 if (GET_CODE (trial_delay) == CALL_INSN
2310 || GET_CODE (trial_delay) == JUMP_INSN)
2311 maybe_never = 1;
2314 /* If there are slots left to fill and our search was stopped by an
2315 unconditional branch, try the insn at the branch target. We can
2316 redirect the branch if it works.
2318 Don't do this if the insn at the branch target is a branch. */
2319 if (slots_to_fill != slots_filled
2320 && trial
2321 && GET_CODE (trial) == JUMP_INSN
2322 && simplejump_p (trial)
2323 && (target == 0 || JUMP_LABEL (trial) == target)
2324 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
2325 && ! (GET_CODE (next_trial) == INSN
2326 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
2327 && GET_CODE (next_trial) != JUMP_INSN
2328 && ! insn_references_resource_p (next_trial, &set, 1)
2329 && ! insn_sets_resource_p (next_trial, &set, 1)
2330 && ! insn_sets_resource_p (next_trial, &needed, 1)
2331 #ifdef HAVE_cc0
2332 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
2333 #endif
2334 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
2335 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
2336 && eligible_for_delay (insn, slots_filled, next_trial, flags))
2338 rtx new_label = next_active_insn (next_trial);
2340 if (new_label != 0)
2341 new_label = get_label_before (new_label);
2342 else
2343 new_label = find_end_label ();
2345 delay_list
2346 = add_to_delay_list (copy_rtx (next_trial), delay_list);
2347 slots_filled++;
2348 reorg_redirect_jump (trial, new_label);
2350 /* If we merged because we both jumped to the same place,
2351 redirect the original insn also. */
2352 if (target)
2353 reorg_redirect_jump (insn, new_label);
2357 /* If this is an unconditional jump, then try to get insns from the
2358 target of the jump. */
2359 if (GET_CODE (insn) == JUMP_INSN
2360 && simplejump_p (insn)
2361 && slots_filled != slots_to_fill)
2362 delay_list
2363 = fill_slots_from_thread (insn, const_true_rtx,
2364 next_active_insn (JUMP_LABEL (insn)),
2365 NULL, 1, 1,
2366 own_thread_p (JUMP_LABEL (insn),
2367 JUMP_LABEL (insn), 0),
2368 slots_to_fill, &slots_filled,
2369 delay_list);
2371 if (delay_list)
2372 unfilled_slots_base[i]
2373 = emit_delay_sequence (insn, delay_list, slots_filled);
2375 if (slots_to_fill == slots_filled)
2376 unfilled_slots_base[i] = 0;
2378 note_delay_statistics (slots_filled, 0);
2381 #ifdef DELAY_SLOTS_FOR_EPILOGUE
2382 /* See if the epilogue needs any delay slots. Try to fill them if so.
2383 The only thing we can do is scan backwards from the end of the
2384 function. If we did this in a previous pass, it is incorrect to do it
2385 again. */
2386 if (current_function_epilogue_delay_list)
2387 return;
2389 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
2390 if (slots_to_fill == 0)
2391 return;
2393 slots_filled = 0;
2394 CLEAR_RESOURCE (&set);
2396 /* The frame pointer and stack pointer are needed at the beginning of
2397 the epilogue, so instructions setting them can not be put in the
2398 epilogue delay slot. However, everything else needed at function
2399 end is safe, so we don't want to use end_of_function_needs here. */
2400 CLEAR_RESOURCE (&needed);
2401 if (frame_pointer_needed)
2403 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
2404 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2405 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
2406 #endif
2407 #ifdef EXIT_IGNORE_STACK
2408 if (! EXIT_IGNORE_STACK
2409 || current_function_sp_is_unchanging)
2410 #endif
2411 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2413 else
2414 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
2416 #ifdef EPILOGUE_USES
2417 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2419 if (EPILOGUE_USES (i))
2420 SET_HARD_REG_BIT (needed.regs, i);
2422 #endif
2424 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
2425 trial = PREV_INSN (trial))
2427 if (GET_CODE (trial) == NOTE)
2428 continue;
2429 pat = PATTERN (trial);
2430 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2431 continue;
2433 if (! insn_references_resource_p (trial, &set, 1)
2434 && ! insn_sets_resource_p (trial, &needed, 1)
2435 && ! insn_sets_resource_p (trial, &set, 1)
2436 #ifdef HAVE_cc0
2437 /* Don't want to mess with cc0 here. */
2438 && ! reg_mentioned_p (cc0_rtx, pat)
2439 #endif
2442 trial = try_split (pat, trial, 1);
2443 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
2445 /* Here as well we are searching backward, so put the
2446 insns we find on the head of the list. */
2448 current_function_epilogue_delay_list
2449 = gen_rtx_INSN_LIST (VOIDmode, trial,
2450 current_function_epilogue_delay_list);
2451 mark_end_of_function_resources (trial, 1);
2452 update_block (trial, trial);
2453 delete_insn (trial);
2455 /* Clear deleted bit so final.c will output the insn. */
2456 INSN_DELETED_P (trial) = 0;
2458 if (slots_to_fill == ++slots_filled)
2459 break;
2460 continue;
2464 mark_set_resources (trial, &set, 0, 1);
2465 mark_referenced_resources (trial, &needed, 1);
2468 note_delay_statistics (slots_filled, 0);
2469 #endif
2472 /* Try to find insns to place in delay slots.
2474 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
2475 or is an unconditional branch if CONDITION is const_true_rtx.
2476 *PSLOTS_FILLED is updated with the number of slots that we have filled.
2478 THREAD is a flow of control, either the insns to be executed if the
2479 branch is true or those to be executed if it is false; THREAD_IF_TRUE says which.
2481 OPPOSITE_THREAD is the thread in the opposite direction. It is used
2482 to see if any potential delay slot insns set things needed there.
2484 LIKELY is non-zero if it is extremely likely that the branch will be
2485 taken and THREAD_IF_TRUE is set. This is used for the branch at the
2486 end of a loop back up to the top.
2488 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
2489 thread. I.e., it is the fallthrough code of our jump or the target of the
2490 jump when we are the only jump going there.
2492 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
2493 case, we can only take insns from the head of the thread for our delay
2494 slot. We then adjust the jump to point after the insns we have taken. */
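/* Illustration of the two threads of a conditional branch (labels
   hypothetical):

       bne  L1
       insn A         <- fall-through thread (THREAD when THREAD_IF_TRUE == 0)
       ...
   L1: insn B         <- target thread (THREAD when THREAD_IF_TRUE != 0)

   Whichever path is passed as THREAD is mined for delay-slot candidates;
   OPPOSITE_THREAD is the other path and only contributes the resources a
   candidate must not set.  */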
2496 static rtx
2497 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
2498 thread_if_true, own_thread,
2499 slots_to_fill, pslots_filled, delay_list)
2500 rtx insn;
2501 rtx condition;
2502 rtx thread, opposite_thread;
2503 int likely;
2504 int thread_if_true;
2505 int own_thread;
2506 int slots_to_fill, *pslots_filled;
2507 rtx delay_list;
2509 rtx new_thread;
2510 struct resources opposite_needed, set, needed;
2511 rtx trial;
2512 int lose = 0;
2513 int must_annul = 0;
2514 int flags;
2516 /* Validate our arguments. */
2517 if ((condition == const_true_rtx && ! thread_if_true)
2518 || (! own_thread && ! thread_if_true))
2519 abort ();
2521 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2523 /* If our thread is the end of subroutine, we can't get any delay
2524 insns from that. */
2525 if (thread == 0)
2526 return delay_list;
2528 /* If this is an unconditional branch, nothing is needed at the
2529 opposite thread. Otherwise, compute what is needed there. */
2530 if (condition == const_true_rtx)
2531 CLEAR_RESOURCE (&opposite_needed);
2532 else
2533 mark_target_live_regs (get_insns (), opposite_thread, &opposite_needed);
2535 /* If the insn at THREAD can be split, do it here to avoid having to
2536 update THREAD and NEW_THREAD if it is done in the loop below. Also
2537 initialize NEW_THREAD. */
2539 new_thread = thread = try_split (PATTERN (thread), thread, 0);
2541 /* Scan insns at THREAD. We are looking for an insn that can be removed
2542 from THREAD (it neither sets nor references resources that were set
2543 ahead of it and it doesn't set anything needed by the insns ahead of
2544 it) and that either can be placed in an annulling insn or isn't
2545 needed at OPPOSITE_THREAD. */
2547 CLEAR_RESOURCE (&needed);
2548 CLEAR_RESOURCE (&set);
2550 /* If we do not own this thread, we must stop as soon as we find
2551 something that we can't put in a delay slot, since all we can do
2552 is branch into THREAD at a later point. Therefore, labels stop
2553 the search if this is not the `true' thread. */
2555 for (trial = thread;
2556 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
2557 trial = next_nonnote_insn (trial))
2559 rtx pat, old_trial;
2561 /* If we have passed a label, we no longer own this thread. */
2562 if (GET_CODE (trial) == CODE_LABEL)
2564 own_thread = 0;
2565 continue;
2568 pat = PATTERN (trial);
2569 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2570 continue;
2572 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
2573 don't separate or copy insns that set and use CC0. */
2574 if (! insn_references_resource_p (trial, &set, 1)
2575 && ! insn_sets_resource_p (trial, &set, 1)
2576 && ! insn_sets_resource_p (trial, &needed, 1)
2577 #ifdef HAVE_cc0
2578 && ! (reg_mentioned_p (cc0_rtx, pat)
2579 && (! own_thread || ! sets_cc0_p (pat)))
2580 #endif
2583 rtx prior_insn;
2585 /* If TRIAL is redundant with some insn before INSN, we don't
2586 actually need to add it to the delay list; we can merely pretend
2587 we did. */
2588 if ((prior_insn = redundant_insn (trial, insn, delay_list)))
2590 fix_reg_dead_note (prior_insn, insn);
2591 if (own_thread)
2593 update_block (trial, thread);
2594 if (trial == thread)
2596 thread = next_active_insn (thread);
2597 if (new_thread == trial)
2598 new_thread = thread;
2601 delete_insn (trial);
2603 else
2605 update_reg_unused_notes (prior_insn, trial);
2606 new_thread = next_active_insn (trial);
2609 continue;
2612 /* There are two ways we can win: If TRIAL doesn't set anything
2613 needed at the opposite thread and can't trap, or if it can
2614 go into an annulled delay slot. */
2615 if (!must_annul
2616 && (condition == const_true_rtx
2617 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
2618 && ! may_trap_p (pat))))
2620 old_trial = trial;
2621 trial = try_split (pat, trial, 0);
2622 if (new_thread == old_trial)
2623 new_thread = trial;
2624 if (thread == old_trial)
2625 thread = trial;
2626 pat = PATTERN (trial);
2627 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
2628 goto winner;
2630 else if (0
2631 #ifdef ANNUL_IFTRUE_SLOTS
2632 || ! thread_if_true
2633 #endif
2634 #ifdef ANNUL_IFFALSE_SLOTS
2635 || thread_if_true
2636 #endif
2639 old_trial = trial;
2640 trial = try_split (pat, trial, 0);
2641 if (new_thread == old_trial)
2642 new_thread = trial;
2643 if (thread == old_trial)
2644 thread = trial;
2645 pat = PATTERN (trial);
2646 if ((must_annul || delay_list == NULL) && (thread_if_true
2647 ? check_annul_list_true_false (0, delay_list)
2648 && eligible_for_annul_false (insn, *pslots_filled, trial, flags)
2649 : check_annul_list_true_false (1, delay_list)
2650 && eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
2652 rtx temp;
2654 must_annul = 1;
2655 winner:
2657 #ifdef HAVE_cc0
2658 if (reg_mentioned_p (cc0_rtx, pat))
2659 link_cc0_insns (trial);
2660 #endif
2662 /* If we own this thread, delete the insn. If this is the
2663 destination of a branch, show that a basic block status
2664 may have been updated. In any case, mark the new
2665 starting point of this thread. */
2666 if (own_thread)
2668 update_block (trial, thread);
2669 if (trial == thread)
2671 thread = next_active_insn (thread);
2672 if (new_thread == trial)
2673 new_thread = thread;
2675 delete_insn (trial);
2677 else
2678 new_thread = next_active_insn (trial);
2680 temp = own_thread ? trial : copy_rtx (trial);
2681 if (thread_if_true)
2682 INSN_FROM_TARGET_P (temp) = 1;
2684 delay_list = add_to_delay_list (temp, delay_list);
2686 if (slots_to_fill == ++(*pslots_filled))
2688 /* Even though we have filled all the slots, we
2689 may be branching to a location that has a
2690 redundant insn. Skip any if so. */
2691 while (new_thread && ! own_thread
2692 && ! insn_sets_resource_p (new_thread, &set, 1)
2693 && ! insn_sets_resource_p (new_thread, &needed, 1)
2694 && ! insn_references_resource_p (new_thread,
2695 &set, 1)
2696 && (prior_insn
2697 = redundant_insn (new_thread, insn,
2698 delay_list)))
2700 /* We know we do not own the thread, so no need
2701 to call update_block and delete_insn. */
2702 fix_reg_dead_note (prior_insn, insn);
2703 update_reg_unused_notes (prior_insn, new_thread);
2704 new_thread = next_active_insn (new_thread);
2706 break;
2709 continue;
2714 /* This insn can't go into a delay slot. */
2715 lose = 1;
2716 mark_set_resources (trial, &set, 0, 1);
2717 mark_referenced_resources (trial, &needed, 1);
2719 /* Ensure we don't put insns between the setting of cc and the comparison
2720 by moving a setting of cc into an earlier delay slot since these insns
2721 could clobber the condition code. */
2722 set.cc = 1;
2724 /* If this insn is a register-register copy and the next insn has
2725 a use of our destination, change it to use our source. That way,
2726 it will become a candidate for our delay slot the next time
2727 through this loop. This case occurs commonly in loops that
2728 scan a list.
2730 We could check for more complex cases than those tested below,
2731 but it doesn't seem worth it. It might also be a good idea to try
2732 to swap the two insns. That might do better.
2734 We can't do this if the next insn modifies our destination, because
2735 that would make the replacement into the insn invalid. We also can't
2736 do this if it modifies our source, because it might be an earlyclobber
2737 operand. This latter test also prevents updating the contents of
2738 a PRE_INC. */
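/* For example (register numbers hypothetical):

       TRIAL:   (set (reg 5) (reg 4))
       NEXT:    (set (mem ...) (reg 5))

   NEXT is changed to use (reg 4) directly, so it no longer reads the
   register TRIAL sets and may itself qualify for the delay slot on a
   later iteration.  */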
2740 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
2741 && GET_CODE (SET_SRC (pat)) == REG
2742 && GET_CODE (SET_DEST (pat)) == REG)
2744 rtx next = next_nonnote_insn (trial);
2746 if (next && GET_CODE (next) == INSN
2747 && GET_CODE (PATTERN (next)) != USE
2748 && ! reg_set_p (SET_DEST (pat), next)
2749 && ! reg_set_p (SET_SRC (pat), next)
2750 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
2751 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
2755 /* If we stopped on a branch insn that has delay slots, see if we can
2756 steal some of the insns in those slots. */
2757 if (trial && GET_CODE (trial) == INSN
2758 && GET_CODE (PATTERN (trial)) == SEQUENCE
2759 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
2761 /* If this is the `true' thread, we will want to follow the jump,
2762 so we can only do this if we have taken everything up to here. */
2763 if (thread_if_true && trial == new_thread)
2764 delay_list
2765 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
2766 delay_list, &set, &needed,
2767 &opposite_needed, slots_to_fill,
2768 pslots_filled, &must_annul,
2769 &new_thread);
2770 else if (! thread_if_true)
2771 delay_list
2772 = steal_delay_list_from_fallthrough (insn, condition,
2773 PATTERN (trial),
2774 delay_list, &set, &needed,
2775 &opposite_needed, slots_to_fill,
2776 pslots_filled, &must_annul);
2779 /* If we haven't found anything for this delay slot and it is very
2780 likely that the branch will be taken, see if the insn at our target
2781 increments or decrements a register with an increment that does not
2782 depend on the destination register. If so, try to place the opposite
2783 arithmetic insn after the jump insn and put the arithmetic insn in the
2784 delay slot. If we can't do this, return. */
2785 if (delay_list == 0 && likely && new_thread
2786 && GET_CODE (new_thread) == INSN
2787 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
2788 && asm_noperands (PATTERN (new_thread)) < 0)
2790 rtx pat = PATTERN (new_thread);
2791 rtx dest;
2792 rtx src;
2794 trial = new_thread;
2795 pat = PATTERN (trial);
2797 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
2798 || ! eligible_for_delay (insn, 0, trial, flags))
2799 return 0;
2801 dest = SET_DEST (pat), src = SET_SRC (pat);
2802 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
2803 && rtx_equal_p (XEXP (src, 0), dest)
2804 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
2806 rtx other = XEXP (src, 1);
2807 rtx new_arith;
2808 rtx ninsn;
2810 /* If this is a constant adjustment, use the same code with
2811 the negated constant. Otherwise, reverse the sense of the
2812 arithmetic. */
2813 if (GET_CODE (other) == CONST_INT)
2814 new_arith = gen_rtx_fmt_ee (GET_CODE (src), GET_MODE (src), dest,
2815 negate_rtx (GET_MODE (src), other));
2816 else
2817 new_arith = gen_rtx_fmt_ee (GET_CODE (src) == PLUS ? MINUS : PLUS,
2818 GET_MODE (src), dest, other);
2820 ninsn = emit_insn_after (gen_rtx_SET (VOIDmode, dest, new_arith),
2821 insn);
2823 if (recog_memoized (ninsn) < 0
2824 || (extract_insn (ninsn), ! constrain_operands (1)))
2826 delete_insn (ninsn);
2827 return 0;
2830 if (own_thread)
2832 update_block (trial, thread);
2833 if (trial == thread)
2835 thread = next_active_insn (thread);
2836 if (new_thread == trial)
2837 new_thread = thread;
2839 delete_insn (trial);
2841 else
2842 new_thread = next_active_insn (trial);
2844 ninsn = own_thread ? trial : copy_rtx (trial);
2845 if (thread_if_true)
2846 INSN_FROM_TARGET_P (ninsn) = 1;
2848 delay_list = add_to_delay_list (ninsn, NULL_RTX);
2849 (*pslots_filled)++;
2853 if (delay_list && must_annul)
2854 INSN_ANNULLED_BRANCH_P (insn) = 1;
2856 /* If we are to branch into the middle of this thread, find an appropriate
2857 label or make a new one if none, and redirect INSN to it. If we hit the
2858 end of the function, use the end-of-function label. */
2859 if (new_thread != thread)
2861 rtx label;
2863 if (! thread_if_true)
2864 abort ();
2866 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
2867 && (simplejump_p (new_thread)
2868 || GET_CODE (PATTERN (new_thread)) == RETURN)
2869 && redirect_with_delay_list_safe_p (insn,
2870 JUMP_LABEL (new_thread),
2871 delay_list))
2872 new_thread = follow_jumps (JUMP_LABEL (new_thread));
2874 if (new_thread == 0)
2875 label = find_end_label ();
2876 else if (GET_CODE (new_thread) == CODE_LABEL)
2877 label = new_thread;
2878 else
2879 label = get_label_before (new_thread);
2881 reorg_redirect_jump (insn, label);
2884 return delay_list;
2887 /* Make another attempt to find insns to place in delay slots.
2889 We previously looked for insns located in front of the delay insn
2890 and, for non-jump delay insns, located behind the delay insn.
2892 Here only try to schedule jump insns and try to move insns from either
2893 the target or the following insns into the delay slot. If annulling is
2894 supported, we will be likely to do this. Otherwise, we can do this only
2895 if safe. */
2897 static void
2898 fill_eager_delay_slots ()
2900 register rtx insn;
2901 register int i;
2902 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2904 for (i = 0; i < num_unfilled_slots; i++)
2906 rtx condition;
2907 rtx target_label, insn_at_target, fallthrough_insn;
2908 rtx delay_list = 0;
2909 int own_target;
2910 int own_fallthrough;
2911 int prediction, slots_to_fill, slots_filled;
2913 insn = unfilled_slots_base[i];
2914 if (insn == 0
2915 || INSN_DELETED_P (insn)
2916 || GET_CODE (insn) != JUMP_INSN
2917 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
2918 continue;
2920 slots_to_fill = num_delay_slots (insn);
2921 Some machine descriptions have defined instructions to have
2922 delay slots only in certain circumstances which may depend on
2923 nearby insns (which change due to reorg's actions).
2925 For example, the PA port normally has delay slots for unconditional
2926 jumps.
2928 However, the PA port claims such jumps do not have a delay slot
2929 if they are immediate successors of certain CALL_INSNs. This
2930 allows the port to favor filling the delay slot of the call with
2931 the unconditional jump. */
2932 if (slots_to_fill == 0)
2933 continue;
2935 slots_filled = 0;
2936 target_label = JUMP_LABEL (insn);
2937 condition = get_branch_condition (insn, target_label);
2939 if (condition == 0)
2940 continue;
2942 /* Get the next active fallthrough and target insns and see if we own
2943 them. Then see whether the branch is likely true. We don't need
2944 to do a lot of this for unconditional branches. */
2946 insn_at_target = next_active_insn (target_label);
2947 own_target = own_thread_p (target_label, target_label, 0);
2949 if (condition == const_true_rtx)
2951 own_fallthrough = 0;
2952 fallthrough_insn = 0;
2953 prediction = 2;
2955 else
2957 fallthrough_insn = next_active_insn (insn);
2958 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
2959 prediction = mostly_true_jump (insn, condition);
2962 /* If this insn is expected to branch, first try to get insns from our
2963 target, then our fallthrough insns. If it is not expected to branch,
2964 try the other order. */
2966 if (prediction > 0)
2968 delay_list
2969 = fill_slots_from_thread (insn, condition, insn_at_target,
2970 fallthrough_insn, prediction == 2, 1,
2971 own_target,
2972 slots_to_fill, &slots_filled, delay_list);
2974 if (delay_list == 0 && own_fallthrough)
2976 /* Even though we didn't find anything for delay slots,
2977 we might have found a redundant insn which we deleted
2978 from the thread that was filled. So we have to recompute
2979 the next insn at the target. */
2980 target_label = JUMP_LABEL (insn);
2981 insn_at_target = next_active_insn (target_label);
2983 delay_list
2984 = fill_slots_from_thread (insn, condition, fallthrough_insn,
2985 insn_at_target, 0, 0,
2986 own_fallthrough,
2987 slots_to_fill, &slots_filled,
2988 delay_list);
2991 else
2993 if (own_fallthrough)
2994 delay_list
2995 = fill_slots_from_thread (insn, condition, fallthrough_insn,
2996 insn_at_target, 0, 0,
2997 own_fallthrough,
2998 slots_to_fill, &slots_filled,
2999 delay_list);
3001 if (delay_list == 0)
3002 delay_list
3003 = fill_slots_from_thread (insn, condition, insn_at_target,
3004 next_active_insn (insn), 0, 1,
3005 own_target,
3006 slots_to_fill, &slots_filled,
3007 delay_list);
3010 if (delay_list)
3011 unfilled_slots_base[i]
3012 = emit_delay_sequence (insn, delay_list, slots_filled);
3014 if (slots_to_fill == slots_filled)
3015 unfilled_slots_base[i] = 0;
3017 note_delay_statistics (slots_filled, 1);
3021 /* Once we have tried two ways to fill a delay slot, make a pass over the
3022 code to try to improve the results and to do such things as more jump
3023 threading. */
3025 static void
3026 relax_delay_slots (first)
3027 rtx first;
3029 register rtx insn, next, pat;
3030 register rtx trial, delay_insn, target_label;
3032 /* Look at every JUMP_INSN and see if we can improve it. */
3033 for (insn = first; insn; insn = next)
3035 rtx other;
3037 next = next_active_insn (insn);
3039 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3040 the next insn, or jumps to a label that is not the last of a
3041 group of consecutive labels. */
3042 if (GET_CODE (insn) == JUMP_INSN
3043 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3044 && (target_label = JUMP_LABEL (insn)) != 0)
3046 target_label = follow_jumps (target_label);
3047 target_label = prev_label (next_active_insn (target_label));
3049 if (target_label == 0)
3050 target_label = find_end_label ();
3052 if (next_active_insn (target_label) == next
3053 && ! condjump_in_parallel_p (insn))
3055 delete_jump (insn);
3056 continue;
3059 if (target_label != JUMP_LABEL (insn))
3060 reorg_redirect_jump (insn, target_label);
3062 /* See if this jump branches around an unconditional jump.
3063 If so, invert this jump and point it to the target of the
3064 second jump. */
3065 if (next && GET_CODE (next) == JUMP_INSN
3066 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3067 && next_active_insn (target_label) == next_active_insn (next)
3068 && no_labels_between_p (insn, next))
3070 rtx label = JUMP_LABEL (next);
3072 /* Be careful how we do this to avoid deleting code or
3073 labels that are momentarily dead. See similar optimization
3074 in jump.c.
3076 We also need to ensure we properly handle the case when
3077 invert_jump fails. */
3079 ++LABEL_NUSES (target_label);
3080 if (label)
3081 ++LABEL_NUSES (label);
3083 if (invert_jump (insn, label))
3085 delete_insn (next);
3086 next = insn;
3089 if (label)
3090 --LABEL_NUSES (label);
3092 if (--LABEL_NUSES (target_label) == 0)
3093 delete_insn (target_label);
3095 continue;
3099 /* If this is an unconditional jump and the previous insn is a
3100 conditional jump, try reversing the condition of the previous
3101 insn and swapping our targets. The next pass might be able to
3102 fill the slots.
3104 Don't do this if we expect the conditional branch to be true, because
3105 we would then be making the more common case longer. */
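/* Illustration (labels hypothetical):

       beq  L1                        bne  L2
       b    L2         becomes        b    L1
       ...                            ...

   The conditional branch is inverted and pointed at the unconditional
   jump's target, and the unconditional jump is redirected to the old
   conditional target, so the next pass may be able to fill its slots.  */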
3107 if (GET_CODE (insn) == JUMP_INSN
3108 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3109 && (other = prev_active_insn (insn)) != 0
3110 && (condjump_p (other) || condjump_in_parallel_p (other))
3111 && no_labels_between_p (other, insn)
3112 && 0 > mostly_true_jump (other,
3113 get_branch_condition (other,
3114 JUMP_LABEL (other))))
3116 rtx other_target = JUMP_LABEL (other);
3117 target_label = JUMP_LABEL (insn);
3119 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3120 as we move the label. */
3121 if (other_target)
3122 ++LABEL_NUSES (other_target);
3124 if (invert_jump (other, target_label))
3125 reorg_redirect_jump (insn, other_target);
3127 if (other_target)
3128 --LABEL_NUSES (other_target);
3131 /* Now look only at cases where we have filled a delay slot. */
3132 if (GET_CODE (insn) != INSN
3133 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3134 continue;
3136 pat = PATTERN (insn);
3137 delay_insn = XVECEXP (pat, 0, 0);
3139 /* See if the first insn in the delay slot is redundant with some
3140 previous insn. Remove it from the delay slot if so; then set up
3141 to reprocess this insn. */
3142 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3144 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3145 next = prev_active_insn (next);
3146 continue;
3149 /* See if we have a RETURN insn with a filled delay slot followed
3150 by a RETURN insn with an unfilled delay slot. If so, we can delete
3151 the first RETURN (but not its delay insn). This gives the same
3152 effect in fewer instructions.
3154 Only do so if optimizing for size since this results in slower, but
3155 smaller code. */
3156 if (optimize_size
3157 && GET_CODE (PATTERN (delay_insn)) == RETURN
3158 && next
3159 && GET_CODE (next) == JUMP_INSN
3160 && GET_CODE (PATTERN (next)) == RETURN)
3162 int i;
3164 /* Delete the RETURN and just execute the delay list insns.
3166 We do this by deleting the INSN containing the SEQUENCE, then
3167 re-emitting the insns separately, and then deleting the RETURN.
3168 This allows the count of the jump target to be properly
3169 decremented. */
3171 /* Clear the from target bit, since these insns are no longer
3172 in delay slots. */
3173 for (i = 0; i < XVECLEN (pat, 0); i++)
3174 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3176 trial = PREV_INSN (insn);
3177 delete_insn (insn);
3178 emit_insn_after (pat, trial);
3179 delete_scheduled_jump (delay_insn);
3180 continue;
3183 /* Now look only at the cases where we have a filled JUMP_INSN. */
3184 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3185 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3186 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3187 continue;
3189 target_label = JUMP_LABEL (delay_insn);
3191 if (target_label)
3193 /* If this jump goes to another unconditional jump, thread it, but
3194 don't convert a jump into a RETURN here. */
3195 trial = follow_jumps (target_label);
3196 /* We use next_real_insn instead of next_active_insn, so that
3197 the special USE insns emitted by reorg won't be ignored.
3198 If they are ignored, then they will get deleted if target_label
3199 is now unreachable, and that would cause mark_target_live_regs
3200 to fail. */
3201 trial = prev_label (next_real_insn (trial));
3202 if (trial == 0 && target_label != 0)
3203 trial = find_end_label ();
3205 if (trial != target_label
3206 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3208 reorg_redirect_jump (delay_insn, trial);
3209 target_label = trial;
3212 /* If the first insn at TARGET_LABEL is redundant with a previous
3213 insn, redirect the jump to the following insn and process it again. */
3214 trial = next_active_insn (target_label);
3215 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3216 && redundant_insn (trial, insn, 0))
3218 rtx tmp;
3220 /* Figure out where to emit the special USE insn so we don't
3221 later incorrectly compute register live/death info. */
3222 tmp = next_active_insn (trial);
3223 if (tmp == 0)
3224 tmp = find_end_label ();
3226 /* Insert the special USE insn and update dataflow info. */
3227 update_block (trial, tmp);
3229 /* Now emit a label before the special USE insn, and
3230 redirect our jump to the new label. */
3231 target_label = get_label_before (PREV_INSN (tmp));
3232 reorg_redirect_jump (delay_insn, target_label);
3233 next = insn;
3234 continue;
3237 /* Similarly, if it is an unconditional jump with one insn in its
3238 delay list and that insn is redundant, thread the jump. */
3239 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3240 && XVECLEN (PATTERN (trial), 0) == 2
3241 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3242 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3243 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3244 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3246 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3247 if (target_label == 0)
3248 target_label = find_end_label ();
3250 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
3251 insn))
3253 reorg_redirect_jump (delay_insn, target_label);
3254 next = insn;
3255 continue;
3260 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3261 && prev_active_insn (target_label) == insn
3262 && ! condjump_in_parallel_p (delay_insn)
3263 #ifdef HAVE_cc0
3264 /* If the last insn in the delay slot sets CC0 for some insn,
3265 various code assumes that it is in a delay slot. We could
3266 put it back where it belonged and delete the register notes,
3267 but it doesn't seem worthwhile in this uncommon case. */
3268 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
3269 REG_CC_USER, NULL_RTX)
3270 #endif
3273 int i;
3275 /* All this insn does is execute its delay list and jump to the
3276 following insn. So delete the jump and just execute the delay
3277 list insns.
3279 We do this by deleting the INSN containing the SEQUENCE, then
3280 re-emitting the insns separately, and then deleting the jump.
3281 This allows the count of the jump target to be properly
3282 decremented. */
3284 /* Clear the from target bit, since these insns are no longer
3285 in delay slots. */
3286 for (i = 0; i < XVECLEN (pat, 0); i++)
3287 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
3289 trial = PREV_INSN (insn);
3290 delete_insn (insn);
3291 emit_insn_after (pat, trial);
3292 delete_scheduled_jump (delay_insn);
3293 continue;
3296 /* See if this is an unconditional jump around a single insn which is
3297 identical to the one in its delay slot. In this case, we can just
3298 delete the branch and the insn in its delay slot. */
3299 if (next && GET_CODE (next) == INSN
3300 && prev_label (next_active_insn (next)) == target_label
3301 && simplejump_p (insn)
3302 && XVECLEN (pat, 0) == 2
3303 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
3305 delete_insn (insn);
3306 continue;
3309 /* See if this jump (with its delay slots) branches around another
3310 jump (without delay slots). If so, invert this jump and point
3311 it to the target of the second jump. We cannot do this for
3312 annulled jumps, though. Again, don't convert a jump to a RETURN
3313 here. */
3314 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
3315 && next && GET_CODE (next) == JUMP_INSN
3316 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3317 && next_active_insn (target_label) == next_active_insn (next)
3318 && no_labels_between_p (insn, next))
3320 rtx label = JUMP_LABEL (next);
3321 rtx old_label = JUMP_LABEL (delay_insn);
3323 if (label == 0)
3324 label = find_end_label ();
3326 if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
3328 /* Be careful how we do this to avoid deleting code or labels
3329 that are momentarily dead. See similar optimization in
3330 jump.c */
3331 if (old_label)
3332 ++LABEL_NUSES (old_label);
3334 if (invert_jump (delay_insn, label))
3336 int i;
3338 /* Must update the INSN_FROM_TARGET_P bits now that
3339 the branch is reversed, so that mark_target_live_regs
3340 will handle the delay slot insn correctly. */
3341 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
3343 rtx slot = XVECEXP (PATTERN (insn), 0, i);
3344 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
3347 delete_insn (next);
3348 next = insn;
3351 if (old_label && --LABEL_NUSES (old_label) == 0)
3352 delete_insn (old_label);
3353 continue;
3357 /* If we own the thread opposite the way this insn branches, see if we
3358 can merge its delay slots with following insns. */
3359 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3360 && own_thread_p (NEXT_INSN (insn), 0, 1))
3361 try_merge_delay_insns (insn, next);
3362 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
3363 && own_thread_p (target_label, target_label, 0))
3364 try_merge_delay_insns (insn, next_active_insn (target_label));
3366 /* If we get here, we haven't deleted INSN. But we may have deleted
3367 NEXT, so recompute it. */
3368 next = next_active_insn (insn);
3372 #ifdef HAVE_return
3374 /* Look for filled jumps to the end of function label. We can try to convert
3375 them into RETURN insns if the insns in the delay slot are valid for the
3376 RETURN as well. */
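/* A rough example (assembly hypothetical):

       b    .Lend        ; filled jump to the end-of-function label
        add ...          ; its delay slot insn
       ...
   .Lend:
       ret

   becomes, when the slot contents are also valid for a RETURN,

       ret
        add ...          ; same insn, now in the RETURN's delay slot  */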
3378 static void
3379 make_return_insns (first)
3380 rtx first;
3382 rtx insn, jump_insn, pat;
3383 rtx real_return_label = end_of_function_label;
3384 int slots, i;
3386 /* See if there is a RETURN insn in the function other than the one we
3387 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
3388 into a RETURN to jump to it. */
3389 for (insn = first; insn; insn = NEXT_INSN (insn))
3390 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
3392 real_return_label = get_label_before (insn);
3393 break;
3396 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
3397 was equal to END_OF_FUNCTION_LABEL. */
3398 LABEL_NUSES (real_return_label)++;
3400 /* Clear the list of insns to fill so we can use it. */
3401 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
3403 for (insn = first; insn; insn = NEXT_INSN (insn))
3405 int flags;
3407 /* Only look at filled JUMP_INSNs that go to the end of function
3408 label. */
3409 if (GET_CODE (insn) != INSN
3410 || GET_CODE (PATTERN (insn)) != SEQUENCE
3411 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3412 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
3413 continue;
3415 pat = PATTERN (insn);
3416 jump_insn = XVECEXP (pat, 0, 0);
3418 /* If we can't make the jump into a RETURN, try to redirect it to the best
3419 RETURN and go on to the next insn. */
3420 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
3422 /* Make sure redirecting the jump will not invalidate the delay
3423 slot insns. */
3424 if (redirect_with_delay_slots_safe_p (jump_insn,
3425 real_return_label,
3426 insn))
3427 reorg_redirect_jump (jump_insn, real_return_label);
3428 continue;
3431 /* See if this RETURN can accept the insns currently in its delay slot.
3432 It can if it has more or an equal number of slots and the contents
3433 of each is valid. */
3435 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
3436 slots = num_delay_slots (jump_insn);
3437 if (slots >= XVECLEN (pat, 0) - 1)
3439 for (i = 1; i < XVECLEN (pat, 0); i++)
3440 if (! (
3441 #ifdef ANNUL_IFFALSE_SLOTS
3442 (INSN_ANNULLED_BRANCH_P (jump_insn)
3443 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3444 ? eligible_for_annul_false (jump_insn, i - 1,
3445 XVECEXP (pat, 0, i), flags) :
3446 #endif
3447 #ifdef ANNUL_IFTRUE_SLOTS
3448 (INSN_ANNULLED_BRANCH_P (jump_insn)
3449 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
3450 ? eligible_for_annul_true (jump_insn, i - 1,
3451 XVECEXP (pat, 0, i), flags) :
3452 #endif
3453 eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
3454 break;
3456 else
3457 i = 0;
3459 if (i == XVECLEN (pat, 0))
3460 continue;
3462 /* We have to do something with this insn. If it is an unconditional
3463 RETURN, delete the SEQUENCE and output the individual insns,
3464 followed by the RETURN. Then set things up so we try to find
3465 insns for its delay slots, if it needs some. */
3466 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
3468 rtx prev = PREV_INSN (insn);
3470 delete_insn (insn);
3471 for (i = 1; i < XVECLEN (pat, 0); i++)
3472 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
3474 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
3475 emit_barrier_after (insn);
3477 if (slots)
3478 obstack_ptr_grow (&unfilled_slots_obstack, insn);
3480 else
3481 /* It is probably more efficient to keep this with its current
3482 delay slot as a branch to a RETURN. */
3483 reorg_redirect_jump (jump_insn, real_return_label);
3486 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
3487 new delay slots we have created. */
3488 if (--LABEL_NUSES (real_return_label) == 0)
3489 delete_insn (real_return_label);
3491 fill_simple_delay_slots (1);
3492 fill_simple_delay_slots (0);
3494 #endif
3496 /* Try to find insns to place in delay slots. */
3498 void
3499 dbr_schedule (first, file)
3500 rtx first;
3501 FILE *file;
3503 rtx insn, next, epilogue_insn = 0;
3504 int i;
3505 #if 0
3506 int old_flag_no_peephole = flag_no_peephole;
3508 /* Execute `final' once in prescan mode to delete any insns that won't be
3509 used. Don't let final try to do any peephole optimization--it will
3510 ruin dataflow information for this pass. */
3512 flag_no_peephole = 1;
3513 final (first, 0, NO_DEBUG, 1, 1);
3514 flag_no_peephole = old_flag_no_peephole;
3515 #endif

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
	epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
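
  /* uid_to_ruid maps each insn's UID to its sequential position ("ruid") in
     the insn stream, giving later code a cheap way to compare the relative
     order of two insns.  */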

  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }
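
  /* The zero-length allocation above just records the current position in
     the obstack; insns needing delay slots are then appended with
     obstack_ptr_grow, and the whole batch is released back to this mark by
     the obstack_free call near the end of this function.  */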

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && JUMP_LABEL (insn) != 0
	  && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
	      != JUMP_LABEL (insn)))
	redirect_jump (insn, target);
    }
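
  /* At this point every insn with delay slots has been queued on
     unfilled_slots_obstack and each jump has been retargeted to the last of
     any run of consecutive labels; the filling passes below work from that
     list.  */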

  init_resource_info (epilogue_insn);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (1);
      fill_simple_delay_slots (0);
      fill_eager_delay_slots ();
      relax_delay_slots (first);
    }

  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	  && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	next = delete_insn (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);
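
  /* If the machine really has a return insn and we did create an
     end-of-function label, make_return_insns (defined above) now tries to
     turn branches to that label into RETURNs.  */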

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);
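
  /* Optionally dump the per-pass statistics gathered above: for each reorg
     pass and each reorg function, the number of insns that needed delay
     slots and a small histogram of how many slots were filled, printed as
     lines like ";; 4 got 1 delays, 2 got 0 delays".  */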

  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (file, ";; Reorg function #%d\n", i);

	      fprintf (file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (file, ", ");
		    need_comma = 1;
		    fprintf (file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (file, "\n");
	    }
	}
    }

  /* For all JUMP insns, fill in branch prediction notes, so that during
     assembler output a target can set branch prediction bits in the code.
     We have to do this now, as up until this point the destinations of
     JUMPS can be moved around and changed, but past this point that cannot
     happen.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int pred_flags;

      if (GET_CODE (insn) == INSN)
	{
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == SEQUENCE)
	    insn = XVECEXP (pat, 0, 0);
	}
      if (GET_CODE (insn) != JUMP_INSN)
	continue;
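
      /* INSN is now a JUMP_INSN (possibly the branch pulled out of a filled
	 SEQUENCE above); record its jump flags, which encode direction and
	 predicted likelihood, as a REG_BR_PRED note for use at assembler
	 output time.  */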
      pred_flags = get_jump_flags (insn, JUMP_LABEL (insn));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_BR_PRED,
					    GEN_INT (pred_flags),
					    REG_NOTES (insn));
    }
  free_resource_info ();
}
#endif /* DELAY_SLOTS */