/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shortened (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
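/* Illustrative sketch only (not from the original comment): on a machine
   with a single delay slot such as the MIPS, a successful fill turns

	(before reorg)			(after reorg)
	lw	$4,0($16)		bne	$2,$0,L5
	bne	$2,$0,L5		lw	$4,0($16)	; delay slot
	nop				...
      L5:			      L5:

   so the cycle that used to hold the nop now does useful work.  A branch
   and its delay insns are represented internally as a single SEQUENCE rtx;
   see emit_delay_sequence below.  */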
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-flags.h"
#include "insn-attr.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base	\
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next	\
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
/* This structure is used to indicate which hardware resources are set or
   needed by insns so far.  */

struct resources
{
  char memory;		/* Insn sets or needs a memory location.  */
  char volatil;		/* Insn sets or needs a volatile memory loc.  */
  char cc;		/* Insn sets or needs the condition codes.  */
  HARD_REG_SET regs;	/* Which registers are set or needed.  */
};

/* Macro to clear all resources.  */
#define CLEAR_RESOURCE(RES)	\
 do { (RES)->memory = (RES)->volatil = (RES)->cc = 0;	\
      CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
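/* Typical use of the resource-tracking routines (illustrative sketch,
   mirroring the calls made later in this file):

	struct resources needed, set;
	CLEAR_RESOURCE (&needed);
	CLEAR_RESOURCE (&set);
	mark_set_resources (insn, &set, 0, 1);
	mark_referenced_resources (insn, &needed, 1);  */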
/* Indicates what resources are required at the beginning of the epilogue.  */

static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */

static struct resources end_of_function_needs;

/* Points to the label before the end of the function.  */

static rtx end_of_function_label;
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Define the hash table itself.  */

static struct target_info **target_hash_table;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;
/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */

static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */

static int max_uid;
static void mark_referenced_resources PROTO((rtx, struct resources *, int));
static void mark_set_resources PROTO((rtx, struct resources *, int, int));
static int stop_search_p PROTO((rtx, int));
static int resource_conflicts_p PROTO((struct resources *,
				       struct resources *));
static int insn_references_resource_p PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
static rtx find_end_label PROTO((void));
static rtx emit_delay_sequence PROTO((rtx, rtx, int, int));
static rtx add_to_delay_list PROTO((rtx, rtx));
static void delete_from_delay_slot PROTO((rtx));
static void delete_scheduled_jump PROTO((rtx));
static void note_delay_statistics PROTO((int, int));
static rtx optimize_skip PROTO((rtx));
static int get_jump_flags PROTO((rtx, rtx));
static int rare_destination PROTO((rtx));
static int mostly_true_jump PROTO((rtx, rtx));
static rtx get_branch_condition PROTO((rtx, rtx));
static int condition_dominates_p PROTO((rtx, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns PROTO((rtx, rtx));
static rtx redundant_insn PROTO((rtx, rtx, rtx));
static int own_thread_p PROTO((rtx, rtx, int));
static int find_basic_block PROTO((rtx));
static void update_block PROTO((rtx, rtx));
static int reorg_redirect_jump PROTO((rtx, rtx));
static void update_reg_dead_notes PROTO((rtx, rtx));
static void update_reg_unused_notes PROTO((rtx, rtx));
static void update_live_status PROTO((rtx, rtx));
static rtx next_insn_no_annul PROTO((rtx));
static void mark_target_live_regs PROTO((rtx, struct resources *));
static void fill_simple_delay_slots PROTO((rtx, int));
static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
					 int, int, int, int *));
static void fill_eager_delay_slots PROTO((rtx));
static void relax_delay_slots PROTO((rtx));
static void make_return_insns PROTO((rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_CALLED_ROUTINE
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

static void
mark_referenced_resources (x, res, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     register int include_delayed_effects;
{
  register enum rtx_code code = GET_CODE (x);
  register int i, j;
  register char *format_ptr;
281 /* Handle leaf items for which we set resource flags. Also, special-case
282 CALL, SET and CLOBBER operators. */
294 if (GET_CODE (SUBREG_REG (x
)) != REG
)
295 mark_referenced_resources (SUBREG_REG (x
), res
, 0);
298 int regno
= REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
);
299 int last_regno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
));
300 for (i
= regno
; i
< last_regno
; i
++)
301 SET_HARD_REG_BIT (res
->regs
, i
);
306 for (i
= 0; i
< HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
)); i
++)
307 SET_HARD_REG_BIT (res
->regs
, REGNO (x
) + i
);
      /* If this memory shouldn't change, it really isn't referencing
	 anything.  */
      if (! RTX_UNCHANGING_P (x))
	res->memory = 1;

      res->volatil = MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
325 case UNSPEC_VOLATILE
:
327 /* Traditional asm's are always volatile. */
332 res
->volatil
= MEM_VOLATILE_P (x
);
334 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
335 We can not just fall through here since then we would be confused
336 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
337 traditional asms unlike their normal usage. */
339 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
340 mark_referenced_resources (ASM_OPERANDS_INPUT (x
, i
), res
, 0);
344 /* The first operand will be a (MEM (xxx)) but doesn't really reference
345 memory. The second operand may be referenced, though. */
346 mark_referenced_resources (XEXP (XEXP (x
, 0), 0), res
, 0);
347 mark_referenced_resources (XEXP (x
, 1), res
, 0);
351 /* Usually, the first operand of SET is set, not referenced. But
352 registers used to access memory are referenced. SET_DEST is
353 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
355 mark_referenced_resources (SET_SRC (x
), res
, 0);
358 if (GET_CODE (x
) == SIGN_EXTRACT
|| GET_CODE (x
) == ZERO_EXTRACT
)
359 mark_referenced_resources (x
, res
, 0);
360 else if (GET_CODE (x
) == SUBREG
)
362 if (GET_CODE (x
) == MEM
)
363 mark_referenced_resources (XEXP (x
, 0), res
, 0);
370 if (include_delayed_effects
)
372 /* A CALL references memory, the frame pointer if it exists, the
373 stack pointer, any global registers and any registers given in
374 USE insns immediately in front of the CALL.
376 However, we may have moved some of the parameter loading insns
377 into the delay slot of this CALL. If so, the USE's for them
378 don't count and should be skipped. */
379 rtx insn
= PREV_INSN (x
);
384 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
385 if (NEXT_INSN (insn
) != x
)
387 sequence
= PATTERN (NEXT_INSN (insn
));
388 seq_size
= XVECLEN (sequence
, 0);
389 if (GET_CODE (sequence
) != SEQUENCE
)
394 SET_HARD_REG_BIT (res
->regs
, STACK_POINTER_REGNUM
);
395 if (frame_pointer_needed
)
397 SET_HARD_REG_BIT (res
->regs
, FRAME_POINTER_REGNUM
);
398 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
399 SET_HARD_REG_BIT (res
->regs
, HARD_FRAME_POINTER_REGNUM
);
403 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
405 SET_HARD_REG_BIT (res
->regs
, i
);
410 for (link
= CALL_INSN_FUNCTION_USAGE (x
);
412 link
= XEXP (link
, 1))
413 if (GET_CODE (XEXP (link
, 0)) == USE
)
415 for (i
= 1; i
< seq_size
; i
++)
417 rtx slot_pat
= PATTERN (XVECEXP (sequence
, 0, i
));
418 if (GET_CODE (slot_pat
) == SET
419 && rtx_equal_p (SET_DEST (slot_pat
),
420 SET_DEST (XEXP (link
, 0))))
424 mark_referenced_resources (SET_DEST (XEXP (link
, 0)),
430 /* ... fall through to other INSN processing ... */
435 #ifdef INSN_REFERENCES_ARE_DELAYED
436 if (! include_delayed_effects
437 && INSN_REFERENCES_ARE_DELAYED (x
))
441 /* No special processing, just speed up. */
442 mark_referenced_resources (PATTERN (x
), res
, include_delayed_effects
);
446 /* Process each sub-expression and flag what it needs. */
447 format_ptr
= GET_RTX_FORMAT (code
);
448 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
449 switch (*format_ptr
++)
452 mark_referenced_resources (XEXP (x
, i
), res
, include_delayed_effects
);
456 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
457 mark_referenced_resources (XVECEXP (x
, i
, j
), res
,
458 include_delayed_effects
);
463 /* Given X, a part of an insn, and a pointer to a `struct resource', RES,
464 indicate which resources are modified by the insn. If INCLUDE_CALLED_ROUTINE
465 is nonzero, also mark resources potentially set by the called routine.
467 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
468 objects are being referenced instead of set.
470 We never mark the insn as modifying the condition code unless it explicitly
471 SETs CC0 even though this is not totally correct. The reason for this is
472 that we require a SET of CC0 to immediately precede the reference to CC0.
473 So if some other insn sets CC0 as a side-effect, we know it cannot affect
474 our computation and thus may be placed in a delay slot. */
477 mark_set_resources (x
, res
, in_dest
, include_delayed_effects
)
479 register struct resources
*res
;
481 int include_delayed_effects
;
483 register enum rtx_code code
;
485 register char *format_ptr
;
503 /* These don't set any resources. */
512 /* Called routine modifies the condition code, memory, any registers
513 that aren't saved across calls, global registers and anything
514 explicitly CLOBBERed immediately after the CALL_INSN. */
516 if (include_delayed_effects
)
518 rtx next
= NEXT_INSN (x
);
519 rtx prev
= PREV_INSN (x
);
522 res
->cc
= res
->memory
= 1;
523 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
524 if (call_used_regs
[i
] || global_regs
[i
])
525 SET_HARD_REG_BIT (res
->regs
, i
);
527 /* If X is part of a delay slot sequence, then NEXT should be
528 the first insn after the sequence. */
529 if (NEXT_INSN (prev
) != x
)
530 next
= NEXT_INSN (NEXT_INSN (prev
));
532 for (link
= CALL_INSN_FUNCTION_USAGE (x
);
533 link
; link
= XEXP (link
, 1))
534 if (GET_CODE (XEXP (link
, 0)) == CLOBBER
)
535 mark_set_resources (SET_DEST (XEXP (link
, 0)), res
, 1, 0);
537 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
538 assume that this call can clobber any register. */
539 if (next
&& GET_CODE (next
) == NOTE
540 && NOTE_LINE_NUMBER (next
) == NOTE_INSN_SETJMP
)
541 SET_HARD_REG_SET (res
->regs
);
      /* ... and also what its RTL says it modifies, if anything.  */
549 /* An insn consisting of just a CLOBBER (or USE) is just for flow
550 and doesn't actually do anything, so we ignore it. */
552 #ifdef INSN_SETS_ARE_DELAYED
553 if (! include_delayed_effects
554 && INSN_SETS_ARE_DELAYED (x
))
559 if (GET_CODE (x
) != USE
&& GET_CODE (x
) != CLOBBER
)
564 /* If the source of a SET is a CALL, this is actually done by
565 the called routine. So only include it if we are to include the
566 effects of the calling routine. */
568 mark_set_resources (SET_DEST (x
), res
,
569 (include_delayed_effects
570 || GET_CODE (SET_SRC (x
)) != CALL
),
573 mark_set_resources (SET_SRC (x
), res
, 0, 0);
577 mark_set_resources (XEXP (x
, 0), res
, 1, 0);
581 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
582 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x
, 0, 0))
583 && INSN_FROM_TARGET_P (XVECEXP (x
, 0, i
))))
584 mark_set_resources (XVECEXP (x
, 0, i
), res
, 0,
585 include_delayed_effects
);
592 mark_set_resources (XEXP (x
, 0), res
, 1, 0);
596 mark_set_resources (XEXP (x
, 0), res
, in_dest
, 0);
597 mark_set_resources (XEXP (x
, 1), res
, 0, 0);
598 mark_set_resources (XEXP (x
, 2), res
, 0, 0);
605 res
->volatil
= MEM_VOLATILE_P (x
);
608 mark_set_resources (XEXP (x
, 0), res
, 0, 0);
614 if (GET_CODE (SUBREG_REG (x
)) != REG
)
615 mark_set_resources (SUBREG_REG (x
), res
,
616 in_dest
, include_delayed_effects
);
619 int regno
= REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
);
620 int last_regno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
));
621 for (i
= regno
; i
< last_regno
; i
++)
622 SET_HARD_REG_BIT (res
->regs
, i
);
629 for (i
= 0; i
< HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
)); i
++)
630 SET_HARD_REG_BIT (res
->regs
, REGNO (x
) + i
);
634 /* Process each sub-expression and flag what it needs. */
635 format_ptr
= GET_RTX_FORMAT (code
);
636 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
637 switch (*format_ptr
++)
640 mark_set_resources (XEXP (x
, i
), res
, in_dest
, include_delayed_effects
);
644 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
645 mark_set_resources (XVECEXP (x
, i
, j
), res
, in_dest
,
646 include_delayed_effects
);
/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */
656 stop_search_p (insn
, labels_p
)
663 switch (GET_CODE (insn
))
677 /* OK unless it contains a delay slot or is an `asm' insn of some type.
678 We don't know anything about these. */
679 return (GET_CODE (PATTERN (insn
)) == SEQUENCE
680 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
681 || asm_noperands (PATTERN (insn
)) >= 0);
688 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
689 resource set contains a volatile memory reference. Otherwise, return FALSE. */
692 resource_conflicts_p (res1
, res2
)
693 struct resources
*res1
, *res2
;
695 if ((res1
->cc
&& res2
->cc
) || (res1
->memory
&& res2
->memory
)
696 || res1
->volatil
|| res2
->volatil
)
700 return (res1
->regs
& res2
->regs
) != HARD_CONST (0);
705 for (i
= 0; i
< HARD_REG_SET_LONGS
; i
++)
706 if ((res1
->regs
[i
] & res2
->regs
[i
]) != 0)
713 /* Return TRUE if any resource marked in RES, a `struct resources', is
714 referenced by INSN. If INCLUDE_CALLED_ROUTINE is set, return if the called
715 routine is using those resources.
717 We compute this by computing all the resources referenced by INSN and
718 seeing if this conflicts with RES. It might be faster to directly check
719 ourselves, and this is the way it used to work, but it means duplicating
720 a large block of complex code. */
723 insn_references_resource_p (insn
, res
, include_delayed_effects
)
725 register struct resources
*res
;
726 int include_delayed_effects
;
728 struct resources insn_res
;
730 CLEAR_RESOURCE (&insn_res
);
731 mark_referenced_resources (insn
, &insn_res
, include_delayed_effects
);
732 return resource_conflicts_p (&insn_res
, res
);
735 /* Return TRUE if INSN modifies resources that are marked in RES.
736 INCLUDE_CALLED_ROUTINE is set if the actions of that routine should be
737 included. CC0 is only modified if it is explicitly set; see comments
738 in front of mark_set_resources for details. */
741 insn_sets_resource_p (insn
, res
, include_delayed_effects
)
743 register struct resources
*res
;
744 int include_delayed_effects
;
746 struct resources insn_sets
;
748 CLEAR_RESOURCE (&insn_sets
);
749 mark_set_resources (insn
, &insn_sets
, 0, include_delayed_effects
);
750 return resource_conflicts_p (&insn_sets
, res
);
/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;
761 /* If we found one previously, return it. */
762 if (end_of_function_label
)
763 return end_of_function_label
;
765 /* Otherwise, see if there is a label at the end of the function. If there
766 is, it must be that RETURN insns aren't needed, so that is our return
767 label and we don't have to do anything else. */
769 insn
= get_last_insn ();
770 while (GET_CODE (insn
) == NOTE
771 || (GET_CODE (insn
) == INSN
772 && (GET_CODE (PATTERN (insn
)) == USE
773 || GET_CODE (PATTERN (insn
)) == CLOBBER
)))
774 insn
= PREV_INSN (insn
);
776 /* When a target threads its epilogue we might already have a
777 suitable return insn. If so put a label before it for the
778 end_of_function_label. */
779 if (GET_CODE (insn
) == BARRIER
780 && GET_CODE (PREV_INSN (insn
)) == JUMP_INSN
781 && GET_CODE (PATTERN (PREV_INSN (insn
))) == RETURN
)
783 rtx temp
= PREV_INSN (PREV_INSN (insn
));
784 end_of_function_label
= gen_label_rtx ();
785 LABEL_NUSES (end_of_function_label
) = 0;
      /* Put the label before any USE insns that may precede the RETURN insn.  */
788 while (GET_CODE (temp
) == USE
)
789 temp
= PREV_INSN (temp
);
791 emit_label_after (end_of_function_label
, temp
);
794 else if (GET_CODE (insn
) == CODE_LABEL
)
795 end_of_function_label
= insn
;
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
800 end_of_function_label
= gen_label_rtx ();
801 LABEL_NUSES (end_of_function_label
) = 0;
802 emit_label (end_of_function_label
);
806 /* The return we make may have delay slots too. */
807 rtx insn
= gen_return ();
808 insn
= emit_jump_insn (insn
);
810 if (num_delay_slots (insn
) > 0)
811 obstack_ptr_grow (&unfilled_slots_obstack
, insn
);
  /* Show one additional use for this label so it won't go away until
     we are done.  */
818 ++LABEL_NUSES (end_of_function_label
);
820 return end_of_function_label
;
823 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
824 the pattern of INSN with the SEQUENCE.
826 Chain the insns so that NEXT_INSN of each insn in the sequence points to
827 the next and NEXT_INSN of the last insn in the sequence points to
828 the first insn after the sequence. Similarly for PREV_INSN. This makes
829 it easier to scan all insns.
831 Returns the SEQUENCE that replaces INSN. */
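/* Illustrative sketch (assumed shape, not verbatim RTL): the insn built
   here has a pattern of the form

	(sequence [(jump_insn ... the branch needing delay slots ...)
		   (insn ... filled delay slot 1 ...)
		   (insn ... filled delay slot 2 ...)])

   so that later passes treat the branch and its delay insns as a unit.  */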
834 emit_delay_sequence (insn
, list
, length
, avail
)
  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
845 rtvec seqv
= rtvec_alloc (length
+ 1);
846 rtx seq
= gen_rtx (SEQUENCE
, VOIDmode
, seqv
);
847 rtx seq_insn
= make_insn_raw (seq
);
848 rtx first
= get_insns ();
849 rtx last
= get_last_insn ();
851 /* Make a copy of the insn having delay slots. */
852 rtx delay_insn
= copy_rtx (insn
);
854 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
855 confuse further processing. Update LAST in case it was the last insn.
856 We will put the BARRIER back in later. */
857 if (NEXT_INSN (insn
) && GET_CODE (NEXT_INSN (insn
)) == BARRIER
)
859 delete_insn (NEXT_INSN (insn
));
860 last
= get_last_insn ();
864 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
865 NEXT_INSN (seq_insn
) = NEXT_INSN (insn
);
866 PREV_INSN (seq_insn
) = PREV_INSN (insn
);
869 set_new_first_and_last_insn (first
, seq_insn
);
871 PREV_INSN (NEXT_INSN (seq_insn
)) = seq_insn
;
874 set_new_first_and_last_insn (seq_insn
, last
);
876 NEXT_INSN (PREV_INSN (seq_insn
)) = seq_insn
;
878 /* Build our SEQUENCE and rebuild the insn chain. */
879 XVECEXP (seq
, 0, 0) = delay_insn
;
880 INSN_DELETED_P (delay_insn
) = 0;
881 PREV_INSN (delay_insn
) = PREV_INSN (seq_insn
);
883 for (li
= list
; li
; li
= XEXP (li
, 1), i
++)
885 rtx tem
= XEXP (li
, 0);
888 /* Show that this copy of the insn isn't deleted. */
889 INSN_DELETED_P (tem
) = 0;
891 XVECEXP (seq
, 0, i
) = tem
;
892 PREV_INSN (tem
) = XVECEXP (seq
, 0, i
- 1);
893 NEXT_INSN (XVECEXP (seq
, 0, i
- 1)) = tem
;
895 /* Remove any REG_DEAD notes because we can't rely on them now
896 that the insn has been moved. */
897 for (note
= REG_NOTES (tem
); note
; note
= XEXP (note
, 1))
898 if (REG_NOTE_KIND (note
) == REG_DEAD
)
899 XEXP (note
, 0) = const0_rtx
;
902 NEXT_INSN (XVECEXP (seq
, 0, length
)) = NEXT_INSN (seq_insn
);
904 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
905 last insn in that SEQUENCE to point to us. Similarly for the first
906 insn in the following insn if it is a SEQUENCE. */
908 if (PREV_INSN (seq_insn
) && GET_CODE (PREV_INSN (seq_insn
)) == INSN
909 && GET_CODE (PATTERN (PREV_INSN (seq_insn
))) == SEQUENCE
)
910 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn
)), 0,
911 XVECLEN (PATTERN (PREV_INSN (seq_insn
)), 0) - 1))
914 if (NEXT_INSN (seq_insn
) && GET_CODE (NEXT_INSN (seq_insn
)) == INSN
915 && GET_CODE (PATTERN (NEXT_INSN (seq_insn
))) == SEQUENCE
)
916 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn
)), 0, 0)) = seq_insn
;
918 /* If there used to be a BARRIER, put it back. */
920 emit_barrier_after (seq_insn
);
928 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
929 be in the order in which the insns are to be executed. */
932 add_to_delay_list (insn
, delay_list
)
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */
942 struct target_info
*tinfo
;
944 for (tinfo
= target_hash_table
[INSN_UID (insn
) % TARGET_HASH_PRIME
];
945 tinfo
; tinfo
= tinfo
->next
)
946 if (tinfo
->uid
== INSN_UID (insn
))
952 return gen_rtx (INSN_LIST
, VOIDmode
, insn
, NULL_RTX
);
955 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
957 XEXP (delay_list
, 1) = add_to_delay_list (insn
, XEXP (delay_list
, 1));
/* Delete INSN from the delay slot of the insn that it is in.  This may
   produce an insn without anything in its delay slots.  */
966 delete_from_delay_slot (insn
)
969 rtx trial
, seq_insn
, seq
, prev
;
973 /* We first must find the insn containing the SEQUENCE with INSN in its
974 delay slot. Do this by finding an insn, TRIAL, where
975 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
978 PREV_INSN (NEXT_INSN (trial
)) == trial
;
979 trial
= NEXT_INSN (trial
))
982 seq_insn
= PREV_INSN (NEXT_INSN (trial
));
983 seq
= PATTERN (seq_insn
);
985 /* Create a delay list consisting of all the insns other than the one
986 we are deleting (unless we were the only one). */
987 if (XVECLEN (seq
, 0) > 2)
988 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
989 if (XVECEXP (seq
, 0, i
) != insn
)
990 delay_list
= add_to_delay_list (XVECEXP (seq
, 0, i
), delay_list
);
992 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
993 list, and rebuild the delay list if non-empty. */
994 prev
= PREV_INSN (seq_insn
);
995 trial
= XVECEXP (seq
, 0, 0);
996 delete_insn (seq_insn
);
997 add_insn_after (trial
, prev
);
999 if (GET_CODE (trial
) == JUMP_INSN
1000 && (simplejump_p (trial
) || GET_CODE (PATTERN (trial
)) == RETURN
))
1001 emit_barrier_after (trial
);
  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;
1010 INSN_FROM_TARGET_P (insn
) = 0;
1012 /* Show we need to fill this insn again. */
1013 obstack_ptr_grow (&unfilled_slots_obstack
, trial
);
1016 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
1017 the insn that sets CC0 for it and delete it too. */
1020 delete_scheduled_jump (insn
)
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */
1030 if (reg_mentioned_p (cc0_rtx
, insn
))
1032 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
1034 /* If a reg-note was found, it points to an insn to set CC0. This
1035 insn is in the delay list of some other insn. So delete it from
1036 the delay list it was in. */
1039 if (! FIND_REG_INC_NOTE (XEXP (note
, 0), NULL_RTX
)
1040 && sets_cc0_p (PATTERN (XEXP (note
, 0))) == 1)
1041 delete_from_delay_slot (XEXP (note
, 0));
      /* The insn setting CC0 is our previous insn, but it may be in
	 a delay slot.  It will be the last insn in the delay slot, if
	 it is.  */
1048 rtx trial
= previous_insn (insn
);
1049 if (GET_CODE (trial
) == NOTE
)
1050 trial
= prev_nonnote_insn (trial
);
1051 if (sets_cc0_p (PATTERN (trial
)) != 1
1052 || FIND_REG_INC_NOTE (trial
, 0))
1054 if (PREV_INSN (NEXT_INSN (trial
)) == trial
)
1055 delete_insn (trial
);
1057 delete_from_delay_slot (trial
);
1065 /* Counters for delay-slot filling. */
1067 #define NUM_REORG_FUNCTIONS 2
1068 #define MAX_DELAY_HISTOGRAM 3
1069 #define MAX_REORG_PASSES 2
1071 static int num_insns_needing_delays
[NUM_REORG_FUNCTIONS
][MAX_REORG_PASSES
];
1073 static int num_filled_delays
[NUM_REORG_FUNCTIONS
][MAX_DELAY_HISTOGRAM
+1][MAX_REORG_PASSES
];
1075 static int reorg_pass_number
;
1078 note_delay_statistics (slots_filled
, index
)
1079 int slots_filled
, index
;
1081 num_insns_needing_delays
[index
][reorg_pass_number
]++;
1082 if (slots_filled
> MAX_DELAY_HISTOGRAM
)
1083 slots_filled
= MAX_DELAY_HISTOGRAM
;
1084 num_filled_delays
[index
][slots_filled
][reorg_pass_number
]++;
1087 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
/*  Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       into the delay slot.
1098 (orig) (skip) (otherwise)
1099 Bcc.n L1 Bcc',a L1 Bcc,a L1'
1106 2. When a conditional branch skips over only one instruction,
1107 and after that, it unconditionally branches somewhere else,
1108 perform the similar optimization. This saves executing the
1109 second branch in the case where the inverted condition is true.
1116 INSN is a JUMP_INSN.
1118 This should be expanded to skip over N insns, where N is the number
1119 of delay slots required. */
1122 optimize_skip (insn
)
1125 register rtx trial
= next_nonnote_insn (insn
);
1126 rtx next_trial
= next_active_insn (trial
);
1131 flags
= get_jump_flags (insn
, JUMP_LABEL (insn
));
1134 || GET_CODE (trial
) != INSN
1135 || GET_CODE (PATTERN (trial
)) == SEQUENCE
1136 || recog_memoized (trial
) < 0
1137 || (! eligible_for_annul_false (insn
, 0, trial
, flags
)
1138 && ! eligible_for_annul_true (insn
, 0, trial
, flags
)))
1141 /* There are two cases where we are just executing one insn (we assume
1142 here that a branch requires only one insn; this should be generalized
1143 at some point): Where the branch goes around a single insn or where
1144 we have one insn followed by a branch to the same label we branch to.
1145 In both of these cases, inverting the jump and annulling the delay
1146 slot give the same effect in fewer insns. */
1147 if ((next_trial
== next_active_insn (JUMP_LABEL (insn
)))
1149 && GET_CODE (next_trial
) == JUMP_INSN
1150 && JUMP_LABEL (insn
) == JUMP_LABEL (next_trial
)
1151 && (simplejump_p (next_trial
)
1152 || GET_CODE (PATTERN (next_trial
)) == RETURN
)))
1154 if (eligible_for_annul_false (insn
, 0, trial
, flags
))
1156 if (invert_jump (insn
, JUMP_LABEL (insn
)))
1157 INSN_FROM_TARGET_P (trial
) = 1;
1158 else if (! eligible_for_annul_true (insn
, 0, trial
, flags
))
1162 delay_list
= add_to_delay_list (trial
, NULL_RTX
);
1163 next_trial
= next_active_insn (trial
);
1164 update_block (trial
, trial
);
1165 delete_insn (trial
);
1167 /* Also, if we are targeting an unconditional
1168 branch, thread our jump to the target of that branch. Don't
1169 change this into a RETURN here, because it may not accept what
1170 we have in the delay slot. We'll fix this up later. */
1171 if (next_trial
&& GET_CODE (next_trial
) == JUMP_INSN
1172 && (simplejump_p (next_trial
)
1173 || GET_CODE (PATTERN (next_trial
)) == RETURN
))
1175 target_label
= JUMP_LABEL (next_trial
);
1176 if (target_label
== 0)
1177 target_label
= find_end_label ();
1179 /* Recompute the flags based on TARGET_LABEL since threading
1180 the jump to TARGET_LABEL may change the direction of the
1181 jump (which may change the circumstances in which the
1182 delay slot is nullified). */
1183 flags
= get_jump_flags (insn
, target_label
);
1184 if (eligible_for_annul_true (insn
, 0, trial
, flags
))
1185 reorg_redirect_jump (insn
, target_label
);
1188 INSN_ANNULLED_BRANCH_P (insn
) = 1;
1196 /* Encode and return branch direction and prediction information for
1197 INSN assuming it will jump to LABEL.
1199 Non conditional branches return no direction information and
1200 are predicted as very likely taken. */
1202 get_jump_flags (insn
, label
)
1207 /* get_jump_flags can be passed any insn with delay slots, these may
1208 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
1209 direction information, and only if they are conditional jumps.
     If LABEL is zero, then there is no way to determine the branch
     direction.  */
1213 if (GET_CODE (insn
) == JUMP_INSN
1214 && (condjump_p (insn
) || condjump_in_parallel_p (insn
))
1215 && INSN_UID (insn
) <= max_uid
1217 && INSN_UID (label
) <= max_uid
)
1219 = (uid_to_ruid
[INSN_UID (label
)] > uid_to_ruid
[INSN_UID (insn
)])
1220 ? ATTR_FLAG_forward
: ATTR_FLAG_backward
;
1221 /* No valid direction information. */
  /* If insn is a conditional branch, call mostly_true_jump to
     determine the branch prediction.

     Non-conditional branches are predicted as very likely taken.  */
1229 if (GET_CODE (insn
) == JUMP_INSN
1230 && (condjump_p (insn
) || condjump_in_parallel_p (insn
)))
1234 prediction
= mostly_true_jump (insn
, get_branch_condition (insn
, label
));
1238 flags
|= (ATTR_FLAG_very_likely
| ATTR_FLAG_likely
);
1241 flags
|= ATTR_FLAG_likely
;
1244 flags
|= ATTR_FLAG_unlikely
;
1247 flags
|= (ATTR_FLAG_very_unlikely
| ATTR_FLAG_unlikely
);
1255 flags
|= (ATTR_FLAG_very_likely
| ATTR_FLAG_likely
);
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */
1266 rare_destination (insn
)
1272 for (; insn
; insn
= next
)
1274 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1275 insn
= XVECEXP (PATTERN (insn
), 0, 0);
1277 next
= NEXT_INSN (insn
);
1279 switch (GET_CODE (insn
))
	/* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	   don't scan past JUMP_INSNs, so any barrier we find here must
	   have been after a CALL_INSN and hence mean the call doesn't
	   return.  */
1290 if (GET_CODE (PATTERN (insn
)) == RETURN
)
1292 else if (simplejump_p (insn
)
1293 && jump_count
++ < 10)
1294 next
= JUMP_LABEL (insn
);
1300 /* If we got here it means we hit the end of the function. So this
1301 is an unlikely destination. */
1306 /* Return truth value of the statement that this branch
1307 is mostly taken. If we think that the branch is extremely likely
1308 to be taken, we return 2. If the branch is slightly more likely to be
1309 taken, return 1. If the branch is slightly less likely to be taken,
1310 return 0 and if the branch is highly unlikely to be taken, return -1.
1312 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
1315 mostly_true_jump (jump_insn
, condition
)
1316 rtx jump_insn
, condition
;
1318 rtx target_label
= JUMP_LABEL (jump_insn
);
1320 int rare_dest
= rare_destination (target_label
);
1321 int rare_fallthrough
= rare_destination (NEXT_INSN (jump_insn
));
1323 /* If this is a branch outside a loop, it is highly unlikely. */
1324 if (GET_CODE (PATTERN (jump_insn
)) == SET
1325 && GET_CODE (SET_SRC (PATTERN (jump_insn
))) == IF_THEN_ELSE
1326 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn
)), 1)) == LABEL_REF
1327 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn
)), 1)))
1328 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn
)), 2)) == LABEL_REF
1329 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn
)), 2)))))
  /* If this is the test of a loop, it is very likely true.  We scan
     backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
     before the next real insn, we assume the branch is to the top of
     the loop.  */
1338 for (insn
= PREV_INSN (target_label
);
1339 insn
&& GET_CODE (insn
) == NOTE
;
1340 insn
= PREV_INSN (insn
))
1341 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
)
  /* If this is a jump to the test of a loop, it is likely true.  We scan
     forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
     before the next real insn, we assume the branch is to the loop branch
     test.  */
1348 for (insn
= NEXT_INSN (target_label
);
1349 insn
&& GET_CODE (insn
) == NOTE
;
1350 insn
= PREV_INSN (insn
))
1351 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
)
  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */
1358 switch (rare_fallthrough
- rare_dest
)
1372 /* If we couldn't figure out what this jump was, assume it won't be
1373 taken. This should be rare. */
1377 /* EQ tests are usually false and NE tests are usually true. Also,
1378 most quantities are positive, so we can make the appropriate guesses
1379 about signed comparisons against zero. */
1380 switch (GET_CODE (condition
))
1383 /* Unconditional branch. */
1391 if (XEXP (condition
, 1) == const0_rtx
)
1396 if (XEXP (condition
, 1) == const0_rtx
)
  /* Predict that backward branches will usually be taken, forward branches
     usually not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
1410 /* Return the condition under which INSN will branch to TARGET. If TARGET
1411 is zero, return the condition under which INSN will return. If INSN is
1412 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1413 type of jump, or it doesn't go to TARGET, return 0. */
1416 get_branch_condition (insn
, target
)
1420 rtx pat
= PATTERN (insn
);
1423 if (condjump_in_parallel_p (insn
))
1424 pat
= XVECEXP (pat
, 0, 0);
1426 if (GET_CODE (pat
) == RETURN
)
1427 return target
== 0 ? const_true_rtx
: 0;
1429 else if (GET_CODE (pat
) != SET
|| SET_DEST (pat
) != pc_rtx
)
1432 src
= SET_SRC (pat
);
1433 if (GET_CODE (src
) == LABEL_REF
&& XEXP (src
, 0) == target
)
1434 return const_true_rtx
;
1436 else if (GET_CODE (src
) == IF_THEN_ELSE
1437 && ((target
== 0 && GET_CODE (XEXP (src
, 1)) == RETURN
)
1438 || (GET_CODE (XEXP (src
, 1)) == LABEL_REF
1439 && XEXP (XEXP (src
, 1), 0) == target
))
1440 && XEXP (src
, 2) == pc_rtx
)
1441 return XEXP (src
, 0);
1443 else if (GET_CODE (src
) == IF_THEN_ELSE
1444 && ((target
== 0 && GET_CODE (XEXP (src
, 2)) == RETURN
)
1445 || (GET_CODE (XEXP (src
, 2)) == LABEL_REF
1446 && XEXP (XEXP (src
, 2), 0) == target
))
1447 && XEXP (src
, 1) == pc_rtx
)
1448 return gen_rtx (reverse_condition (GET_CODE (XEXP (src
, 0))),
1449 GET_MODE (XEXP (src
, 0)),
1450 XEXP (XEXP (src
, 0), 0), XEXP (XEXP (src
, 0), 1));
1455 /* Return non-zero if CONDITION is more strict than the condition of
1456 INSN, i.e., if INSN will always branch if CONDITION is true. */
1459 condition_dominates_p (condition
, insn
)
1463 rtx other_condition
= get_branch_condition (insn
, JUMP_LABEL (insn
));
1464 enum rtx_code code
= GET_CODE (condition
);
1465 enum rtx_code other_code
;
1467 if (rtx_equal_p (condition
, other_condition
)
1468 || other_condition
== const_true_rtx
)
1471 else if (condition
== const_true_rtx
|| other_condition
== 0)
1474 other_code
= GET_CODE (other_condition
);
1475 if (GET_RTX_LENGTH (code
) != 2 || GET_RTX_LENGTH (other_code
) != 2
1476 || ! rtx_equal_p (XEXP (condition
, 0), XEXP (other_condition
, 0))
1477 || ! rtx_equal_p (XEXP (condition
, 1), XEXP (other_condition
, 1)))
1480 return comparison_dominates_p (code
, other_code
);
1483 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1484 any insns already in the delay slot of JUMP. */
1487 redirect_with_delay_slots_safe_p (jump
, newlabel
, seq
)
1488 rtx jump
, newlabel
, seq
;
1490 int flags
, slots
, i
;
1491 rtx pat
= PATTERN (seq
);
1493 /* Make sure all the delay slots of this jump would still
1494 be valid after threading the jump. If they are still
1495 valid, then return non-zero. */
1497 flags
= get_jump_flags (jump
, newlabel
);
1498 for (i
= 1; i
< XVECLEN (pat
, 0); i
++)
1500 #ifdef ANNUL_IFFALSE_SLOTS
1501 (INSN_ANNULLED_BRANCH_P (jump
)
1502 && INSN_FROM_TARGET_P (XVECEXP (pat
, 0, i
)))
1503 ? eligible_for_annul_false (jump
, i
- 1,
1504 XVECEXP (pat
, 0, i
), flags
) :
1506 #ifdef ANNUL_IFTRUE_SLOTS
1507 (INSN_ANNULLED_BRANCH_P (jump
)
1508 && ! INSN_FROM_TARGET_P (XVECEXP (pat
, 0, i
)))
1509 ? eligible_for_annul_true (jump
, i
- 1,
1510 XVECEXP (pat
, 0, i
), flags
) :
1512 eligible_for_delay (jump
, i
-1, XVECEXP (pat
, 0, i
), flags
)))
1515 return (i
== XVECLEN (pat
, 0));
1518 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1519 any insns we wish to place in the delay slot of JUMP. */
1522 redirect_with_delay_list_safe_p (jump
, newlabel
, delay_list
)
1523 rtx jump
, newlabel
, delay_list
;
1528 /* Make sure all the insns in DELAY_LIST would still be
1529 valid after threading the jump. If they are still
1530 valid, then return non-zero. */
1532 flags
= get_jump_flags (jump
, newlabel
);
1533 for (li
= delay_list
, i
= 0; li
; li
= XEXP (li
, 1), i
++)
1535 #ifdef ANNUL_IFFALSE_SLOTS
1536 (INSN_ANNULLED_BRANCH_P (jump
)
1537 && INSN_FROM_TARGET_P (XEXP (li
, 0)))
1538 ? eligible_for_annul_false (jump
, i
, XEXP (li
, 0), flags
) :
1540 #ifdef ANNUL_IFTRUE_SLOTS
1541 (INSN_ANNULLED_BRANCH_P (jump
)
1542 && ! INSN_FROM_TARGET_P (XEXP (li
, 0)))
1543 ? eligible_for_annul_true (jump
, i
, XEXP (li
, 0), flags
) :
1545 eligible_for_delay (jump
, i
, XEXP (li
, 0), flags
)))
1548 return (li
== NULL
);
1552 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1553 the condition tested by INSN is CONDITION and the resources shown in
1554 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1555 from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
1557 needed while searching for delay slot insns. Return the concatenated
1558 delay list if possible, otherwise, return 0.
1560 SLOTS_TO_FILL is the total number of slots required by INSN, and
1561 PSLOTS_FILLED points to the number filled so far (also the number of
1562 insns in DELAY_LIST). It is updated with the number that have been
1563 filled from the SEQUENCE, if any.
1565 PANNUL_P points to a non-zero value if we already know that we need
1566 to annul INSN. If this routine determines that annulling is needed,
1567 it may set that value non-zero.
1569 PNEW_THREAD points to a location that is to receive the place at which
1570 execution should continue. */
1573 steal_delay_list_from_target (insn
, condition
, seq
, delay_list
,
1574 sets
, needed
, other_needed
,
1575 slots_to_fill
, pslots_filled
, pannul_p
,
1577 rtx insn
, condition
;
1580 struct resources
*sets
, *needed
, *other_needed
;
1587 int slots_remaining
= slots_to_fill
- *pslots_filled
;
1588 int total_slots_filled
= *pslots_filled
;
1589 rtx new_delay_list
= 0;
1590 int must_annul
= *pannul_p
;
1593 /* We can't do anything if there are more delay slots in SEQ than we
1594 can handle, or if we don't know that it will be a taken branch.
1596 We know that it will be a taken branch if it is either an unconditional
1597 branch or a conditional branch with a stricter branch condition. */
1599 if (XVECLEN (seq
, 0) - 1 > slots_remaining
1600 || ! condition_dominates_p (condition
, XVECEXP (seq
, 0, 0)))
1603 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
1605 rtx trial
= XVECEXP (seq
, 0, i
);
1608 if (insn_references_resource_p (trial
, sets
, 0)
1609 || insn_sets_resource_p (trial
, needed
, 0)
1610 || insn_sets_resource_p (trial
, sets
, 0)
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay slot.  */
1614 || find_reg_note (trial
, REG_CC_USER
, NULL_RTX
)
1616 /* If TRIAL is from the fallthrough code of an annulled branch insn
1617 in SEQ, we cannot use it. */
1618 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq
, 0, 0))
1619 && ! INSN_FROM_TARGET_P (trial
)))
1622 /* If this insn was already done (usually in a previous delay slot),
1623 pretend we put it in our delay slot. */
1624 if (redundant_insn (trial
, insn
, new_delay_list
))
1627 /* We will end up re-vectoring this branch, so compute flags
1628 based on jumping to the new label. */
1629 flags
= get_jump_flags (insn
, JUMP_LABEL (XVECEXP (seq
, 0, 0)));
1632 && ((condition
== const_true_rtx
1633 || (! insn_sets_resource_p (trial
, other_needed
, 0)
1634 && ! may_trap_p (PATTERN (trial
)))))
1635 ? eligible_for_delay (insn
, total_slots_filled
, trial
, flags
)
1637 eligible_for_annul_false (insn
, total_slots_filled
, trial
, flags
)))
1639 temp
= copy_rtx (trial
);
1640 INSN_FROM_TARGET_P (temp
) = 1;
1641 new_delay_list
= add_to_delay_list (temp
, new_delay_list
);
1642 total_slots_filled
++;
1644 if (--slots_remaining
== 0)
1651 /* Show the place to which we will be branching. */
1652 *pnew_thread
= next_active_insn (JUMP_LABEL (XVECEXP (seq
, 0, 0)));
1654 /* Add any new insns to the delay list and update the count of the
1655 number of slots filled. */
1656 *pslots_filled
= total_slots_filled
;
1657 *pannul_p
= must_annul
;
1659 if (delay_list
== 0)
1660 return new_delay_list
;
1662 for (temp
= new_delay_list
; temp
; temp
= XEXP (temp
, 1))
1663 delay_list
= add_to_delay_list (XEXP (temp
, 0), delay_list
);
1668 /* Similar to steal_delay_list_from_target except that SEQ is on the
1669 fallthrough path of INSN. Here we only do something if the delay insn
1670 of SEQ is an unconditional branch. In that case we steal its delay slot
1671 for INSN since unconditional branches are much easier to fill. */
1674 steal_delay_list_from_fallthrough (insn
, condition
, seq
,
1675 delay_list
, sets
, needed
, other_needed
,
1676 slots_to_fill
, pslots_filled
, pannul_p
)
1677 rtx insn
, condition
;
1680 struct resources
*sets
, *needed
, *other_needed
;
1688 flags
= get_jump_flags (insn
, JUMP_LABEL (insn
));
1690 /* We can't do anything if SEQ's delay insn isn't an
1691 unconditional branch. */
1693 if (! simplejump_p (XVECEXP (seq
, 0, 0))
1694 && GET_CODE (PATTERN (XVECEXP (seq
, 0, 0))) != RETURN
)
1697 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
1699 rtx trial
= XVECEXP (seq
, 0, i
);
      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
1703 if (insn_references_resource_p (trial
, sets
, 0)
1704 || insn_sets_resource_p (trial
, needed
, 0)
1705 || insn_sets_resource_p (trial
, sets
, 0)
1707 || sets_cc0_p (PATTERN (trial
))
1713 /* If this insn was already done, we don't need it. */
1714 if (redundant_insn (trial
, insn
, delay_list
))
1716 delete_from_delay_slot (trial
);
1721 && ((condition
== const_true_rtx
1722 || (! insn_sets_resource_p (trial
, other_needed
, 0)
1723 && ! may_trap_p (PATTERN (trial
)))))
1724 ? eligible_for_delay (insn
, *pslots_filled
, trial
, flags
)
1726 eligible_for_annul_true (insn
, *pslots_filled
, trial
, flags
)))
1728 delete_from_delay_slot (trial
);
1729 delay_list
= add_to_delay_list (trial
, delay_list
);
1731 if (++(*pslots_filled
) == slots_to_fill
)
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */
1751 try_merge_delay_insns (insn
, thread
)
1754 rtx trial
, next_trial
;
1755 rtx delay_insn
= XVECEXP (PATTERN (insn
), 0, 0);
1756 int annul_p
= INSN_ANNULLED_BRANCH_P (delay_insn
);
1757 int slot_number
= 1;
1758 int num_slots
= XVECLEN (PATTERN (insn
), 0);
1759 rtx next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1760 struct resources set
, needed
;
1761 rtx merged_insns
= 0;
1765 flags
= get_jump_flags (delay_insn
, JUMP_LABEL (delay_insn
));
1767 CLEAR_RESOURCE (&needed
);
1768 CLEAR_RESOURCE (&set
);
1770 /* If this is not an annulling branch, take into account anything needed in
1771 NEXT_TO_MATCH. This prevents two increments from being incorrectly
1772 folded into one. If we are annulling, this would be the correct
1773 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1774 will essentially disable this optimization. This method is somewhat of
1775 a kludge, but I don't see a better way.) */
1777 mark_referenced_resources (next_to_match
, &needed
, 1);
1779 for (trial
= thread
; !stop_search_p (trial
, 1); trial
= next_trial
)
1781 rtx pat
= PATTERN (trial
);
1782 rtx oldtrial
= trial
;
1784 next_trial
= next_nonnote_insn (trial
);
1786 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1787 if (GET_CODE (trial
) == INSN
1788 && (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
))
1791 if (GET_CODE (next_to_match
) == GET_CODE (trial
)
1793 /* We can't share an insn that sets cc0. */
1794 && ! sets_cc0_p (pat
)
1796 && ! insn_references_resource_p (trial
, &set
, 1)
1797 && ! insn_sets_resource_p (trial
, &set
, 1)
1798 && ! insn_sets_resource_p (trial
, &needed
, 1)
1799 && (trial
= try_split (pat
, trial
, 0)) != 0
1800 /* Update next_trial, in case try_split succeeded. */
1801 && (next_trial
= next_nonnote_insn (trial
))
1802 /* Likewise THREAD. */
1803 && (thread
= oldtrial
== thread
? trial
: thread
)
1804 && rtx_equal_p (PATTERN (next_to_match
), PATTERN (trial
))
1805 /* Have to test this condition if annul condition is different
1806 from (and less restrictive than) non-annulling one. */
1807 && eligible_for_delay (delay_insn
, slot_number
- 1, trial
, flags
))
1812 update_block (trial
, thread
);
1813 if (trial
== thread
)
1814 thread
= next_active_insn (thread
);
1816 delete_insn (trial
);
1817 INSN_FROM_TARGET_P (next_to_match
) = 0;
1820 merged_insns
= gen_rtx (INSN_LIST
, VOIDmode
, trial
, merged_insns
);
1822 if (++slot_number
== num_slots
)
1825 next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1827 mark_referenced_resources (next_to_match
, &needed
, 1);
1830 mark_set_resources (trial
, &set
, 0, 1);
1831 mark_referenced_resources (trial
, &needed
, 1);
1834 /* See if we stopped on a filled insn. If we did, try to see if its
1835 delay slots match. */
1836 if (slot_number
!= num_slots
1837 && trial
&& GET_CODE (trial
) == INSN
1838 && GET_CODE (PATTERN (trial
)) == SEQUENCE
1839 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial
), 0, 0)))
1841 rtx pat
= PATTERN (trial
);
1842 rtx filled_insn
= XVECEXP (pat
, 0, 0);
1844 /* Account for resources set/needed by the filled insn. */
1845 mark_set_resources (filled_insn
, &set
, 0, 1);
1846 mark_referenced_resources (filled_insn
, &needed
, 1);
1848 for (i
= 1; i
< XVECLEN (pat
, 0); i
++)
1850 rtx dtrial
= XVECEXP (pat
, 0, i
);
1852 if (! insn_references_resource_p (dtrial
, &set
, 1)
1853 && ! insn_sets_resource_p (dtrial
, &set
, 1)
1854 && ! insn_sets_resource_p (dtrial
, &needed
, 1)
1856 && ! sets_cc0_p (PATTERN (dtrial
))
1858 && rtx_equal_p (PATTERN (next_to_match
), PATTERN (dtrial
))
1859 && eligible_for_delay (delay_insn
, slot_number
- 1, dtrial
, flags
))
1863 update_block (dtrial
, thread
);
1864 delete_from_delay_slot (dtrial
);
1865 INSN_FROM_TARGET_P (next_to_match
) = 0;
1868 merged_insns
= gen_rtx (INSN_LIST
, SImode
, dtrial
,
1871 if (++slot_number
== num_slots
)
1874 next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not do so any more.  In that case delete
     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each
     insn in the delay list so that we know that it isn't only being used
     at the target.  */
1884 if (slot_number
== num_slots
&& annul_p
)
1886 for (; merged_insns
; merged_insns
= XEXP (merged_insns
, 1))
1888 if (GET_MODE (merged_insns
) == SImode
)
1890 update_block (XEXP (merged_insns
, 0), thread
);
1891 delete_from_delay_slot (XEXP (merged_insns
, 0));
1895 update_block (XEXP (merged_insns
, 0), thread
);
1896 delete_insn (XEXP (merged_insns
, 0));
1900 INSN_ANNULLED_BRANCH_P (delay_insn
) = 0;
1902 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1903 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn
), 0, i
)) = 0;
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
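/* Illustrative example (not from the original comment): if several branches
   all jump to a label whose first insn is, say,
   (set (reg 4) (plus (reg 4) (const_int 1))), each branch may want a copy
   of that insn in its delay slot; this routine recognizes when such a copy
   has already been executed on the path reaching TARGET, so that no new
   copy is needed.  */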
static rtx
redundant_insn (insn, target, delay_list)
     rtx insn;
     rtx target;
     rtx delay_list;
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) == CODE_LABEL)
        return 0;

      if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
        continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (GET_CODE (pat) == SEQUENCE)
        {
          /* Stop for a CALL and its delay slots because it is difficult to
             track its resource needs correctly.  */
          if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
            return 0;

          /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
             slots because it is difficult to track its resource needs
             correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
          if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
          if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

          /* See if any of the insns in the delay slot match, updating
             resource requirements as we go.  */
          for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
                && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat))
              break;

          /* If we found a match, exit this loop early.  */
          if (i > 0)
            break;
        }

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat))
        break;
    }

  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;

  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, 1);
  mark_referenced_resources (insn, &needed, 1);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
#ifdef HAVE_cc0
      || reg_mentioned_p (cc0_rtx, ipat)
#endif
      /* The insn requiring the delay may not set anything needed or set by
         INSN.  */
      || insn_sets_resource_p (target_main, &needed, 1)
      || insn_sets_resource_p (target_main, &set, 1))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);

  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
        return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
        return 0;

  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something INSN uses or sets.  */

  for (trial = PREV_INSN (target);
       trial && GET_CODE (trial) != CODE_LABEL;
       trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
          && GET_CODE (trial) != JUMP_INSN)
        continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (GET_CODE (pat) == SEQUENCE)
        {
          /* If this is a CALL_INSN and its delay slots, it is hard to track
             the resource needs properly, so give up.  */
          if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
            return 0;

          /* If this is an INSN or JUMP_INSN with delayed effects, it
             is hard to track the resource needs properly, so give up.  */

#ifdef INSN_SETS_ARE_DELAYED
          if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
          if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
            return 0;
#endif

          /* See if any of the insns in the delay slot match, updating
             resource requirements as we go.  */
          for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
            {
              rtx candidate = XVECEXP (pat, 0, i);

              /* If an insn will be annulled if the branch is false, it isn't
                 considered as a possible duplicate insn.  */
              if (rtx_equal_p (PATTERN (candidate), ipat)
                  && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
                        && INSN_FROM_TARGET_P (candidate)))
                {
                  /* Show that this insn will be used in the sequel.  */
                  INSN_FROM_TARGET_P (candidate) = 0;
                  return candidate;
                }

              /* Unless this is an annulled insn from the target of a branch,
                 we must stop if it sets anything needed or set by INSN.  */
              if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
                   || ! INSN_FROM_TARGET_P (candidate))
                  && insn_sets_resource_p (candidate, &needed, 1))
                return 0;
            }

          /* If the insn requiring the delay slot conflicts with INSN, we
             must stop.  */
          if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
            return 0;
        }
      else
        {
          /* See if TRIAL is the same as INSN.  */
          pat = PATTERN (trial);
          if (rtx_equal_p (pat, ipat))
            return trial;

          /* Can't go any further if TRIAL conflicts with INSN.  */
          if (insn_sets_resource_p (trial, &needed, 1))
            return 0;
        }
    }

  return 0;
}
/* Return 1 if THREAD can only be executed in one way.  If LABEL is non-zero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is non-zero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */
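/* Illustrative sketch (placeholder arguments): for a conditional branch JUMP
   the two threads are typically tested as

       own_target      = own_thread_p (JUMP_LABEL (jump), JUMP_LABEL (jump), 0);
       own_fallthrough = own_thread_p (NEXT_INSN (jump), NULL_RTX, 1);

   i.e. the branch target may only be reached through its single-use label,
   while the fall-through code may also be reached by falling into it.  */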
static int
own_thread_p (thread, label, allow_fallthrough)
     rtx thread;
     rtx label;
     int allow_fallthrough;
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0)
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
        && (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    if (insn == 0
        || GET_CODE (insn) == CODE_LABEL
        || (GET_CODE (insn) == INSN
            && GET_CODE (PATTERN (insn)) != USE
            && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}
/* Find the number of the basic block that starts closest to INSN.  Return -1
   if we couldn't find such a basic block.  */

static int
find_basic_block (insn)
     rtx insn;
{
  int i;

  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */

  for (insn = prev_nonnote_insn (insn);
       insn && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    ;

  /* The start of the function is basic block zero.  */
  if (insn == 0)
    return 0;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && GET_CODE (insn) == CODE_LABEL;
       insn = next_nonnote_insn (insn))
    {
      for (i = 0; i < n_basic_blocks; i++)
        if (insn == basic_block_head[i])
          return i;
    }

  return -1;
}
/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (insn, where)
     rtx insn;
     rtx where;
{
  int b;

  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  b = find_basic_block (insn);
  if (b != -1)
    bb_ticks[b]++;
}
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (jump, nlabel)
     rtx jump;
     rtx nlabel;
{
  int b = find_basic_block (jump);

  if (b != -1)
    bb_ticks[b]++;

  return redirect_jump (jump, nlabel);
}
/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */

static void
update_reg_dead_notes (insn, delayed_insn)
     rtx insn, delayed_insn;
{
  rtx p, link, next;

  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
        next = XEXP (link, 1);

        if (REG_NOTE_KIND (link) != REG_DEAD
            || GET_CODE (XEXP (link, 0)) != REG)
          continue;

        if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
          {
            /* Move the REG_DEAD note from P to INSN.  */
            remove_note (p, link);
            XEXP (link, 1) = REG_NOTES (insn);
            REG_NOTES (insn) = link;
          }
      }
}
/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.

   This handles the case of udivmodXi4 instructions which optimize their
   output depending on whether any REG_UNUSED notes are present.  We must
   make sure that INSN calculates as many results as REDUNDANT_INSN.  */

static void
update_reg_unused_notes (insn, redundant_insn)
     rtx insn, redundant_insn;
{
  rtx link, next;

  for (link = REG_NOTES (insn); link; link = next)
    {
      next = XEXP (link, 1);

      if (REG_NOTE_KIND (link) != REG_UNUSED
          || GET_CODE (XEXP (link, 0)) != REG)
        continue;

      if (! find_regno_note (redundant_insn, REG_UNUSED,
                             REGNO (XEXP (link, 0))))
        remove_note (insn, link);
    }
}
/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Used only by the next two functions.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */
static void
update_live_status (dest, x)
     rtx dest;
     rtx x;
{
  int first_regno, last_regno;
  int i;

  if (GET_CODE (dest) != REG
      && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
    return;

  if (GET_CODE (dest) == SUBREG)
    first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
  else
    first_regno = REGNO (dest);

  last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
        SET_HARD_REG_BIT (current_live_regs, i);
        CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}
/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx
next_insn_no_annul (insn)
     rtx insn;
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
         of the branch.  */
      if (INSN_ANNULLED_BRANCH_P (insn)
          && NEXT_INSN (PREV_INSN (insn)) != insn)
        while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
          insn = NEXT_INSN (insn);

      insn = NEXT_INSN (insn);
      if (insn && GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}
/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status from basic_block_live_regs and
   reg_renumber.  (By starting at a label following a BARRIER, we are immune
   to actions taken by reload and jump.)  Then we scan all insns between
   that point and our target.  For each CLOBBER (or for call-clobbered regs
   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
   a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  */
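/* Example of use (illustrative only): callers ask for the registers live at
   the first active insn following a label, e.g.

       struct resources needed_at_jump;
       mark_target_live_regs (next_active_insn (JUMP_LABEL (jump)),
                              &needed_at_jump);

   and then treat needed_at_jump.regs as an upper bound on what must be
   preserved along that path.  */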
static void
mark_target_live_regs (target, res)
     rtx target;
     struct resources *res;
{
  int b = -1;
  int i;
  struct target_info *tinfo;
  rtx insn, next;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;
  int jump_count = 0;

  /* Handle end of function.  */
  if (target == 0)
    {
      *res = end_of_function_needs;
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
       tinfo; tinfo = tinfo->next)
    if (tinfo->uid == INSN_UID (target))
      break;

  /* Start by getting the basic block number.  If we have saved information,
     we can get it from there unless the insn at the start of the basic block
     has been deleted.  */
  if (tinfo && tinfo->block != -1
      && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
    b = tinfo->block;

  if (b == -1)
    b = find_basic_block (target);

  if (tinfo)
    {
      /* If the information is up-to-date, use it.  Otherwise, we will
         update it below.  */
      if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
        {
          COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
          return;
        }
    }
  else
    {
      /* Allocate a place to put our results and chain it into the
         hash table.  */
      tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
      tinfo->uid = INSN_UID (target);
      tinfo->block = b;
      tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
      target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);
  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = basic_block_live_at_start[b];
      int offset, j;
      REGSET_ELT_TYPE bit;
      int regno;
      rtx start_insn, stop_insn;

      /* Compute hard regs live at start of block -- this is the real hard regs
         marked live, plus live pseudo regs that have been renumbered to
         hard regs.  */

#ifdef HARD_REG_SET
      current_live_regs = *regs_live;
#else
      COPY_HARD_REG_SET (current_live_regs, regs_live);
#endif

      for (offset = 0, i = 0; offset < regset_size; offset++)
        {
          if (regs_live[offset] == 0)
            i += REGSET_ELT_BITS;
          else
            for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
              if ((regs_live[offset] & bit)
                  && (regno = reg_renumber[i]) >= 0)
                for (j = regno;
                     j < regno + HARD_REGNO_NREGS (regno,
                                                   PSEUDO_REGNO_MODE (i));
                     j++)
                  SET_HARD_REG_BIT (current_live_regs, j);
        }

      /* Get starting and ending insn, handling the case where each might
         be a SEQUENCE.  */
      start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
      stop_insn = target;

      if (GET_CODE (start_insn) == INSN
          && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
        start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (GET_CODE (stop_insn) == INSN
          && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
        stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
           insn = next_insn_no_annul (insn))
        {
          rtx link;
          rtx real_insn = insn;

          /* If this insn is from the target of a branch, it isn't going to
             be used in the sequel.  If it is used in both cases, this
             test will not be true.  */
          if (INSN_FROM_TARGET_P (insn))
            continue;

          /* If this insn is a USE made by update_block, we care about the
             underlying insn.  */
          if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
              && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
            real_insn = XEXP (PATTERN (insn), 0);

          if (GET_CODE (real_insn) == CALL_INSN)
            {
              /* CALL clobbers all call-used regs that aren't fixed except
                 sp, ap, and fp.  Do this before setting the result of the
                 call live.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (call_used_regs[i]
                    && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
                    && i != ARG_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
                    && i != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                    && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
#ifdef PIC_OFFSET_TABLE_REGNUM
                    && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
                    )
                  CLEAR_HARD_REG_BIT (current_live_regs, i);

              /* A CALL_INSN sets any global register live, since it may
                 have been modified by the call.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  SET_HARD_REG_BIT (current_live_regs, i);
            }

          /* Mark anything killed in an insn to be deadened at the next
             label.  Ignore USE insns; the only REG_DEAD notes will be for
             parameters.  But they might be early.  A CALL_INSN will usually
             clobber registers used for parameters.  It isn't worth bothering
             with the unlikely case when it won't.  */
          if ((GET_CODE (real_insn) == INSN
               && GET_CODE (PATTERN (real_insn)) != USE
               && GET_CODE (PATTERN (real_insn)) != CLOBBER)
              || GET_CODE (real_insn) == JUMP_INSN
              || GET_CODE (real_insn) == CALL_INSN)
            {
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_DEAD
                    && GET_CODE (XEXP (link, 0)) == REG
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  {
                    int first_regno = REGNO (XEXP (link, 0));
                    int last_regno
                      = (first_regno
                         + HARD_REGNO_NREGS (first_regno,
                                             GET_MODE (XEXP (link, 0))));

                    for (i = first_regno; i < last_regno; i++)
                      SET_HARD_REG_BIT (pending_dead_regs, i);
                  }

              note_stores (PATTERN (real_insn), update_live_status);

              /* If any registers were unused after this insn, kill them.
                 These notes will always be accurate.  */
              for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_UNUSED
                    && GET_CODE (XEXP (link, 0)) == REG
                    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
                  {
                    int first_regno = REGNO (XEXP (link, 0));
                    int last_regno
                      = (first_regno
                         + HARD_REGNO_NREGS (first_regno,
                                             GET_MODE (XEXP (link, 0))));

                    for (i = first_regno; i < last_regno; i++)
                      CLEAR_HARD_REG_BIT (current_live_regs, i);
                  }
            }

          else if (GET_CODE (real_insn) == CODE_LABEL)
            {
              /* A label clobbers the pending dead registers since neither
                 reload nor jump will propagate a value across a label.  */
              AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
              CLEAR_HARD_REG_SET (pending_dead_regs);
            }

          /* The beginning of the epilogue corresponds to the end of the
             RTL chain when there are no epilogue insns.  Certain resources
             are implicitly required at that point.  */
          else if (GET_CODE (real_insn) == NOTE
                   && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
            IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
        }

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      tinfo->bb_tick = bb_ticks[b];
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);
  /* Now step forward from TARGET looking for registers that are set before
     they are used.  These are dead.  If we pass a label, any pending dead
     registers that weren't yet used can be made dead.  Stop when we pass a
     conditional JUMP_INSN; follow the first few unconditional branches.  */

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);
      switch (GET_CODE (insn))
        {
        case CODE_LABEL:
          AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
          AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
          CLEAR_HARD_REG_SET (pending_dead_regs);
          continue;

        case BARRIER:
        case NOTE:
          continue;

        case INSN:
          if (GET_CODE (PATTERN (insn)) == USE)
            {
              /* If INSN is a USE made by update_block, we care about the
                 underlying insn.  Any registers set by the underlying insn
                 are live since the insn is being done somewhere else.  */
              if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
                mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);

              /* All other USE insns are to be ignored.  */
              continue;
            }
          else if (GET_CODE (PATTERN (insn)) == CLOBBER)
            continue;
          else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
            {
              /* An unconditional jump can be used to fill the delay slot
                 of a call, so search for a JUMP_INSN in any position.  */
              for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
                {
                  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
                  if (GET_CODE (this_jump_insn) == JUMP_INSN)
                    break;
                }
            }
        }

      if (GET_CODE (this_jump_insn) == JUMP_INSN)
        {
          if (jump_count++ < 10
              && (simplejump_p (this_jump_insn)
                  || GET_CODE (PATTERN (this_jump_insn)) == RETURN))
            {
              next = next_active_insn (JUMP_LABEL (this_jump_insn));
              if (jump_insn == 0)
                {
                  jump_insn = insn;
                  jump_target = JUMP_LABEL (this_jump_insn);
                }
            }
          else
            break;
        }

      mark_referenced_resources (insn, &needed, 1);
      mark_set_resources (insn, &set, 0, 1);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  The only things that are
     live are those that are live using the above test and the test below.

     Don't try this if we expired our jump count above, since that would
     mean there may be an infinite loop in the function being compiled.  */

  if (jump_insn && jump_count < 10)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      mark_target_live_regs (next_active_insn (jump_target), &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
        {
          mark_referenced_resources (insn, &needed, 1);

          COPY_HARD_REG_SET (scratch, needed.regs);
          AND_COMPL_HARD_REG_SET (scratch, set.regs);
          IOR_HARD_REG_SET (new_resources.regs, scratch);

          mark_set_resources (insn, &set, 0, 1);
        }

      AND_HARD_REG_SET (res->regs, new_resources.regs);
    }

  COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
}
/* Scan a function looking for insns that need a delay slot and find insns to
   put into the delay slot.

   NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
   as calls).  We do these first since we don't want jump insns (that are
   easier to fill) to get the only insns that could be used for non-jump insns.
   When it is zero, only try to fill JUMP_INSNs.

   When slots are filled in this manner, the insns (including the
   delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
   it is possible to tell whether a delay slot has really been filled
   or not.  `final' knows how to deal with this, by communicating
   through FINAL_SEQUENCE.  */
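/* For illustration, a filled insn ends up represented roughly as

       (insn (sequence [(call_insn ...)     element 0: the insn needing slots
                        (insn ...)]))       elements 1..n: the delay insns

   which is why code in this file repeatedly looks at
   XVECEXP (PATTERN (x), 0, 0) to recover the insn that owns the slots.
   (Sketch only; the printed rtl contains additional fields.)  */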
static void
fill_simple_delay_slots (first, non_jumps_p)
     rtx first;
     int non_jumps_p;
{
  register rtx insn, pat, trial, next_trial;
  register int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
  struct resources needed, set;
  register int slots_to_fill, slots_filled;
  rtx delay_list;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      int flags;

      /* Get the next insn to fill.  If it has already had any slots assigned,
         we can't do anything with it.  Maybe we'll improve this later.  */

      insn = unfilled_slots_base[i];
      if (insn == 0
          || INSN_DELETED_P (insn)
          || (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
          || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
          || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
        continue;

      if (GET_CODE (insn) == JUMP_INSN)
        flags = get_jump_flags (insn, JUMP_LABEL (insn));
      else
        flags = get_jump_flags (insn, NULL_RTX);
      slots_to_fill = num_delay_slots (insn);
      if (slots_to_fill == 0)
        abort ();

      /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
         says how many.  After initialization, first try optimizing

         call _foo              call _foo
         nop                    add %o7,.-L1,%o7
         b,a L1
         nop

         If this case applies, the delay slot of the call is filled with
         the unconditional jump.  This is done first to avoid having the
         delay slot of the call filled in the backward scan.  Also, since
         the unconditional jump is likely to also have a delay slot, that
         insn must exist when it is subsequently scanned.

         This is tried on each insn with delay slots as some machines
         have insns which perform calls, but are not represented as
         call insns.  */

      slots_filled = 0;
      delay_list = 0;

      if ((trial = next_active_insn (insn))
          && GET_CODE (trial) == JUMP_INSN
          && simplejump_p (trial)
          && eligible_for_delay (insn, slots_filled, trial, flags)
          && no_labels_between_p (insn, trial))
        {
          slots_filled++;
          delay_list = add_to_delay_list (trial, delay_list);
          /* Remove the unconditional jump from consideration for delay slot
             filling and unthread it.  */
          if (unfilled_slots_base[i + 1] == trial)
            unfilled_slots_base[i + 1] = 0;
          {
            rtx next = NEXT_INSN (trial);
            rtx prev = PREV_INSN (trial);

            if (prev)
              NEXT_INSN (prev) = next;
            if (next)
              PREV_INSN (next) = prev;
          }
        }
      /* Now, scan backwards from the insn to search for a potential
         delay-slot candidate.  Stop searching when a label or jump is hit.

         For each candidate, if it is to go into the delay slot (moved
         forward in execution sequence), it must not need or set any resources
         that were set by later insns and must not set any resources that
         are needed for those insns.

         The delay slot insn itself sets resources unless it is a call
         (in which case the called routine, not the insn itself, is doing
         the setting).  */

      if (slots_filled < slots_to_fill)
        {
          CLEAR_RESOURCE (&needed);
          CLEAR_RESOURCE (&set);
          mark_set_resources (insn, &set, 0, 0);
          mark_referenced_resources (insn, &needed, 0);

          for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
               trial = next_trial)
            {
              next_trial = prev_nonnote_insn (trial);

              /* This must be an INSN or CALL_INSN.  */
              pat = PATTERN (trial);

              /* USE and CLOBBER at this level are just for flow; ignore them.  */
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
                continue;

              /* Check for resource conflict first, to avoid unnecessary
                 splitting.  */
              if (! insn_references_resource_p (trial, &set, 1)
                  && ! insn_sets_resource_p (trial, &set, 1)
                  && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
                  /* Can't separate set of cc0 from its use.  */
                  && ! (reg_mentioned_p (cc0_rtx, pat)
                        && ! sets_cc0_p (cc0_rtx, pat))
#endif
                  )
                {
                  trial = try_split (pat, trial, 1);
                  next_trial = prev_nonnote_insn (trial);
                  if (eligible_for_delay (insn, slots_filled, trial, flags))
                    {
                      /* In this case, we are searching backward, so if we
                         find insns to put on the delay list, we want
                         to put them at the head, rather than the
                         tail, of the list.  */

                      update_reg_dead_notes (trial, insn);
                      delay_list = gen_rtx (INSN_LIST, VOIDmode,
                                            trial, delay_list);
                      update_block (trial, trial);
                      delete_insn (trial);
                      if (slots_to_fill == ++slots_filled)
                        break;
                      continue;
                    }
                }

              mark_set_resources (trial, &set, 0, 1);
              mark_referenced_resources (trial, &needed, 1);
            }
        }

      /* If all needed slots haven't been filled, we come here.  */

      /* Try to optimize case of jumping around a single insn.  */
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
      if (slots_filled != slots_to_fill
          && delay_list == 0
          && GET_CODE (insn) == JUMP_INSN
          && (condjump_p (insn) || condjump_in_parallel_p (insn)))
        {
          delay_list = optimize_skip (insn);
          if (delay_list)
            slots_filled += 1;
        }
#endif
      /* Try to get insns from beyond the insn needing the delay slot.
         These insns can neither set nor reference resources set in insns being
         skipped, cannot set resources in the insn being skipped, and, if this
         is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
         call might not return).

         If this is a conditional jump, see if it merges back to us early
         enough for us to pick up insns from the merge point.  Don't do
         this if there is another branch to our label unless we pass all of
         them.

         Another similar merge is if we jump to the same place that a
         later unconditional jump branches to.  In that case, we don't
         care about the number of uses of our label.  */

      if (slots_filled != slots_to_fill
          && (GET_CODE (insn) != JUMP_INSN
              || ((condjump_p (insn) || condjump_in_parallel_p (insn))
                  && ! simplejump_p (insn)
                  && JUMP_LABEL (insn) != 0)))
        {
          rtx target = 0;
          int target_uses = 0;
          int maybe_never = 0;
          int passed_label = 0;
          struct resources needed_at_jump;

          CLEAR_RESOURCE (&needed);
          CLEAR_RESOURCE (&set);

          if (GET_CODE (insn) == CALL_INSN)
            {
              mark_set_resources (insn, &set, 0, 1);
              mark_referenced_resources (insn, &needed, 1);
              maybe_never = 1;
            }
          else
            {
              mark_set_resources (insn, &set, 0, 1);
              mark_referenced_resources (insn, &needed, 1);
              if (GET_CODE (insn) == JUMP_INSN)
                {
                  /* Get our target and show how many more uses we want to
                     see before we hit the label.  */
                  target = JUMP_LABEL (insn);
                  target_uses = LABEL_NUSES (target) - 1;
                }
            }

          for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
            {
              rtx pat, trial_delay;

              next_trial = next_nonnote_insn (trial);

              if (GET_CODE (trial) == CODE_LABEL)
                {
                  passed_label = 1;

                  /* If this is our target, see if we have seen all its uses.
                     If so, indicate we have passed our target and ignore it.
                     All other labels cause us to stop our search.  */
                  if (trial == target && target_uses == 0)
                    {
                      target = 0;
                      continue;
                    }
                  else
                    break;
                }
              else if (GET_CODE (trial) == BARRIER)
                break;

              /* We must have an INSN, JUMP_INSN, or CALL_INSN.  */
              pat = PATTERN (trial);

              /* Stand-alone USE and CLOBBER are just for flow.  */
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
                continue;

              /* If this already has filled delay slots, get the insn needing
                 the delay slots.  */
              if (GET_CODE (pat) == SEQUENCE)
                trial_delay = XVECEXP (pat, 0, 0);
              else
                trial_delay = trial;

              /* If this is a jump insn to our target, indicate that we have
                 seen another jump to it.  If we aren't handling a conditional
                 jump, stop our search.  Otherwise, compute the needs at its
                 target and add them to NEEDED.  */
              if (GET_CODE (trial_delay) == JUMP_INSN)
                {
                  if (target == 0)
                    break;
                  else if (JUMP_LABEL (trial_delay) == target)
                    target_uses--;
                  else
                    {
                      mark_target_live_regs
                        (next_active_insn (JUMP_LABEL (trial_delay)),
                         &needed_at_jump);
                      needed.memory |= needed_at_jump.memory;
                      IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
                    }
                }

              /* See if we have a resource problem before we try to
                 split.  */
              if (GET_CODE (pat) != SEQUENCE
                  && ! insn_references_resource_p (trial, &set, 1)
                  && ! insn_sets_resource_p (trial, &set, 1)
                  && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
                  && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
#endif
                  && ! (maybe_never && may_trap_p (pat))
                  && (trial = try_split (pat, trial, 0))
                  && eligible_for_delay (insn, slots_filled, trial, flags))
                {
                  next_trial = next_nonnote_insn (trial);
                  delay_list = add_to_delay_list (trial, delay_list);

#ifdef HAVE_cc0
                  if (reg_mentioned_p (cc0_rtx, pat))
                    link_cc0_insns (trial);
#endif

                  update_block (trial, trial);
                  delete_insn (trial);
                  if (slots_to_fill == ++slots_filled)
                    break;
                  continue;
                }

              mark_set_resources (trial, &set, 0, 1);
              mark_referenced_resources (trial, &needed, 1);

              /* Ensure we don't put insns between the setting of cc and the
                 comparison by moving a setting of cc into an earlier delay
                 slot since these insns could clobber the condition code.  */
              set.cc = 1;

              /* If this is a call or jump, we might not get here.  */
              if (GET_CODE (trial) == CALL_INSN
                  || GET_CODE (trial) == JUMP_INSN)
                maybe_never = 1;
            }

          /* If there are slots left to fill and our search was stopped by an
             unconditional branch, try the insn at the branch target.  We can
             redirect the branch if it works.  */
          if (slots_to_fill != slots_filled
              && trial
              && GET_CODE (trial) == JUMP_INSN
              && simplejump_p (trial)
              && (target == 0 || JUMP_LABEL (trial) == target)
              && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
              && ! (GET_CODE (next_trial) == INSN
                    && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
              && ! insn_references_resource_p (next_trial, &set, 1)
              && ! insn_sets_resource_p (next_trial, &set, 1)
              && ! insn_sets_resource_p (next_trial, &needed, 1)
#ifdef HAVE_cc0
              && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
#endif
              && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
              && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
              && eligible_for_delay (insn, slots_filled, next_trial, flags))
            {
              rtx new_label = next_active_insn (next_trial);

              if (new_label != 0)
                new_label = get_label_before (new_label);
              else
                new_label = find_end_label ();

              delay_list
                = add_to_delay_list (copy_rtx (next_trial), delay_list);
              slots_filled++;
              reorg_redirect_jump (trial, new_label);

              /* If we merged because we both jumped to the same place,
                 redirect the original insn also.  */
              if (target)
                reorg_redirect_jump (insn, new_label);
            }
        }

      if (delay_list)
        unfilled_slots_base[i]
          = emit_delay_sequence (insn, delay_list,
                                 slots_filled, slots_to_fill);

      if (slots_to_fill == slots_filled)
        unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 0);
    }
#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* See if the epilogue needs any delay slots.  Try to fill them if so.
     The only thing we can do is scan backwards from the end of the
     function.  If we did this in a previous pass, it is incorrect to do it
     again.  */
  if (current_function_epilogue_delay_list)
    return;

  slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
  if (slots_to_fill == 0)
    return;

  slots_filled = 0;
  CLEAR_RESOURCE (&set);

  /* The frame pointer and stack pointer are needed at the beginning of
     the epilogue, so instructions setting them can not be put in the
     epilogue delay slot.  However, everything else needed at function
     end is safe, so we don't want to use end_of_function_needs here.  */
  CLEAR_RESOURCE (&needed);
  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
        SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);

  for (trial = get_last_insn (); ! stop_search_p (trial, 1);
       trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) == NOTE)
        continue;
      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      if (! insn_references_resource_p (trial, &set, 1)
          && ! insn_sets_resource_p (trial, &needed, 1)
          && ! insn_sets_resource_p (trial, &set, 1)
#ifdef HAVE_cc0
          /* Don't want to mess with cc0 here.  */
          && ! reg_mentioned_p (cc0_rtx, pat)
#endif
          )
        {
          trial = try_split (pat, trial, 1);
          if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
            {
              /* Here as well we are searching backward, so put the
                 insns we find on the head of the list.  */

              current_function_epilogue_delay_list
                = gen_rtx (INSN_LIST, VOIDmode, trial,
                           current_function_epilogue_delay_list);
              mark_referenced_resources (trial, &end_of_function_needs, 1);
              update_block (trial, trial);
              delete_insn (trial);

              /* Clear deleted bit so final.c will output the insn.  */
              INSN_DELETED_P (trial) = 0;

              if (slots_to_fill == ++slots_filled)
                break;
              continue;
            }
        }

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }

  note_delay_statistics (slots_filled, 0);
#endif
}
/* Try to find insns to place in delay slots.

   INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
   or is an unconditional branch if CONDITION is const_true_rtx.
   *PSLOTS_FILLED is updated with the number of slots that we have filled.

   THREAD is a flow-of-control, either the insns to be executed if the
   branch is true or if the branch is false, THREAD_IF_TRUE says which.

   OPPOSITE_THREAD is the thread in the opposite direction.  It is used
   to see if any potential delay slot insns set things needed there.

   LIKELY is non-zero if it is extremely likely that the branch will be
   taken and THREAD_IF_TRUE is set.  This is used for the branch at the
   end of a loop back up to the top.

   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
   thread.  I.e., it is the fallthrough code of our jump or the target of the
   jump when we are the only jump going there.

   If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
   case, we can only take insns from the head of the thread for our delay
   slot.  We then adjust the jump to point after the insns we have taken.  */
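/* Illustrative picture (hypothetical labels only):

       insn:  if (cc) goto L1
              <fall-through code>        one thread
       L1:    <target code>              the other thread

   A call with THREAD_IF_TRUE set scans the code at L1 and uses the
   fall-through code as OPPOSITE_THREAD; the reverse call swaps the two.  */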
static rtx
fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
                        thread_if_true, own_thread, own_opposite_thread,
                        slots_to_fill, pslots_filled)
     rtx insn;
     rtx condition;
     rtx thread, opposite_thread;
     int likely;
     int thread_if_true;
     int own_thread, own_opposite_thread;
     int slots_to_fill, *pslots_filled;
{
  rtx new_thread;
  rtx delay_list = 0;
  struct resources opposite_needed, set, needed;
  rtx trial;
  int lose = 0;
  int must_annul = 0;
  int flags;

  /* Validate our arguments.  */
  if ((condition == const_true_rtx && ! thread_if_true)
      || (! own_thread && ! thread_if_true))
    abort ();

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* If our thread is the end of subroutine, we can't get any delay
     insns from that.  */
  if (thread == 0)
    return 0;

  /* If this is an unconditional branch, nothing is needed at the
     opposite thread.  Otherwise, compute what is needed there.  */
  if (condition == const_true_rtx)
    CLEAR_RESOURCE (&opposite_needed);
  else
    mark_target_live_regs (opposite_thread, &opposite_needed);

  /* If the insn at THREAD can be split, do it here to avoid having to
     update THREAD and NEW_THREAD if it is done in the loop below.  Also
     initialize NEW_THREAD.  */

  new_thread = thread = try_split (PATTERN (thread), thread, 0);

  /* Scan insns at THREAD.  We are looking for an insn that can be removed
     from THREAD (it neither sets nor references resources that were set
     ahead of it and it doesn't set anything needed by the insns ahead of
     it) and that either can be placed in an annulling insn or isn't
     needed at OPPOSITE_THREAD.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If we do not own this thread, we must stop as soon as we find
     something that we can't put in a delay slot, since all we can do
     is branch into THREAD at a later point.  Therefore, labels stop
     the search if this is not the `true' thread.  */

  for (trial = thread;
       ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
       trial = next_nonnote_insn (trial))
    {
      rtx pat, old_trial;

      /* If we have passed a label, we no longer own this thread.  */
      if (GET_CODE (trial) == CODE_LABEL)
        {
          own_thread = 0;
          continue;
        }

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;

      /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
         don't separate or copy insns that set and use CC0.  */
      if (! insn_references_resource_p (trial, &set, 1)
          && ! insn_sets_resource_p (trial, &set, 1)
          && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
          && ! (reg_mentioned_p (cc0_rtx, pat)
                && (! own_thread || ! sets_cc0_p (pat)))
#endif
          )
        {
          rtx prior_insn;

          /* If TRIAL is redundant with some insn before INSN, we don't
             actually need to add it to the delay list; we can merely pretend
             we did.  */
          if (prior_insn = redundant_insn (trial, insn, delay_list))
            {
              if (own_thread)
                {
                  update_block (trial, thread);
                  if (trial == thread)
                    {
                      thread = next_active_insn (thread);
                      if (new_thread == trial)
                        new_thread = thread;
                    }

                  delete_insn (trial);
                }
              else
                {
                  update_reg_unused_notes (prior_insn, trial);
                  new_thread = next_active_insn (trial);
                }

              continue;
            }
          /* There are two ways we can win: If TRIAL doesn't set anything
             needed at the opposite thread and can't trap, or if it can
             go into an annulled delay slot.  */
          if (condition == const_true_rtx
              || (! insn_sets_resource_p (trial, &opposite_needed, 1)
                  && ! may_trap_p (pat)))
            {
              old_trial = trial;
              trial = try_split (pat, trial, 0);
              if (new_thread == old_trial)
                new_thread = trial;
              if (thread == old_trial)
                thread = trial;
              pat = PATTERN (trial);
              if (eligible_for_delay (insn, *pslots_filled, trial, flags))
                goto winner;
            }
          else if (0
#ifdef ANNUL_IFTRUE_SLOTS
                   || ! thread_if_true
#endif
#ifdef ANNUL_IFFALSE_SLOTS
                   || thread_if_true
#endif
                   )
            {
              old_trial = trial;
              trial = try_split (pat, trial, 0);
              if (new_thread == old_trial)
                new_thread = trial;
              pat = PATTERN (trial);
              if ((thread_if_true
                   ? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
                   : eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
                {
                  rtx temp;

                  must_annul = 1;
                winner:

#ifdef HAVE_cc0
                  if (reg_mentioned_p (cc0_rtx, pat))
                    link_cc0_insns (trial);
#endif

                  /* If we own this thread, delete the insn.  If this is the
                     destination of a branch, show that a basic block status
                     may have been updated.  In any case, mark the new
                     starting point of this thread.  */
                  if (own_thread)
                    {
                      update_block (trial, thread);
                      delete_insn (trial);
                    }
                  else
                    new_thread = next_active_insn (trial);

                  temp = own_thread ? trial : copy_rtx (trial);
                  if (thread_if_true)
                    INSN_FROM_TARGET_P (temp) = 1;

                  delay_list = add_to_delay_list (temp, delay_list);

                  if (slots_to_fill == ++(*pslots_filled))
                    {
                      /* Even though we have filled all the slots, we
                         may be branching to a location that has a
                         redundant insn.  Skip any if so.  */
                      while (new_thread && ! own_thread
                             && ! insn_sets_resource_p (new_thread, &set, 1)
                             && ! insn_sets_resource_p (new_thread, &needed, 1)
                             && ! insn_references_resource_p (new_thread,
                                                              &set, 1)
                             && redundant_insn (new_thread, insn, delay_list))
                        new_thread = next_active_insn (new_thread);
                      break;
                    }

                  continue;
                }
            }
        }

      /* This insn can't go into a delay slot.  */
      lose = 1;
      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);

      /* Ensure we don't put insns between the setting of cc and the comparison
         by moving a setting of cc into an earlier delay slot since these insns
         could clobber the condition code.  */
      set.cc = 1;

      /* If this insn is a register-register copy and the next insn has
         a use of our destination, change it to use our source.  That way,
         it will become a candidate for our delay slot the next time
         through this loop.  This case occurs commonly in loops that
         scan a list.

         We could check for more complex cases than those tested below,
         but it doesn't seem worth it.  It might also be a good idea to try
         to swap the two insns.  That might do better.

         We can't do this if the next insn modifies our destination, because
         that would make the replacement into the insn invalid.  We also can't
         do this if it modifies our source, because it might be an earlyclobber
         operand.  This latter test also prevents updating the contents of
         a PRE_INC.  */

      if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
          && GET_CODE (SET_SRC (pat)) == REG
          && GET_CODE (SET_DEST (pat)) == REG)
        {
          rtx next = next_nonnote_insn (trial);

          if (next && GET_CODE (next) == INSN
              && GET_CODE (PATTERN (next)) != USE
              && ! reg_set_p (SET_DEST (pat), next)
              && ! reg_set_p (SET_SRC (pat), next)
              && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
            validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
        }
    }
  /* If we stopped on a branch insn that has delay slots, see if we can
     steal some of the insns in those slots.  */
  if (trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
    {
      /* If this is the `true' thread, we will want to follow the jump,
         so we can only do this if we have taken everything up to here.  */
      if (thread_if_true && trial == new_thread)
        delay_list
          = steal_delay_list_from_target (insn, condition, PATTERN (trial),
                                          delay_list, &set, &needed,
                                          &opposite_needed, slots_to_fill,
                                          pslots_filled, &must_annul,
                                          &new_thread);
      else if (! thread_if_true)
        delay_list
          = steal_delay_list_from_fallthrough (insn, condition,
                                               PATTERN (trial),
                                               delay_list, &set, &needed,
                                               &opposite_needed, slots_to_fill,
                                               pslots_filled, &must_annul);
    }

  /* If we haven't found anything for this delay slot and it is very
     likely that the branch will be taken, see if the insn at our target
     increments or decrements a register with an increment that does not
     depend on the destination register.  If so, try to place the opposite
     arithmetic insn after the jump insn and put the arithmetic insn in the
     delay slot.  If we can't do this, return.  */
  if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
    {
      rtx pat = PATTERN (new_thread);
      rtx dest;
      rtx src;

      trial = new_thread;
      pat = PATTERN (trial);

      if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
          || ! eligible_for_delay (insn, 0, trial, flags))
        return 0;

      dest = SET_DEST (pat), src = SET_SRC (pat);
      if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
          && rtx_equal_p (XEXP (src, 0), dest)
          && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
        {
          rtx other = XEXP (src, 1);
          rtx new_arith;
          rtx ninsn;

          /* If this is a constant adjustment, use the same code with
             the negated constant.  Otherwise, reverse the sense of the
             arithmetic.  */
          if (GET_CODE (other) == CONST_INT)
            new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
                                 negate_rtx (GET_MODE (src), other));
          else
            new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
                                 GET_MODE (src), dest, other);

          ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
                                   insn);

          if (recog_memoized (ninsn) < 0
              || (insn_extract (ninsn),
                  ! constrain_operands (INSN_CODE (ninsn), 1)))
            {
              delete_insn (ninsn);
              return 0;
            }

          if (own_thread)
            {
              update_block (trial, thread);
              delete_insn (trial);
            }
          else
            new_thread = next_active_insn (trial);

          ninsn = own_thread ? trial : copy_rtx (trial);
          if (thread_if_true)
            INSN_FROM_TARGET_P (ninsn) = 1;

          delay_list = add_to_delay_list (ninsn, NULL_RTX);
          (*pslots_filled)++;
        }
    }

  if (delay_list && must_annul)
    INSN_ANNULLED_BRANCH_P (insn) = 1;

  /* If we are to branch into the middle of this thread, find an appropriate
     label or make a new one if none, and redirect INSN to it.  If we hit the
     end of the function, use the end-of-function label.  */
  if (new_thread != thread)
    {
      rtx label;

      if (! thread_if_true)
        abort ();

      if (new_thread && GET_CODE (new_thread) == JUMP_INSN
          && (simplejump_p (new_thread)
              || GET_CODE (PATTERN (new_thread)) == RETURN)
          && redirect_with_delay_list_safe_p (insn,
                                              JUMP_LABEL (new_thread),
                                              delay_list))
        new_thread = follow_jumps (JUMP_LABEL (new_thread));

      if (new_thread == 0)
        label = find_end_label ();
      else if (GET_CODE (new_thread) == CODE_LABEL)
        label = new_thread;
      else
        label = get_label_before (new_thread);

      reorg_redirect_jump (insn, label);
    }

  return delay_list;
}
/* Make another attempt to find insns to place in delay slots.

   We previously looked for insns located in front of the delay insn
   and, for non-jump delay insns, located behind the delay insn.

   Here only try to schedule jump insns and try to move insns from either
   the target or the following insns into the delay slot.  If annulling is
   supported, we will be likely to do this.  Otherwise, we can do this only
   if safe.  */
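/* Illustrative only: for a conditional jump that is predicted taken, the
   calls below are made in the order

       fill_slots_from_thread (insn, cond, insn_at_target, fallthrough_insn, ...);
       fill_slots_from_thread (insn, cond, fallthrough_insn, insn_at_target, ...);

   and in the opposite order when the jump is predicted not taken
   (argument lists abbreviated).  */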
static void
fill_eager_delay_slots (first)
     rtx first;
{
  register rtx insn;
  register int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      rtx condition;
      rtx target_label, insn_at_target, fallthrough_insn;
      rtx delay_list = 0;
      int own_target;
      int own_fallthrough;
      int prediction, slots_to_fill, slots_filled;

      insn = unfilled_slots_base[i];
      if (insn == 0
          || INSN_DELETED_P (insn)
          || GET_CODE (insn) != JUMP_INSN
          || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
        continue;

      slots_to_fill = num_delay_slots (insn);
      if (slots_to_fill == 0)
        abort ();

      slots_filled = 0;
      target_label = JUMP_LABEL (insn);
      condition = get_branch_condition (insn, target_label);

      if (condition == 0)
        continue;

      /* Get the next active fallthrough and target insns and see if we own
         them.  Then see whether the branch is likely true.  We don't need
         to do a lot of this for unconditional branches.  */

      insn_at_target = next_active_insn (target_label);
      own_target = own_thread_p (target_label, target_label, 0);

      if (condition == const_true_rtx)
        {
          own_fallthrough = 0;
          fallthrough_insn = 0;
          prediction = 2;
        }
      else
        {
          fallthrough_insn = next_active_insn (insn);
          own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
          prediction = mostly_true_jump (insn, condition);
        }

      /* If this insn is expected to branch, first try to get insns from our
         target, then our fallthrough insns.  If it is not expected to branch,
         try the other order.  */

      if (prediction > 0)
        {
          delay_list
            = fill_slots_from_thread (insn, condition, insn_at_target,
                                      fallthrough_insn, prediction == 2, 1,
                                      own_target, own_fallthrough,
                                      slots_to_fill, &slots_filled);

          if (delay_list == 0 && own_fallthrough)
            {
              /* Even though we didn't find anything for delay slots,
                 we might have found a redundant insn which we deleted
                 from the thread that was filled.  So we have to recompute
                 the next insn at the target.  */
              target_label = JUMP_LABEL (insn);
              insn_at_target = next_active_insn (target_label);

              delay_list
                = fill_slots_from_thread (insn, condition, fallthrough_insn,
                                          insn_at_target, 0, 0,
                                          own_fallthrough, own_target,
                                          slots_to_fill, &slots_filled);
            }
        }
      else
        {
          if (own_fallthrough)
            delay_list
              = fill_slots_from_thread (insn, condition, fallthrough_insn,
                                        insn_at_target, 0, 0,
                                        own_fallthrough, own_target,
                                        slots_to_fill, &slots_filled);

          if (delay_list == 0)
            delay_list
              = fill_slots_from_thread (insn, condition, insn_at_target,
                                        next_active_insn (insn), 0, 1,
                                        own_target, own_fallthrough,
                                        slots_to_fill, &slots_filled);
        }

      if (delay_list)
        unfilled_slots_base[i]
          = emit_delay_sequence (insn, delay_list,
                                 slots_filled, slots_to_fill);

      if (slots_to_fill == slots_filled)
        unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 1);
    }
}
/* Once we have tried two ways to fill a delay slot, make a pass over the
   code to try to improve the results and to do such things as more jump
   threading.  */
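/* One of the transformations attempted below, sketched on pseudo-assembly
   with hypothetical labels:

         bne  L1                 becomes          beq  L2
         b    L2                               L1: ...
     L1: ...

   i.e. a conditional jump around an unconditional jump is inverted and
   redirected to the unconditional jump's target.  */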
static void
relax_delay_slots (first)
     rtx first;
{
  register rtx insn, next, pat;
  register rtx trial, delay_insn, target_label;

  /* Look at every JUMP_INSN and see if we can improve it.  */
  for (insn = first; insn; insn = next)
    {
      rtx other;

      next = next_active_insn (insn);

      /* If this is a jump insn, see if it now jumps to a jump, jumps to
         the next insn, or jumps to a label that is not the last of a
         group of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && (target_label = JUMP_LABEL (insn)) != 0)
        {
          target_label = follow_jumps (target_label);
          target_label = prev_label (next_active_insn (target_label));

          if (target_label == 0)
            target_label = find_end_label ();

          if (next_active_insn (target_label) == next
              && ! condjump_in_parallel_p (insn))
            {
              delete_jump (insn);
              continue;
            }

          if (target_label != JUMP_LABEL (insn))
            reorg_redirect_jump (insn, target_label);

          /* See if this jump branches around an unconditional jump.
             If so, invert this jump and point it to the target of the
             second jump.  */
          if (next && GET_CODE (next) == JUMP_INSN
              && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
              && next_active_insn (target_label) == next_active_insn (next)
              && no_labels_between_p (insn, next))
            {
              rtx label = JUMP_LABEL (next);

              /* Be careful how we do this to avoid deleting code or
                 labels that are momentarily dead.  See similar optimization
                 in jump.c.

                 We also need to ensure we properly handle the case when
                 invert_jump fails.  */

              ++LABEL_NUSES (target_label);
              if (label)
                ++LABEL_NUSES (label);

              if (invert_jump (insn, label))
                {
                  delete_insn (next);
                  next = insn;
                }

              if (label)
                --LABEL_NUSES (label);

              if (--LABEL_NUSES (target_label) == 0)
                delete_insn (target_label);

              continue;
            }
        }

      /* If this is an unconditional jump and the previous insn is a
         conditional jump, try reversing the condition of the previous
         insn and swapping our targets.  The next pass might be able to
         fill the slots.

         Don't do this if we expect the conditional branch to be true, because
         we would then be making the more common case longer.  */

      if (GET_CODE (insn) == JUMP_INSN
          && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
          && (other = prev_active_insn (insn)) != 0
          && (condjump_p (other) || condjump_in_parallel_p (other))
          && no_labels_between_p (other, insn)
          && 0 < mostly_true_jump (other,
                                   get_branch_condition (other,
                                                         JUMP_LABEL (other))))
        {
          rtx other_target = JUMP_LABEL (other);
          target_label = JUMP_LABEL (insn);

          /* Increment the count of OTHER_TARGET, so it doesn't get deleted
             as we move the label.  */
          if (other_target)
            ++LABEL_NUSES (other_target);

          if (invert_jump (other, target_label))
            reorg_redirect_jump (insn, other_target);

          if (other_target)
            --LABEL_NUSES (other_target);
        }
3837 /* Now look only at cases where we have filled a delay slot. */
3838 if (GET_CODE (insn
) != INSN
3839 || GET_CODE (PATTERN (insn
)) != SEQUENCE
)
3842 pat
= PATTERN (insn
);
3843 delay_insn
= XVECEXP (pat
, 0, 0);
3845 /* See if the first insn in the delay slot is redundant with some
3846 previous insn. Remove it from the delay slot if so; then set up
3847 to reprocess this insn. */
3848 if (redundant_insn (XVECEXP (pat
, 0, 1), delay_insn
, 0))
3850 delete_from_delay_slot (XVECEXP (pat
, 0, 1));
3851 next
= prev_active_insn (next
);
3855 /* Now look only at the cases where we have a filled JUMP_INSN. */
3856 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) != JUMP_INSN
3857 || ! (condjump_p (XVECEXP (PATTERN (insn
), 0, 0))
3858 || condjump_in_parallel_p (XVECEXP (PATTERN (insn
), 0, 0))))
3861 target_label
= JUMP_LABEL (delay_insn
);
3865 /* If this jump goes to another unconditional jump, thread it, but
3866 don't convert a jump into a RETURN here. */
3867 trial
= follow_jumps (target_label
);
3868 trial
= prev_label (next_active_insn (trial
));
3869 if (trial
== 0 && target_label
!= 0)
3870 trial
= find_end_label ();
3872 if (trial
!= target_label
3873 && redirect_with_delay_slots_safe_p (delay_insn
, trial
, insn
))
3875 reorg_redirect_jump (delay_insn
, trial
);
3876 target_label
= trial
;
3879 /* If the first insn at TARGET_LABEL is redundant with a previous
3880 insn, redirect the jump to the following insn process again. */
3881 trial
= next_active_insn (target_label
);
3882 if (trial
&& GET_CODE (PATTERN (trial
)) != SEQUENCE
3883 && redundant_insn (trial
, insn
, 0))
3885 trial
= next_active_insn (trial
);
3887 target_label
= find_end_label ();
3889 target_label
= get_label_before (trial
);
3890 reorg_redirect_jump (delay_insn
, target_label
);
3895 /* Similarly, if it is an unconditional jump with one insn in its
3896 delay list and that insn is redundant, thread the jump. */
3897 if (trial
&& GET_CODE (PATTERN (trial
)) == SEQUENCE
3898 && XVECLEN (PATTERN (trial
), 0) == 2
3899 && GET_CODE (XVECEXP (PATTERN (trial
), 0, 0)) == JUMP_INSN
3900 && (simplejump_p (XVECEXP (PATTERN (trial
), 0, 0))
3901 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial
), 0, 0))) == RETURN
)
3902 && redundant_insn (XVECEXP (PATTERN (trial
), 0, 1), insn
, 0))
3904 target_label
= JUMP_LABEL (XVECEXP (PATTERN (trial
), 0, 0));
3905 if (target_label
== 0)
3906 target_label
= find_end_label ();
3908 if (redirect_with_delay_slots_safe_p (delay_insn
, target_label
,
3911 reorg_redirect_jump (delay_insn
, target_label
);

      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && prev_active_insn (target_label) == insn
          && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
          /* If the last insn in the delay slot sets CC0 for some insn,
             various code assumes that it is in a delay slot.  We could
             put it back where it belonged and delete the register notes,
             but it doesn't seem worthwhile in this uncommon case.  */
          && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
                              REG_CC_USER, NULL_RTX)
#endif
          )
        {
          int i;

          /* All this insn does is execute its delay list and jump to the
             following insn.  So delete the jump and just execute the delay
             list insns.

             We do this by deleting the INSN containing the SEQUENCE, then
             re-emitting the insns separately, and then deleting the jump.
             This allows the count of the jump target to be properly
             decremented.  */

          /* Clear the from target bit, since these insns are no longer
             in delay slots.  */
          for (i = 0; i < XVECLEN (pat, 0); i++)
            INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

          trial = PREV_INSN (insn);
          delete_insn (insn);
          emit_insn_after (pat, trial);
          delete_scheduled_jump (delay_insn);
          continue;
        }
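
      /* Illustration (a hypothetical sketch): if the filled branch

               (sequence [(jump_insn ... L1) (insn A)])
           L1:

         jumps to the very insn that follows it, the SEQUENCE is re-emitted
         as a plain copy of A and the jump is deleted, leaving only

               (insn A)
           L1:  */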

      /* See if this is an unconditional jump around a single insn which is
         identical to the one in its delay slot.  In this case, we can just
         delete the branch and the insn in its delay slot.  */
      if (next && GET_CODE (next) == INSN
          && prev_label (next_active_insn (next)) == target_label
          && simplejump_p (insn)
          && XVECLEN (pat, 0) == 2
          && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
        {
          delete_insn (insn);
          continue;
        }

      /* See if this jump (with its delay slots) branches around another
         jump (without delay slots).  If so, invert this jump and point
         it to the target of the second jump.  We cannot do this for
         annulled jumps, though.  Again, don't convert a jump to a RETURN
         here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && next && GET_CODE (next) == JUMP_INSN
          && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
          && next_active_insn (target_label) == next_active_insn (next)
          && no_labels_between_p (insn, next))
        {
          rtx label = JUMP_LABEL (next);
          rtx old_label = JUMP_LABEL (delay_insn);

          if (label == 0)
            label = find_end_label ();

          if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
            {
              /* Be careful how we do this to avoid deleting code or labels
                 that are momentarily dead.  See similar optimization in
                 jump.c.  */
              if (old_label)
                ++LABEL_NUSES (old_label);

              if (invert_jump (delay_insn, label))
                {
                  int i;

                  /* Must update the INSN_FROM_TARGET_P bits now that
                     the branch is reversed, so that mark_target_live_regs
                     will handle the delay slot insn correctly.  */
                  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                    {
                      rtx slot = XVECEXP (PATTERN (insn), 0, i);
                      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
                    }

                  delete_insn (next);
                  next = insn;
                }

              if (old_label && --LABEL_NUSES (old_label) == 0)
                delete_insn (old_label);
              continue;
            }
        }
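
      /* Illustration (a hypothetical sketch, not tied to any particular
         machine): a filled conditional branch around a simple jump,

             beq  ...,L2		<- DELAY_INSN inside the SEQUENCE
              (slot insn)
             b    L3			<- NEXT, no delay slots
           L2:

         becomes

             bne  ...,L3		<- condition inverted, now targets L3
              (slot insn)
           L2:

         The INSN_FROM_TARGET_P bits of the slot insns are flipped because
         the taken and fall-through paths have exchanged roles.  */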

      /* If we own the thread opposite the way this insn branches, see if we
         can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
          && own_thread_p (NEXT_INSN (insn), 0, 1))
        try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
               && own_thread_p (target_label, target_label, 0))
        try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
         NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}

#ifdef HAVE_return

/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */

static void
make_return_insns (first)
     rtx first;
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
        real_return_label = get_label_before (insn);
        break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
         label.  */
      if (GET_CODE (insn) != INSN
          || GET_CODE (PATTERN (insn)) != SEQUENCE
          || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
          || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
        continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to the
         best RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
        {
          /* Make sure redirecting the jump will not invalidate the delay
             slot insns.  */
          if (redirect_with_delay_slots_safe_p (jump_insn,
                                                real_return_label,
                                                insn))
            reorg_redirect_jump (jump_insn, real_return_label);
          continue;
        }

      /* See if this RETURN can accept the insns currently in its delay slot.
         It can if it has more or an equal number of slots and the contents
         of each is valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
        {
          for (i = 1; i < XVECLEN (pat, 0); i++)
            if (! (
#ifdef ANNUL_IFFALSE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
                   ? eligible_for_annul_false (jump_insn, i - 1,
                                               XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
                   ? eligible_for_annul_true (jump_insn, i - 1,
                                              XVECEXP (pat, 0, i), flags) :
#endif
                   eligible_for_delay (jump_insn, i - 1,
                                       XVECEXP (pat, 0, i), flags)))
              break;
        }
      else
        i = 0;

      if (i == XVECLEN (pat, 0))
        continue;

      /* We have to do something with this insn.  If it is an unconditional
         RETURN, delete the SEQUENCE and output the individual insns,
         followed by the RETURN.  Then set things up so we try to find
         insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
        {
          rtx prev = PREV_INSN (insn);

          delete_insn (insn);
          for (i = 1; i < XVECLEN (pat, 0); i++)
            prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

          insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
          emit_barrier_after (insn);

          if (slots)
            obstack_ptr_grow (&unfilled_slots_obstack, insn);
        }
      else
        /* It is probably more efficient to keep this with its current
           delay slot as a branch to a RETURN.  */
        reorg_redirect_jump (jump_insn, real_return_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);

  fill_simple_delay_slots (first, 1);
  fill_simple_delay_slots (first, 0);
}
#endif /* HAVE_return */
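
/* Illustration (a hypothetical sketch): a filled jump to the end-of-function
   label,

       (sequence [(jump_insn ... end_of_function_label) (insn A)])

   can become a RETURN that keeps A in its delay slot when A is eligible for
   the RETURN's slots; otherwise the jump is redirected to the best existing
   RETURN label instead.  */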

/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int i;
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;

  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
        max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
        epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int *));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
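
  /* Clarifying note (not in the original comments): uid_to_ruid maps an
     INSN_UID to the insn's ordinal position in the chain, so the relative
     order of two insns A and B can be tested as
     uid_to_ruid[INSN_UID (A)] < uid_to_ruid[INSN_UID (B)].  */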

  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
        continue;

      if (num_delay_slots (insn) > 0)
        obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && JUMP_LABEL (insn) != 0
          && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
              != JUMP_LABEL (insn)))
        redirect_jump (insn, target);
    }

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.  If the
     frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
        SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0
      && GET_CODE (current_function_return_rtx) == REG)
    mark_referenced_resources (current_function_return_rtx,
                               &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i])
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);
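
  /* Illustration (hypothetical): on a target that returns values in a single
     hard register and needs no frame pointer, end_of_function_needs would now
     record memory, the stack pointer, that return register, and any registers
     the user has declared global.  */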

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while (epilogue_insn = next_nonnote_insn (epilogue_insn))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table
    = (struct target_info **) alloca ((TARGET_HASH_PRIME
                                       * sizeof (struct target_info *)));
  bzero ((char *) target_hash_table,
         TARGET_HASH_PRIME * sizeof (struct target_info *));

  bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
  bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (first, 1);
      fill_simple_delay_slots (first, 0);
      fill_eager_delay_slots (first);
      relax_delay_slots (first);
    }

  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
          && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
        next = delete_insn (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);

  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
           reorg_pass_number < MAX_REORG_PASSES;
           reorg_pass_number++)
        {
          fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
          for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
            {
              need_comma = 0;
              fprintf (file, ";; Reorg function #%d\n", i);

              fprintf (file, ";; %d insns needing delay slots\n;; ",
                       num_insns_needing_delays[i][reorg_pass_number]);

              for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
                if (num_filled_delays[i][j][reorg_pass_number])
                  {
                    if (need_comma)
                      fprintf (file, ", ");
                    need_comma = 1;
                    fprintf (file, "%d got %d delays",
                             num_filled_delays[i][j][reorg_pass_number], j);
                  }
              fprintf (file, "\n");
            }
        }
    }
}
#endif /* DELAY_SLOTS */