/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995, 1996 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:
   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shortened (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.
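
   As an editorial illustration (hypothetical assembly, not taken from
   any one machine above): on a machine with a single delay slot, the
   sequence

	add	r4,r4,1
	beq	r5,L5
	nop			; unfilled slot wastes a cycle

   can be rewritten by moving the independent add into the slot:

	beq.n	r5,L5
	add	r4,r4,1		; executes in the branch delay slot

   The add then costs no extra time whether or not the branch is taken,
   reducing the branch penalty to zero as described above.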
   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.
   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-flags.h"
#include "insn-attr.h"
/* Import list of registers used as spill regs from reload.  */
extern HARD_REG_SET used_spill_regs;
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;
/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base \
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next \
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
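
/* Illustrative sketch (an editorial addition, not part of the original
   sources): an insn whose slots still need filling is appended with
   obstack_ptr_grow, and the list is rescanned through the macros above,
   which must be recomputed at each use because growing the obstack may
   move its base.  */
#if 0
{
  rtx *p;

  /* Record INSN as still needing its delay slots filled.  */
  obstack_ptr_grow (&unfilled_slots_obstack, insn);

  /* Walk every recorded insn; entries may be zeroed once handled.  */
  for (p = unfilled_slots_base; p < unfilled_slots_next; p++)
    if (*p != 0)
      ;				/* ... examine the insn *p here ...  */
}
#endif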
/* This structure is used to indicate which hardware resources are set or
   needed by insns so far.  */

struct resources
{
  char memory;			/* Insn sets or needs a memory location.  */
  char unch_memory;		/* Insn sets or needs a "unchanging" MEM.  */
  char volatil;			/* Insn sets or needs a volatile memory loc.  */
  char cc;			/* Insn sets or needs the condition codes.  */
  HARD_REG_SET regs;		/* Which registers are set or needed.  */
};
/* Macro to clear all resources.  */
#define CLEAR_RESOURCE(RES)	\
 do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
      CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
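
/* Usage sketch (editorial addition): the typical pattern in this file is
   to clear a local resource set, accumulate what an insn touches, and
   then test for conflicts, as insn_references_resource_p does below.  */
#if 0
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, 0);
  if (resource_conflicts_p (&insn_res, res))
    ;				/* INSN and RES share some resource.  */
}
#endif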
/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};
#define TARGET_HASH_PRIME 257

/* Define the hash table itself.  */

static struct target_info **target_hash_table;
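
/* Lookup sketch (editorial addition): liveness information for a target
   insn is found by hashing its INSN_UID and walking the bucket chain,
   exactly as add_to_delay_list does below.  */
#if 0
{
  struct target_info *tinfo;

  for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
       tinfo; tinfo = tinfo->next)
    if (tinfo->uid == INSN_UID (insn))
      break;			/* Found the cached entry for INSN.  */
}
#endif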
/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;
/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
static void mark_referenced_resources PROTO((rtx, struct resources *, int));
static void mark_set_resources	PROTO((rtx, struct resources *, int, int));
static int stop_search_p	PROTO((rtx, int));
static int resource_conflicts_p	PROTO((struct resources *,
				       struct resources *));
static int insn_references_resource_p PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p	PROTO((rtx, struct resources *, int));
static rtx find_end_label	PROTO((void));
static rtx emit_delay_sequence	PROTO((rtx, rtx, int, int));
static rtx add_to_delay_list	PROTO((rtx, rtx));
static void delete_from_delay_slot PROTO((rtx));
static void delete_scheduled_jump PROTO((rtx));
static void note_delay_statistics PROTO((int, int));
static rtx optimize_skip	PROTO((rtx));
static int get_jump_flags	PROTO((rtx, rtx));
static int rare_destination	PROTO((rtx));
static int mostly_true_jump	PROTO((rtx, rtx));
static rtx get_branch_condition	PROTO((rtx, rtx));
static int condition_dominates_p PROTO((rtx, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns PROTO((rtx, rtx));
static rtx redundant_insn	PROTO((rtx, rtx, rtx));
static int own_thread_p		PROTO((rtx, rtx, int));
static int find_basic_block	PROTO((rtx));
static void update_block	PROTO((rtx, rtx));
static int reorg_redirect_jump	PROTO((rtx, rtx));
static void update_reg_dead_notes PROTO((rtx, rtx));
static void fix_reg_dead_note	PROTO((rtx, rtx));
static void update_reg_unused_notes PROTO((rtx, rtx));
static void update_live_status	PROTO((rtx, rtx));
static rtx next_insn_no_annul	PROTO((rtx));
static void mark_target_live_regs PROTO((rtx, struct resources *));
static void fill_simple_delay_slots PROTO((rtx, int));
static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
					 int, int, int, int *));
static void fill_eager_delay_slots PROTO((rtx));
static void relax_delay_slots	PROTO((rtx));
static void make_return_insns	PROTO((rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_CALLED_ROUTINE
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

static void
mark_referenced_resources (x, res, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     register int include_delayed_effects;
{
  register enum rtx_code code = GET_CODE (x);
  register int i, j;
  register char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_referenced_resources (SUBREG_REG (x), res, 0);
      else
	{
	  int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	  int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  for (i = regno; i < last_regno; i++)
	    SET_HARD_REG_BIT (res->regs, i);
	}
      return;

    case REG:
      for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 anything.  */
      if (RTX_UNCHANGING_P (x))
	res->unch_memory = 1;
      else
	res->memory = 1;
      res->volatil = MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case UNSPEC_VOLATILE:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case ASM_OPERANDS:
      res->volatil = MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */
      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, 0);

      x = SET_DEST (x);
      if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
	mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (GET_CODE (x) == MEM)
	mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  rtx link;
	  rtx next = NEXT_INSN (x);

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      next = NEXT_INSN (NEXT_INSN (insn));
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      if (GET_CODE (sequence) != SEQUENCE)
		abort ();
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns non-zero.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link;
	       link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == USE)
	      {
		for (i = 1; i < seq_size; i++)
		  {
		    rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		    if (GET_CODE (slot_pat) == SET
			&& rtx_equal_p (SET_DEST (slot_pat),
					SET_DEST (XEXP (link, 0))))
		      break;
		  }
		if (i >= seq_size)
		  mark_referenced_resources (SET_DEST (XEXP (link, 0)),
					     res, 0);
	      }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
/* Given X, a part of an insn, and a pointer to a `struct resources', RES,
   indicate which resources are modified by the insn.  If
   INCLUDE_CALLED_ROUTINE is nonzero, also mark resources potentially set by
   the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

static void
mark_set_resources (x, res, in_dest, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     int in_dest;
     int include_delayed_effects;
{
  register enum rtx_code code;
  register int i, j;
  register char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (include_delayed_effects)
	{
	  rtx next = NEXT_INSN (x);
	  rtx prev = PREV_INSN (x);
	  rtx link;

	  res->cc = res->memory = 1;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (call_used_regs[i] || global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* If X is part of a delay slot sequence, then NEXT should be
	     the first insn after the sequence.  */
	  if (NEXT_INSN (prev) != x)
	    next = NEXT_INSN (NEXT_INSN (prev));

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (include_delayed_effects
			   || GET_CODE (SET_SRC (x)) != CALL),
			  0);

      mark_set_resources (SET_SRC (x), res, 0, 0);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0,
			      include_delayed_effects);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, 0);
      mark_set_resources (XEXP (x, 1), res, 0, 0);
      mark_set_resources (XEXP (x, 2), res, 0, 0);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory = RTX_UNCHANGING_P (x);
	  res->volatil = MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, 0);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (GET_CODE (SUBREG_REG (x)) != REG)
	    mark_set_resources (SUBREG_REG (x), res,
				in_dest, include_delayed_effects);
	  else
	    {
	      int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	      int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      for (i = regno; i < last_regno; i++)
		SET_HARD_REG_BIT (res->regs, i);
	    }
	}
      return;

    case REG:
      if (in_dest)
	for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	  SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest,
			    include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest,
			      include_delayed_effects);
	break;
      }
}
/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     register rtx insn;
     int labels_p;
{
  if (insn == 0)
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      abort ();
    }
}
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_CALLED_ROUTINE is set, return if the called
   routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_CALLED_ROUTINE is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }
  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);
#ifdef HAVE_return
      if (HAVE_return)
	{
	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);
	  emit_barrier ();
	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
#endif
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */

static rtx
emit_delay_sequence (insn, list, length, avail)
     rtx insn;
     rtx list;
     int length;
     int avail;
{
  register int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);
  else
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);
  else
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  return seq_insn;
}
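
/* For orientation (editorial note, not from the original sources): after
   emit_delay_sequence, the insn requiring delay slots is element 0 of a
   SEQUENCE whose remaining elements are the delay slot insns, e.g.

	(insn (sequence [(jump_insn ...)	; element 0: the branch
			 (insn ...)]))		; elements 1..n: the slots

   which is why the rest of this file addresses slots with
   XVECEXP (PATTERN (insn), 0, i) for i >= 1.  */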
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      struct target_info *tinfo;

      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  {
	    tinfo->block = -1;
	    break;
	  }

      return gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
/* Delete INSN from the delay slot of the insn that it is in.  This may
   produce an insn without anything in its delay slots.  */

static void
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);
}
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_insn (insn);
}
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the branch target.

	(orig)		(skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx
optimize_skip (insn)
     register rtx insn;
{
  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
      || (next_trial != 0
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Non conditional branches return no direction information and
   are predicted as very likely taken.  */

static int
get_jump_flags (insn, label)
     rtx insn, label;
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If insn is a conditional branch, call mostly_true_jump to determine
     the branch prediction.

     Non conditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  abort ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
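
/* Usage sketch (editorial addition): the flags computed above feed the
   machine-generated eligibility tests, as in the redirect checks below.  */
#if 0
{
  int flags = get_jump_flags (jump, newlabel);

  if (eligible_for_delay (jump, 0, trial, flags))
    ;				/* TRIAL may occupy JUMP's first slot.  */
}
#endif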
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */

static int
rare_destination (insn)
     rtx insn;
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;

	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn;
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
    return -1;

  if (target_label)
    {
      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  return 2;

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
	  return 1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 1;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
	return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
	return 1;
      break;
    }

  /* Predict backward branches usually take, forward branches usually not.  If
     we don't know whether this is forward or backward, assume the branch
     will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
		    GET_MODE (XEXP (src, 0)),
		    XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

  return 0;
}
/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
{
  int flags, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */
static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
			      pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int i;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.  */

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay list.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul = 1,
	     eligible_for_annul_false (insn, total_slots_filled, trial, flags)))
	{
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  *pannul_p = must_annul;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
{
  int i;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! *pannul_p
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (*pannul_p = 1,
	     eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  return delay_list;
}
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */

static void
try_merge_delay_insns (insn, thread)
     rtx insn, thread;
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     NEXT_TO_MATCH.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    mark_referenced_resources (next_to_match, &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (GET_CODE (trial) == INSN
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_insn (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx (INSN_LIST, VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	  if (! annul_p)
	    mark_referenced_resources (next_to_match, &needed, 1);
	}

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, 1);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  update_block (dtrial, thread);
		  delete_from_delay_slot (dtrial);
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx (INSN_LIST, SImode, dtrial,
					merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not any more.  In that case delete all the
     merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn in
     the delay list so that we know that it isn't only being used at the
     target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_from_delay_slot (XEXP (merged_insns, 0));
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_insn (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
static rtx
redundant_insn (insn, target, delay_list)
     rtx insn;
     rtx target;
     rtx delay_list;
{
  rtx target_main = target;
  rtx ipat = PATTERN (insn);
  rtx trial, pat;
  struct resources needed, set;
  int i;

  /* Scan backwards looking for a match.  */
  for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) == CODE_LABEL)
	return 0;

      if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
	continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* Stop for a CALL and its delay slots because it is difficult to
	     track its resource needs correctly.  */
	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
	    return 0;

	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
	     slots because it is difficult to track its resource needs
	     correctly.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
		&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat))
	      break;

	  /* If found a match, exit this loop early.  */
	  if (i > 0)
	    break;
	}

      else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat))
	break;
    }

  /* If we didn't find an insn that matches, return 0.  */
  if (trial == 0)
    return 0;

  /* See what resources this insn sets and needs.  If they overlap, or
     if this insn references CC0, it can't be redundant.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);
  mark_set_resources (insn, &set, 0, 1);
  mark_referenced_resources (insn, &needed, 1);

  /* If TARGET is a SEQUENCE, get the main insn.  */
  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
    target_main = XVECEXP (PATTERN (target), 0, 0);

  if (resource_conflicts_p (&needed, &set)
#ifdef HAVE_cc0
      || reg_mentioned_p (cc0_rtx, ipat)
#endif
      /* The insn requiring the delay may not set anything needed or set by
	 INSN.  */
      || insn_sets_resource_p (target_main, &needed, 1)
      || insn_sets_resource_p (target_main, &set, 1))
    return 0;

  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
  needed.memory |= set.memory;
  needed.unch_memory |= set.unch_memory;
  IOR_HARD_REG_SET (needed.regs, set.regs);

  /* This insn isn't redundant if it conflicts with an insn that either is
     or will be in a delay slot of TARGET.  */

  while (delay_list)
    {
      if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
	return 0;
      delay_list = XEXP (delay_list, 1);
    }

  if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
    for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
      if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
	return 0;

  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something INSN uses or sets.  */

  for (trial = PREV_INSN (target);
       trial && GET_CODE (trial) != CODE_LABEL;
       trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
	  && GET_CODE (trial) != JUMP_INSN)
	continue;

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* If this is a CALL_INSN and its delay slots, it is hard to track
	     the resource needs properly, so give up.  */
	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
	    return 0;

	  /* If this is an INSN or JUMP_INSN with delayed effects, it
	     is hard to track the resource needs properly, so give up.  */

#ifdef INSN_SETS_ARE_DELAYED
	  if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

#ifdef INSN_REFERENCES_ARE_DELAYED
	  if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
	    return 0;
#endif

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    {
	      rtx candidate = XVECEXP (pat, 0, i);

	      /* If an insn will be annulled if the branch is false, it isn't
		 considered as a possible duplicate insn.  */
	      if (rtx_equal_p (PATTERN (candidate), ipat)
		  && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
			&& INSN_FROM_TARGET_P (candidate)))
		{
		  /* Show that this insn will be used in the sequel.  */
		  INSN_FROM_TARGET_P (candidate) = 0;
		  return candidate;
		}

	      /* Unless this is an annulled insn from the target of a branch,
		 we must stop if it sets anything needed or set by INSN.  */
	      if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
		   || ! INSN_FROM_TARGET_P (candidate))
		  && insn_sets_resource_p (candidate, &needed, 1))
		return 0;
	    }

	  /* If the insn requiring the delay slot conflicts with INSN, we
	     must stop.  */
	  if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
	    return 0;
	}
      else
	{
	  /* See if TRIAL is the same as INSN.  */
	  pat = PATTERN (trial);
	  if (rtx_equal_p (pat, ipat))
	    return trial;

	  /* Can't go any further if TRIAL conflicts with INSN.  */
	  if (insn_sets_resource_p (trial, &needed, 1))
	    return 0;
	}
    }

  return 0;
}
/* Return 1 if THREAD can only be executed in one way.  If LABEL is non-zero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is non-zero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */

static int
own_thread_p (thread, label, allow_fallthrough)
     rtx thread;
     rtx label;
     int allow_fallthrough;
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0)
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
	&& (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    if (insn == 0
	|| GET_CODE (insn) == CODE_LABEL
	|| (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}
/* Find the number of the basic block that starts closest to INSN.  Return -1
   if we couldn't find such a basic block.  */

static int
find_basic_block (insn)
     rtx insn;
{
  int i;

  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */

  for (insn = prev_nonnote_insn (insn);
       insn && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    ;

  /* The start of the function is basic block zero.  */
  if (insn == 0)
    return 0;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && GET_CODE (insn) == CODE_LABEL;
       insn = next_nonnote_insn (insn))
    {
      for (i = 0; i < n_basic_blocks; i++)
	if (insn == basic_block_head[i])
	  return i;
    }

  return -1;
}
/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (insn, where)
     rtx insn;
     rtx where;
{
  int b;

  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  b = find_basic_block (insn);
  if (b != -1)
    bb_ticks[b]++;
}
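
/* Added commentary, not part of the original code: the marker emitted by
   update_block is a USE whose operand is the moved insn itself,

	(insn (use (insn ...)))

   which is why mark_target_live_regs and find_dead_or_set_registers test
   GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i' to detect
   these wrappers and examine the underlying insn.  */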
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (jump, nlabel)
     rtx jump;
     rtx nlabel;
{
  int b = find_basic_block (jump);

  if (b != -1)
    bb_ticks[b]++;

  return redirect_jump (jump, nlabel);
}
/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */

static void
update_reg_dead_notes (insn, delayed_insn)
     rtx insn, delayed_insn;
{
  rtx p, link, next;

  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || GET_CODE (XEXP (link, 0)) != REG)
	  continue;

	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
	  {
	    /* Move the REG_DEAD note from P to INSN.  */
	    remove_note (p, link);
	    XEXP (link, 1) = REG_NOTES (insn);
	    REG_NOTES (insn) = link;
	  }
      }
}
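
/* Added commentary, not part of the original code: schematically, with
   INSN using r4 and a later insn P carrying the note (register name
   invented for illustration),

	insn:  ... use of r4 ...	;; moving into a delay slot
	p:     ...			;; notes: (expr_list:REG_DEAD (reg r4))

   the note is unlinked from P's REG_NOTES chain and pushed onto INSN's,
   so the death is recorded where the last use will actually end up.  */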
/* Called when an insn redundant with start_insn is deleted.  If there
   is a REG_DEAD note for the target of start_insn between start_insn
   and stop_insn, then the REG_DEAD note needs to be deleted since the
   value no longer dies there.

   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
   confused into thinking the register is dead.  */

static void
fix_reg_dead_note (start_insn, stop_insn)
     rtx start_insn, stop_insn;
{
  rtx p, link, next;

  for (p = next_nonnote_insn (start_insn); p != stop_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || GET_CODE (XEXP (link, 0)) != REG)
	  continue;

	if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
	  {
	    remove_note (p, link);
	    return;
	  }
      }
}
/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.

   This handles the case of udivmodXi4 instructions which optimize their
   output depending on whether any REG_UNUSED notes are present.
   We must make sure that INSN calculates as many results as REDUNDANT_INSN
   does.  */

static void
update_reg_unused_notes (insn, redundant_insn)
     rtx insn, redundant_insn;
{
  rtx link, next;

  for (link = REG_NOTES (insn); link; link = next)
    {
      next = XEXP (link, 1);

      if (REG_NOTE_KIND (link) != REG_UNUSED
	  || GET_CODE (XEXP (link, 0)) != REG)
	continue;

      if (! find_regno_note (redundant_insn, REG_UNUSED,
			     REGNO (XEXP (link, 0))))
	remove_note (insn, link);
    }
}
/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Used only by the next two functions.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (dest, x)
     rtx dest;
     rtx x;
{
  int first_regno, last_regno;
  int i;

  if (GET_CODE (dest) != REG
      && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
    return;

  if (GET_CODE (dest) == SUBREG)
    first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
  else
    first_regno = REGNO (dest);

  last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}
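
/* Added commentary, not part of the original code: note_stores hands
   update_live_status one (dest, x) pair per store.  For example, on a
   target where DImode occupies two word registers,

	(set (reg:DI 4) ...)		;; livens hard regs 4 and 5
	(clobber (reg:SI 3))		;; deadens hard reg 3

   and a SET also clears the regs in PENDING_DEAD_REGS, since a pending
   REG_DEAD note no longer applies once the register is written again.  */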
/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx
next_insn_no_annul (insn)
     rtx insn;
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
	  insn = NEXT_INSN (insn);

      insn = NEXT_INSN (insn);
      if (insn && GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}
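
/* Added commentary, not part of the original code: inside the SEQUENCE of
   an annulling branch, slots copied from the branch target are flagged
   with INSN_FROM_TARGET_P.  next_insn_no_annul skips exactly those, so a
   forward scan using it models the branch-not-taken execution path.  */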
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */

static rtx
find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
     rtx target;
     struct resources *res;
     rtx *jump_target;
     int jump_count;
     struct resources set, needed;
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  /* All spill registers are dead at a label, so kill all of the
	     ones that aren't needed also.  */
	  COPY_HARD_REG_SET (scratch, used_spill_regs);
	  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, scratch);
	  continue;

	case INSN:
	case CALL_INSN:
	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (GET_CODE (this_jump_insn) == JUMP_INSN)
		    break;
		}
	    }
	}

      if (GET_CODE (this_jump_insn) == JUMP_INSN)
	{
	  if (jump_count++ < 10)
	    {
	      if (simplejump_p (this_jump_insn)
		  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
		{
		  next = JUMP_LABEL (this_jump_insn);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (condjump_p (this_jump_insn)
		       || condjump_in_parallel_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, 1);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0, 1);

		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      mark_set_resources (insn, &set, 0, 1);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, 1);
		      target_set = set;
		    }

		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
					      &target_res, 0, jump_count,
					      target_set, needed);
		  find_dead_or_set_registers (next,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, 1);
      mark_set_resources (insn, &set, 0, 1);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
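
/* Added commentary, not part of the original code: *RES holds registers
   still presumed live.  In the conditional-jump case above, TARGET_RES and
   FALLTHROUGH_RES each drop only the registers their own path sets before
   using; IORing the two and ANDing into *RES therefore keeps a register
   live if either path might still use it -- a register is marked dead only
   when it is dead on both paths.  */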
/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status from basic_block_live_regs and
   reg_renumber.  (By starting at a label following a BARRIER, we are immune
   to actions taken by reload and jump.)  Then we scan all insns between
   that point and our target.  For each CLOBBER (or for call-clobbered regs
   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
   a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  */
static void
mark_target_live_regs (target, res)
     rtx target;
     struct resources *res;
{
  int b = -1;
  int i;
  struct target_info *tinfo;
  rtx insn;
  rtx jump_insn, jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target == 0)
    {
      *res = end_of_function_needs;
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = res->unch_memory = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
       tinfo; tinfo = tinfo->next)
    if (tinfo->uid == INSN_UID (target))
      break;

  /* Start by getting the basic block number.  If we have saved information,
     we can get it from there unless the insn at the start of the basic block
     has been deleted.  */
  if (tinfo && tinfo->block != -1
      && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
    b = tinfo->block;

  if (b == -1)
    b = find_basic_block (target);

  if (tinfo)
    {
      /* If the information is up-to-date, use it.  Otherwise, we will
	 update it below.  */
      if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	{
	  COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	  return;
	}
    }
  else
    {
      /* Allocate a place to put our results and chain it into the
	 hash table.  */
      tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
      tinfo->uid = INSN_UID (target);
      tinfo->block = b;
      tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
      target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = basic_block_live_at_start[b];
      int offset, j;
      REGSET_ELT_TYPE bit;
      int regno;
      rtx start_insn, stop_insn;

      /* Compute hard regs live at start of block -- this is the real hard regs
	 marked live, plus live pseudo regs that have been renumbered to
	 hard regs.  */

#ifdef HARD_REG_SET
      current_live_regs = *regs_live;
#else
      COPY_HARD_REG_SET (current_live_regs, regs_live);
#endif

      for (offset = 0, i = 0; offset < regset_size; offset++)
	{
	  if (regs_live[offset] == 0)
	    i += REGSET_ELT_BITS;
	  else
	    for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
	      if ((regs_live[offset] & bit)
		  && (regno = reg_renumber[i]) >= 0)
		for (j = regno;
		     j < regno + HARD_REGNO_NREGS (regno,
						   PSEUDO_REGNO_MODE (i));
		     j++)
		  SET_HARD_REG_BIT (current_live_regs, j);
	}

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
      stop_insn = target;

      if (GET_CODE (start_insn) == INSN
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (GET_CODE (stop_insn) == INSN
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx real_insn = insn;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if (INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	      && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	    real_insn = XEXP (PATTERN (insn), 0);

	  if (GET_CODE (real_insn) == CALL_INSN)
	    {
	      /* CALL clobbers all call-used regs that aren't fixed except
		 sp, ap, and fp.  Do this before setting the result of the
		 call live.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (call_used_regs[i]
		    && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
		    && i != ARG_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
		    && i != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		    && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#endif
#ifdef PIC_OFFSET_TABLE_REGNUM
		    && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
		    )
		  CLEAR_HARD_REG_BIT (current_live_regs, i);

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((GET_CODE (real_insn) == INSN
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || GET_CODE (real_insn) == JUMP_INSN
	      || GET_CODE (real_insn) == CALL_INSN)
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && GET_CODE (XEXP (link, 0)) == REG
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  {
		    int first_regno = REGNO (XEXP (link, 0));
		    int last_regno
		      = (first_regno
			 + HARD_REGNO_NREGS (first_regno,
					     GET_MODE (XEXP (link, 0))));

		    for (i = first_regno; i < last_regno; i++)
		      SET_HARD_REG_BIT (pending_dead_regs, i);
		  }

	      note_stores (PATTERN (real_insn), update_live_status);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && GET_CODE (XEXP (link, 0)) == REG
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  {
		    int first_regno = REGNO (XEXP (link, 0));
		    int last_regno
		      = (first_regno
			 + HARD_REGNO_NREGS (first_regno,
					     GET_MODE (XEXP (link, 0))));

		    for (i = first_regno; i < last_regno; i++)
		      CLEAR_HARD_REG_BIT (current_live_regs, i);
		  }
	    }

	  else if (GET_CODE (real_insn) == CODE_LABEL)
	    {
	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (GET_CODE (real_insn) == NOTE
		   && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      tinfo->block = b;
      tinfo->bb_tick = bb_ticks[b];
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
					  set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  The only things that are
     live are those that are live using the above test and the test below.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx stop_insn = next_active_insn (jump_insn);

      mark_target_live_regs (next_active_insn (jump_target), &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, 1);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, 1);
	}

      AND_HARD_REG_SET (res->regs, new_resources.regs);
    }

  COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
}
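
/* Added commentary, not part of the original code: the cache used above is
   a chained hash keyed on INSN_UID.  The lookup is simply

	for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	     tinfo; tinfo = tinfo->next)
	  if (tinfo->uid == INSN_UID (target))
	    break;

   and an entry is only trusted while tinfo->bb_tick still matches
   bb_ticks[] for its block; update_block and reorg_redirect_jump bump the
   tick precisely to invalidate stale liveness information.  */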
/* Scan a function looking for insns that need a delay slot and find insns to
   put into the delay slot.

   NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
   as calls).  We do these first since we don't want jump insns (that are
   easier to fill) to get the only insns that could be used for non-jump insns.
   When it is zero, only try to fill JUMP_INSNs.

   When slots are filled in this manner, the insns (including the
   delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
   it is possible to tell whether a delay slot has really been filled
   or not.  `final' knows how to deal with this, by communicating
   through FINAL_SEQUENCE.  */
static void
fill_simple_delay_slots (first, non_jumps_p)
     rtx first;
     int non_jumps_p;
{
  register rtx insn, pat, trial, next_trial;
  register int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
  struct resources needed, set;
  int slots_to_fill, slots_filled;
  rtx delay_list;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      int flags;
      /* Get the next insn to fill.  If it has already had any slots assigned,
	 we can't do anything with it.  Maybe we'll improve this later.  */

      insn = unfilled_slots_base[i];
      if (insn == 0
	  || INSN_DELETED_P (insn)
	  || (GET_CODE (insn) == INSN
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
	  || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
	continue;

      if (GET_CODE (insn) == JUMP_INSN)
	flags = get_jump_flags (insn, JUMP_LABEL (insn));
      else
	flags = get_jump_flags (insn, NULL_RTX);
      slots_to_fill = num_delay_slots (insn);
      if (slots_to_fill == 0)
	abort ();

      /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
	 says how many.  After initialization, first try optimizing

	 call _foo		call _foo
	 nop			add %o7,.-L1,%o7
	 b,a L1
	 nop

	 If this case applies, the delay slot of the call is filled with
	 the unconditional jump.  This is done first to avoid having the
	 delay slot of the call filled in the backward scan.  Also, since
	 the unconditional jump is likely to also have a delay slot, that
	 insn must exist when it is subsequently scanned.

	 This is tried on each insn with delay slots as some machines
	 have insns which perform calls, but are not represented as
	 call insns.  */

      slots_filled = 0;
      delay_list = 0;

      if ((trial = next_active_insn (insn))
	  && GET_CODE (trial) == JUMP_INSN
	  && simplejump_p (trial)
	  && eligible_for_delay (insn, slots_filled, trial, flags)
	  && no_labels_between_p (insn, trial))
	{
	  rtx *tmp;
	  slots_filled++;
	  delay_list = add_to_delay_list (trial, delay_list);

	  /* TRIAL may have had its delay slot filled, then unfilled.  When
	     the delay slot is unfilled, TRIAL is placed back on the unfilled
	     slots obstack.  Unfortunately, it is placed on the end of the
	     obstack, not in its original location.  Therefore, we must search
	     from entry i + 1 to the end of the unfilled slots obstack to
	     try and find TRIAL.  */
	  tmp = &unfilled_slots_base[i + 1];
	  while (*tmp != trial && tmp != unfilled_slots_next)
	    tmp++;

	  /* Remove the unconditional jump from consideration for delay slot
	     filling and unthread it.  */
	  if (*tmp == trial)
	    *tmp = 0;
	  {
	    rtx next = NEXT_INSN (trial);
	    rtx prev = PREV_INSN (trial);
	    if (prev)
	      NEXT_INSN (prev) = next;
	    if (next)
	      PREV_INSN (next) = prev;
	  }
	}

      /* Now, scan backwards from the insn to search for a potential
	 delay-slot candidate.  Stop searching when a label or jump is hit.

	 For each candidate, if it is to go into the delay slot (moved
	 forward in execution sequence), it must not need or set any resources
	 that were set by later insns and must not set any resources that
	 are needed for those insns.

	 The delay slot insn itself sets resources unless it is a call
	 (in which case the called routine, not the insn itself, is doing
	 the setting).  */

      if (slots_filled < slots_to_fill)
	{
	  CLEAR_RESOURCE (&needed);
	  CLEAR_RESOURCE (&set);
	  mark_set_resources (insn, &set, 0, 0);
	  mark_referenced_resources (insn, &needed, 0);

	  for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
	       trial = next_trial)
	    {
	      next_trial = prev_nonnote_insn (trial);

	      /* This must be an INSN or CALL_INSN.  */
	      pat = PATTERN (trial);

	      /* USE and CLOBBER at this level was just for flow; ignore it.  */
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
		continue;

	      /* Check for resource conflict first, to avoid unnecessary
		 splitting.  */
	      if (! insn_references_resource_p (trial, &set, 1)
		  && ! insn_sets_resource_p (trial, &set, 1)
		  && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
		  /* Can't separate set of cc0 from its use.  */
		  && ! (reg_mentioned_p (cc0_rtx, pat)
			&& ! sets_cc0_p (pat))
#endif
		  )
		{
		  trial = try_split (pat, trial, 1);
		  next_trial = prev_nonnote_insn (trial);
		  if (eligible_for_delay (insn, slots_filled, trial, flags))
		    {
		      /* In this case, we are searching backward, so if we
			 find insns to put on the delay list, we want
			 to put them at the head, rather than the
			 tail, of the list.  */

		      update_reg_dead_notes (trial, insn);
		      delay_list = gen_rtx (INSN_LIST, VOIDmode,
					    trial, delay_list);
		      update_block (trial, trial);
		      delete_insn (trial);
		      if (slots_to_fill == ++slots_filled)
			break;
		      continue;
		    }
		}

	      mark_set_resources (trial, &set, 0, 1);
	      mark_referenced_resources (trial, &needed, 1);
	    }
	}

      /* If all needed slots haven't been filled, we come here.  */

      /* Try to optimize case of jumping around a single insn.  */
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
      if (slots_filled != slots_to_fill
	  && delay_list == 0
	  && GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn)))
	{
	  delay_list = optimize_skip (insn);
	  if (delay_list)
	    slots_filled += 1;
	}
#endif

      /* Try to get insns from beyond the insn needing the delay slot.
	 These insns can neither set nor reference resources set in insns being
	 skipped, cannot set resources in the insn being skipped, and, if this
	 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
	 call might not return).

	 There used to be code which continued past the target label if
	 we saw all uses of the target label.  This code did not work,
	 because it failed to account for some instructions which were
	 both annulled and marked as from the target.  This can happen as a
	 result of optimize_skip.  Since this code was redundant with
	 fill_eager_delay_slots anyways, it was just deleted.  */

      if (slots_filled != slots_to_fill
	  && (GET_CODE (insn) != JUMP_INSN
	      || ((condjump_p (insn) || condjump_in_parallel_p (insn))
		  && ! simplejump_p (insn)
		  && JUMP_LABEL (insn) != 0)))
	{
	  rtx target = 0;
	  int maybe_never = 0;
	  struct resources needed_at_jump;

	  CLEAR_RESOURCE (&needed);
	  CLEAR_RESOURCE (&set);

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      mark_set_resources (insn, &set, 0, 1);
	      mark_referenced_resources (insn, &needed, 1);
	      maybe_never = 1;
	    }
	  else
	    {
	      mark_set_resources (insn, &set, 0, 1);
	      mark_referenced_resources (insn, &needed, 1);
	      if (GET_CODE (insn) == JUMP_INSN)
		target = JUMP_LABEL (insn);
	    }

	  for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
	    {
	      rtx pat, trial_delay;

	      next_trial = next_nonnote_insn (trial);

	      if (GET_CODE (trial) == CODE_LABEL
		  || GET_CODE (trial) == BARRIER)
		break;

	      /* We must have an INSN, JUMP_INSN, or CALL_INSN.  */
	      pat = PATTERN (trial);

	      /* Stand-alone USE and CLOBBER are just for flow.  */
	      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
		continue;

	      /* If this already has filled delay slots, get the insn needing
		 the delay slots.  */
	      if (GET_CODE (pat) == SEQUENCE)
		trial_delay = XVECEXP (pat, 0, 0);
	      else
		trial_delay = trial;

	      /* If this is a jump insn to our target, indicate that we have
		 seen another jump to it.  If we aren't handling a conditional
		 jump, stop our search.  Otherwise, compute the needs at its
		 target and add them to NEEDED.  */
	      if (GET_CODE (trial_delay) == JUMP_INSN)
		{
		  if (target == 0)
		    break;
		  else if (JUMP_LABEL (trial_delay) != target)
		    {
		      mark_target_live_regs
			(next_active_insn (JUMP_LABEL (trial_delay)),
			 &needed_at_jump);
		      needed.memory |= needed_at_jump.memory;
		      needed.unch_memory |= needed_at_jump.unch_memory;
		      IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
		    }
		}

	      /* See if we have a resource problem before we try to
		 split.   */
	      if (GET_CODE (pat) != SEQUENCE
		  && ! insn_references_resource_p (trial, &set, 1)
		  && ! insn_sets_resource_p (trial, &set, 1)
		  && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
		  && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
#endif
		  && ! (maybe_never && may_trap_p (pat))
		  && (trial = try_split (pat, trial, 0))
		  && eligible_for_delay (insn, slots_filled, trial, flags))
		{
		  next_trial = next_nonnote_insn (trial);
		  delay_list = add_to_delay_list (trial, delay_list);

#ifdef HAVE_cc0
		  if (reg_mentioned_p (cc0_rtx, pat))
		    link_cc0_insns (trial);
#endif

		  delete_insn (trial);
		  if (slots_to_fill == ++slots_filled)
		    break;
		  continue;
		}

	      mark_set_resources (trial, &set, 0, 1);
	      mark_referenced_resources (trial, &needed, 1);

	      /* Ensure we don't put insns between the setting of cc and the
		 comparison by moving a setting of cc into an earlier delay
		 slot since these insns could clobber the condition code.  */
	      set.cc = 1;

	      /* If this is a call or jump, we might not get here.  */
	      if (GET_CODE (trial_delay) == CALL_INSN
		  || GET_CODE (trial_delay) == JUMP_INSN)
		maybe_never = 1;
	    }

	  /* If there are slots left to fill and our search was stopped by an
	     unconditional branch, try the insn at the branch target.  We can
	     redirect the branch if it works.

	     Don't do this if the insn at the branch target is a branch.  */
	  if (slots_to_fill != slots_filled
	      && trial
	      && GET_CODE (trial) == JUMP_INSN
	      && simplejump_p (trial)
	      && (target == 0 || JUMP_LABEL (trial) == target)
	      && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
	      && ! (GET_CODE (next_trial) == INSN
		    && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
	      && GET_CODE (next_trial) != JUMP_INSN
	      && ! insn_references_resource_p (next_trial, &set, 1)
	      && ! insn_sets_resource_p (next_trial, &set, 1)
	      && ! insn_sets_resource_p (next_trial, &needed, 1)
#ifdef HAVE_cc0
	      && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
#endif
	      && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
	      && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
	      && eligible_for_delay (insn, slots_filled, next_trial, flags))
	    {
	      rtx new_label = next_active_insn (next_trial);

	      if (new_label != 0)
		new_label = get_label_before (new_label);
	      else
		new_label = find_end_label ();

	      delay_list
		= add_to_delay_list (copy_rtx (next_trial), delay_list);
	      slots_filled++;
	      reorg_redirect_jump (trial, new_label);

	      /* If we merged because we both jumped to the same place,
		 redirect the original insn also.  */
	      if (target)
		reorg_redirect_jump (insn, new_label);
	    }
	}

      /* If this is an unconditional jump, then try to get insns from the
	 target of the jump.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && simplejump_p (insn)
	  && slots_filled != slots_to_fill)
	delay_list
	  = fill_slots_from_thread (insn, const_true_rtx,
				    next_active_insn (JUMP_LABEL (insn)),
				    NULL, 1, 1,
				    own_thread_p (JUMP_LABEL (insn),
						  JUMP_LABEL (insn), 0),
				    0, slots_to_fill, &slots_filled);

      if (delay_list)
	unfilled_slots_base[i]
	  = emit_delay_sequence (insn, delay_list,
				 slots_filled, slots_to_fill);

      if (slots_to_fill == slots_filled)
	unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 0);
    }

#ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* See if the epilogue needs any delay slots.  Try to fill them if so.
     The only thing we can do is scan backwards from the end of the
     function.  If we did this in a previous pass, it is incorrect to do it
     again.  */
  if (current_function_epilogue_delay_list)
    return;

  slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
  if (slots_to_fill == 0)
    return;

  slots_filled = 0;
  CLEAR_RESOURCE (&set);

  /* The frame pointer and stack pointer are needed at the beginning of
     the epilogue, so instructions setting them can not be put in the
     epilogue delay slot.  However, everything else needed at function
     end is safe, so we don't want to use end_of_function_needs here.  */
  CLEAR_RESOURCE (&needed);
  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
	SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);

  for (trial = get_last_insn (); ! stop_search_p (trial, 1);
       trial = PREV_INSN (trial))
    {
      if (GET_CODE (trial) == NOTE)
	continue;
      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      if (! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
#ifdef HAVE_cc0
	  /* Don't want to mess with cc0 here.  */
	  && ! reg_mentioned_p (cc0_rtx, pat)
#endif
	  )
	{
	  trial = try_split (pat, trial, 1);
	  if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
	    {
	      /* Here as well we are searching backward, so put the
		 insns we find on the head of the list.  */

	      current_function_epilogue_delay_list
		= gen_rtx (INSN_LIST, VOIDmode, trial,
			   current_function_epilogue_delay_list);
	      mark_referenced_resources (trial, &end_of_function_needs, 1);
	      update_block (trial, trial);
	      delete_insn (trial);

	      /* Clear deleted bit so final.c will output the insn.  */
	      INSN_DELETED_P (trial) = 0;

	      if (slots_to_fill == ++slots_filled)
		break;
	      continue;
	    }
	}

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }

  note_delay_statistics (slots_filled, 0);
#endif
}
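
/* Added commentary, not part of the original code: once a slot is filled,
   emit_delay_sequence replaces the insn with a SEQUENCE whose element 0 is
   the insn needing the slot.  Schematically, for a one-slot call:

	(insn (sequence [
	    (call_insn ...)		;; the insn requiring the delay slot
	    (insn ...)			;; the insn filling the slot
	]))

   which is the shape tested throughout this file with
   GET_CODE (PATTERN (insn)) == SEQUENCE.  */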
/* Try to find insns to place in delay slots.

   INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
   or is an unconditional branch if CONDITION is const_true_rtx.
   *PSLOTS_FILLED is updated with the number of slots that we have filled.

   THREAD is a flow-of-control, either the insns to be executed if the
   branch is true or if the branch is false, THREAD_IF_TRUE says which.

   OPPOSITE_THREAD is the thread in the opposite direction.  It is used
   to see if any potential delay slot insns set things needed there.

   LIKELY is non-zero if it is extremely likely that the branch will be
   taken and THREAD_IF_TRUE is set.  This is used for the branch at the
   end of a loop back up to the top.

   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
   thread.  I.e., it is the fallthrough code of our jump or the target of the
   jump when we are the only jump going there.

   If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
   case, we can only take insns from the head of the thread for our delay
   slot.  We then adjust the jump to point after the insns we have taken.  */
static rtx
fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
			thread_if_true, own_thread, own_opposite_thread,
			slots_to_fill, pslots_filled)
     rtx insn;
     rtx condition;
     rtx thread, opposite_thread;
     int likely;
     int thread_if_true;
     int own_thread, own_opposite_thread;
     int slots_to_fill, *pslots_filled;
{
  rtx new_thread;
  rtx delay_list = 0;
  struct resources opposite_needed, set, needed;
  rtx trial;
  int lose = 0;
  int must_annul = 0;
  int flags;

  /* Validate our arguments.  */
  if ((condition == const_true_rtx && ! thread_if_true)
      || (! own_thread && ! thread_if_true))
    abort ();

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* If our thread is the end of subroutine, we can't get any delay
     insns from that.  */
  if (thread == 0)
    return 0;

  /* If this is an unconditional branch, nothing is needed at the
     opposite thread.  Otherwise, compute what is needed there.  */
  if (condition == const_true_rtx)
    CLEAR_RESOURCE (&opposite_needed);
  else
    mark_target_live_regs (opposite_thread, &opposite_needed);

  /* If the insn at THREAD can be split, do it here to avoid having to
     update THREAD and NEW_THREAD if it is done in the loop below.  Also
     initialize NEW_THREAD.  */

  new_thread = thread = try_split (PATTERN (thread), thread, 0);

  /* Scan insns at THREAD.  We are looking for an insn that can be removed
     from THREAD (it neither sets nor references resources that were set
     ahead of it and it doesn't set anything needed by the insns ahead of
     it) and that either can be placed in an annulling insn or isn't
     needed at OPPOSITE_THREAD.  */

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If we do not own this thread, we must stop as soon as we find
     something that we can't put in a delay slot, since all we can do
     is branch into THREAD at a later point.  Therefore, labels stop
     the search if this is not the `true' thread.  */

  for (trial = thread;
       ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
       trial = next_nonnote_insn (trial))
    {
      rtx pat, old_trial;

      /* If we have passed a label, we no longer own this thread.  */
      if (GET_CODE (trial) == CODE_LABEL)
	{
	  own_thread = 0;
	  continue;
	}

      pat = PATTERN (trial);
      if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	continue;

      /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
	 don't separate or copy insns that set and use CC0.  */
      if (! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
#ifdef HAVE_cc0
	  && ! (reg_mentioned_p (cc0_rtx, pat)
		&& (! own_thread || ! sets_cc0_p (pat)))
#endif
	  )
	{
	  rtx prior_insn;

	  /* If TRIAL is redundant with some insn before INSN, we don't
	     actually need to add it to the delay list; we can merely pretend
	     we did.  */
	  if (prior_insn = redundant_insn (trial, insn, delay_list))
	    {
	      fix_reg_dead_note (prior_insn, insn);
	      if (own_thread)
		{
		  update_block (trial, thread);
		  if (trial == thread)
		    {
		      thread = next_active_insn (thread);
		      if (new_thread == trial)
			new_thread = thread;
		    }

		  delete_insn (trial);
		}
	      else
		{
		  update_reg_unused_notes (prior_insn, trial);
		  new_thread = next_active_insn (trial);
		}

	      continue;
	    }

	  /* There are two ways we can win:  If TRIAL doesn't set anything
	     needed at the opposite thread and can't trap, or if it can
	     go into an annulled delay slot.  */
	  if (condition == const_true_rtx
	      || (! insn_sets_resource_p (trial, &opposite_needed, 1)
		  && ! may_trap_p (pat)))
	    {
	      old_trial = trial;
	      trial = try_split (pat, trial, 0);
	      if (new_thread == old_trial)
		new_thread = trial;
	      if (thread == old_trial)
		thread = trial;
	      pat = PATTERN (trial);
	      if (eligible_for_delay (insn, *pslots_filled, trial, flags))
		goto winner;
	    }
	  else if (0
#ifdef ANNUL_IFTRUE_SLOTS
		   || ! thread_if_true
#endif
#ifdef ANNUL_IFFALSE_SLOTS
		   || thread_if_true
#endif
		   )
	    {
	      old_trial = trial;
	      trial = try_split (pat, trial, 0);
	      if (new_thread == old_trial)
		new_thread = trial;
	      if (thread == old_trial)
		thread = trial;
	      pat = PATTERN (trial);
	      if ((thread_if_true
		   ? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
		   : eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
		{
		  rtx temp;

		  must_annul = 1;
		winner:

#ifdef HAVE_cc0
		  if (reg_mentioned_p (cc0_rtx, pat))
		    link_cc0_insns (trial);
#endif

		  /* If we own this thread, delete the insn.  If this is the
		     destination of a branch, show that a basic block status
		     may have been updated.  In any case, mark the new
		     starting point of this thread.  */
		  if (own_thread)
		    {
		      update_block (trial, thread);
		      delete_insn (trial);
		    }
		  else
		    new_thread = next_active_insn (trial);

		  temp = own_thread ? trial : copy_rtx (trial);
		  if (thread_if_true)
		    INSN_FROM_TARGET_P (temp) = 1;

		  delay_list = add_to_delay_list (temp, delay_list);

		  if (slots_to_fill == ++(*pslots_filled))
		    {
		      /* Even though we have filled all the slots, we
			 may be branching to a location that has a
			 redundant insn.  Skip any if so.  */
		      while (new_thread && ! own_thread
			     && ! insn_sets_resource_p (new_thread, &set, 1)
			     && ! insn_sets_resource_p (new_thread, &needed, 1)
			     && ! insn_references_resource_p (new_thread,
							      &set, 1)
			     && redundant_insn (new_thread, insn, delay_list))
			new_thread = next_active_insn (new_thread);
		      break;
		    }

		  continue;
		}
	    }
	}

      /* This insn can't go into a delay slot.  */
      lose = 1;
      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);

      /* Ensure we don't put insns between the setting of cc and the comparison
	 by moving a setting of cc into an earlier delay slot since these insns
	 could clobber the condition code.  */
      set.cc = 1;

      /* If this insn is a register-register copy and the next insn has
	 a use of our destination, change it to use our source.  That way,
	 it will become a candidate for our delay slot the next time
	 through this loop.  This case occurs commonly in loops that
	 scan a list.

	 We could check for more complex cases than those tested below,
	 but it doesn't seem worth it.  It might also be a good idea to try
	 to swap the two insns.  That might do better.

	 We can't do this if the next insn modifies our destination, because
	 that would make the replacement into the insn invalid.  We also can't
	 do this if it modifies our source, because it might be an earlyclobber
	 operand.  This latter test also prevents updating the contents of
	 a PRE_INC.  */

      if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
	  && GET_CODE (SET_SRC (pat)) == REG
	  && GET_CODE (SET_DEST (pat)) == REG)
	{
	  rtx next = next_nonnote_insn (trial);

	  if (next && GET_CODE (next) == INSN
	      && GET_CODE (PATTERN (next)) != USE
	      && ! reg_set_p (SET_DEST (pat), next)
	      && ! reg_set_p (SET_SRC (pat), next)
	      && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
	    validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
	}
    }

  /* If we stopped on a branch insn that has delay slots, see if we can
     steal some of the insns in those slots.  */
  if (trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
    {
      /* If this is the `true' thread, we will want to follow the jump,
	 so we can only do this if we have taken everything up to here.  */
      if (thread_if_true && trial == new_thread)
	delay_list
	  = steal_delay_list_from_target (insn, condition, PATTERN (trial),
					  delay_list, &set, &needed,
					  &opposite_needed, slots_to_fill,
					  pslots_filled, &must_annul,
					  &new_thread);
      else if (! thread_if_true)
	delay_list
	  = steal_delay_list_from_fallthrough (insn, condition,
					       PATTERN (trial),
					       delay_list, &set, &needed,
					       &opposite_needed, slots_to_fill,
					       pslots_filled, &must_annul);
    }

  /* If we haven't found anything for this delay slot and it is very
     likely that the branch will be taken, see if the insn at our target
     increments or decrements a register with an increment that does not
     depend on the destination register.  If so, try to place the opposite
     arithmetic insn after the jump insn and put the arithmetic insn in the
     delay slot.  If we can't do this, return.  */
  if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
    {
      rtx pat = PATTERN (new_thread);
      rtx dest;
      rtx src;

      trial = new_thread;
      pat = PATTERN (trial);

      if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
	  || ! eligible_for_delay (insn, 0, trial, flags))
	return 0;

      dest = SET_DEST (pat), src = SET_SRC (pat);
      if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
	  && rtx_equal_p (XEXP (src, 0), dest)
	  && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
	{
	  rtx other = XEXP (src, 1);
	  rtx new_arith;
	  rtx ninsn;

	  /* If this is a constant adjustment, use the same code with
	     the negated constant.  Otherwise, reverse the sense of the
	     arithmetic.  */
	  if (GET_CODE (other) == CONST_INT)
	    new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
				 negate_rtx (GET_MODE (src), other));
	  else
	    new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
				 GET_MODE (src), dest, other);

	  ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
				   insn);

	  if (recog_memoized (ninsn) < 0
	      || (insn_extract (ninsn),
		  ! constrain_operands (INSN_CODE (ninsn), 1)))
	    {
	      delete_insn (ninsn);
	      return 0;
	    }

	  if (own_thread)
	    {
	      update_block (trial, thread);
	      delete_insn (trial);
	    }
	  else
	    new_thread = next_active_insn (trial);

	  ninsn = own_thread ? trial : copy_rtx (trial);
	  if (thread_if_true)
	    INSN_FROM_TARGET_P (ninsn) = 1;

	  delay_list = add_to_delay_list (ninsn, NULL_RTX);
	  (*pslots_filled)++;
	}
    }

  if (delay_list && must_annul)
    INSN_ANNULLED_BRANCH_P (insn) = 1;

  /* If we are to branch into the middle of this thread, find an appropriate
     label or make a new one if none, and redirect INSN to it.  If we hit the
     end of the function, use the end-of-function label.  */
  if (new_thread != thread)
    {
      rtx label;

      if (! thread_if_true)
	abort ();

      if (new_thread && GET_CODE (new_thread) == JUMP_INSN
	  && (simplejump_p (new_thread)
	      || GET_CODE (PATTERN (new_thread)) == RETURN)
	  && redirect_with_delay_list_safe_p (insn,
					      JUMP_LABEL (new_thread),
					      delay_list))
	new_thread = follow_jumps (JUMP_LABEL (new_thread));

      if (new_thread == 0)
	label = find_end_label ();
      else if (GET_CODE (new_thread) == CODE_LABEL)
	label = new_thread;
      else
	label = get_label_before (new_thread);

      reorg_redirect_jump (insn, label);
    }

  return delay_list;
}
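
/* Added commentary, not part of the original code: the increment/decrement
   transformation above, shown schematically for a likely-taken branch whose
   target starts with an increment (register names and opcodes invented):

	before:				after:
	  beq L1			  beq L1'	;; slot: r5 = r5 + 4
	  ...fallthrough...		  r5 = r5 - 4	;; NINSN compensates
     L1:  r5 = r5 + 4			  ...fallthrough...
	  ...			     L1:  r5 = r5 + 4
				     L1': ...

   The slot insn executes on both paths, so the fall-through path gets the
   compensating NINSN, and the branch is redirected past the original
   increment.  NINSN is kept only if recog_memoized/constrain_operands
   accept it; otherwise it is deleted and nothing is filled.  */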
3777 /* Make another attempt to find insns to place in delay slots.
3779 We previously looked for insns located in front of the delay insn
3780 and, for non-jump delay insns, located behind the delay insn.
3782 Here only try to schedule jump insns and try to move insns from either
3783 the target or the following insns into the delay slot. If annulling is
3784 supported, we will be likely to do this. Otherwise, we can do this only
static void
fill_eager_delay_slots (first)
     rtx first;
{
  register rtx insn;
  register int i;
  int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;

  for (i = 0; i < num_unfilled_slots; i++)
    {
      rtx condition;
      rtx target_label, insn_at_target, fallthrough_insn;
      rtx delay_list = 0;
      int own_target;
      int own_fallthrough;
      int prediction, slots_to_fill, slots_filled;

      insn = unfilled_slots_base[i];
      if (insn == 0
          || INSN_DELETED_P (insn)
          || GET_CODE (insn) != JUMP_INSN
          || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
        continue;

      slots_to_fill = num_delay_slots (insn);
      if (slots_to_fill == 0)
        continue;

      slots_filled = 0;
      target_label = JUMP_LABEL (insn);
      condition = get_branch_condition (insn, target_label);

      if (condition == 0)
        continue;

      /* Get the next active fallthrough and target insns and see if we own
         them.  Then see whether the branch is likely true.  We don't need
         to do a lot of this for unconditional branches.  */

      insn_at_target = next_active_insn (target_label);
      own_target = own_thread_p (target_label, target_label, 0);

      if (condition == const_true_rtx)
        {
          own_fallthrough = 0;
          fallthrough_insn = 0;
          prediction = 2;
        }
      else
        {
          fallthrough_insn = next_active_insn (insn);
          own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
          prediction = mostly_true_jump (insn, condition);
        }

      /* If this insn is expected to branch, first try to get insns from our
         target, then our fallthrough insns.  If it is not expected to branch,
         try the other order.  */

      if (prediction > 0)
        {
          delay_list
            = fill_slots_from_thread (insn, condition, insn_at_target,
                                      fallthrough_insn, prediction == 2, 1,
                                      own_target, own_fallthrough,
                                      slots_to_fill, &slots_filled);

          if (delay_list == 0 && own_fallthrough)
            {
              /* Even though we didn't find anything for delay slots,
                 we might have found a redundant insn which we deleted
                 from the thread that was filled.  So we have to recompute
                 the next insn at the target.  */
              target_label = JUMP_LABEL (insn);
              insn_at_target = next_active_insn (target_label);

              delay_list
                = fill_slots_from_thread (insn, condition, fallthrough_insn,
                                          insn_at_target, 0, 0,
                                          own_fallthrough, own_target,
                                          slots_to_fill, &slots_filled);
            }
        }
      else
        {
          if (own_fallthrough)
            delay_list
              = fill_slots_from_thread (insn, condition, fallthrough_insn,
                                        insn_at_target, 0, 0,
                                        own_fallthrough, own_target,
                                        slots_to_fill, &slots_filled);

          if (delay_list == 0)
            delay_list
              = fill_slots_from_thread (insn, condition, insn_at_target,
                                        next_active_insn (insn), 0, 1,
                                        own_target, own_fallthrough,
                                        slots_to_fill, &slots_filled);
        }

      if (delay_list)
        unfilled_slots_base[i]
          = emit_delay_sequence (insn, delay_list,
                                 slots_filled, slots_to_fill);

      if (slots_to_fill == slots_filled)
        unfilled_slots_base[i] = 0;

      note_delay_statistics (slots_filled, 1);
    }
}
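
/* Added note (not in the original sources): mostly_true_jump returns 2
   for a branch judged very likely taken, 1 for slightly likely, 0 for
   slightly unlikely and -1 for very unlikely.  So `prediction > 0' above
   fills from the branch-target thread first and the fall-through thread
   second, while a nonpositive prediction tries the two threads in the
   opposite order.  */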
/* Once we have tried two ways to fill a delay slot, make a pass over the
   code to try to improve the results and to do such things as more jump
   threading.  */
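
/* An example of the kind of rewrite attempted below (added commentary,
   not original text): a conditional branch around an unconditional one,

        beq  L1                         bne  L2
        jmp  L2                 ==>     ...
     L1: ...                         L1: ...

   is inverted and pointed at the second jump's target, after which the
   unconditional jump (and possibly L1 itself) can be deleted.  The
   mnemonics are hypothetical.  */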
static void
relax_delay_slots (first)
     rtx first;
{
  register rtx insn, next, pat;
  register rtx trial, delay_insn, target_label;
  /* Look at every JUMP_INSN and see if we can improve it.  */
  for (insn = first; insn; insn = next)
    {
      rtx other;

      next = next_active_insn (insn);

      /* If this is a jump insn, see if it now jumps to a jump, jumps to
         the next insn, or jumps to a label that is not the last of a
         group of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && (target_label = JUMP_LABEL (insn)) != 0)
        {
          target_label = follow_jumps (target_label);
          target_label = prev_label (next_active_insn (target_label));

          if (target_label == 0)
            target_label = find_end_label ();

          if (next_active_insn (target_label) == next
              && ! condjump_in_parallel_p (insn))
            {
              delete_jump (insn);
              continue;
            }

          if (target_label != JUMP_LABEL (insn))
            reorg_redirect_jump (insn, target_label);
          /* See if this jump branches around an unconditional jump.
             If so, invert this jump and point it to the target of the
             second jump.  */
          if (next && GET_CODE (next) == JUMP_INSN
              && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
              && next_active_insn (target_label) == next_active_insn (next)
              && no_labels_between_p (insn, next))
            {
              rtx label = JUMP_LABEL (next);

              /* Be careful how we do this to avoid deleting code or
                 labels that are momentarily dead.  See similar optimization
                 in jump.c.

                 We also need to ensure we properly handle the case when
                 invert_jump fails.  */

              ++LABEL_NUSES (target_label);
              if (label)
                ++LABEL_NUSES (label);

              if (invert_jump (insn, label))
                {
                  delete_insn (next);
                  next = insn;
                }

              if (label)
                --LABEL_NUSES (label);

              if (--LABEL_NUSES (target_label) == 0)
                delete_insn (target_label);

              continue;
            }
        }
      /* If this is an unconditional jump and the previous insn is a
         conditional jump, try reversing the condition of the previous
         insn and swapping our targets.  The next pass might be able to
         fill the slots.

         Don't do this if we expect the conditional branch to be true, because
         we would then be making the more common case longer.  */

      if (GET_CODE (insn) == JUMP_INSN
          && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
          && (other = prev_active_insn (insn)) != 0
          && (condjump_p (other) || condjump_in_parallel_p (other))
          && no_labels_between_p (other, insn)
          && 0 < mostly_true_jump (other,
                                   get_branch_condition (other,
                                                         JUMP_LABEL (other))))
        {
          rtx other_target = JUMP_LABEL (other);
          target_label = JUMP_LABEL (insn);

          /* Increment the count of OTHER_TARGET, so it doesn't get deleted
             as we move the label.  */
          if (other_target)
            ++LABEL_NUSES (other_target);

          if (invert_jump (other, target_label))
            reorg_redirect_jump (insn, other_target);

          if (other_target)
            --LABEL_NUSES (other_target);
        }
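
      /* Added illustration of the swap done above (not original text):

                beq  L1                 bne  L2
                jmp  L2         ==>     jmp  L1

         The conditional branch receives the inverted condition and the
         unconditional jump's old target; the unconditional jump is then
         redirected to the conditional branch's old target, where a later
         pass may have better luck filling its delay slot.  */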
      /* Now look only at cases where we have filled a delay slot.  */
      if (GET_CODE (insn) != INSN
          || GET_CODE (PATTERN (insn)) != SEQUENCE)
        continue;

      pat = PATTERN (insn);
      delay_insn = XVECEXP (pat, 0, 0);

      /* See if the first insn in the delay slot is redundant with some
         previous insn.  Remove it from the delay slot if so; then set up
         to reprocess this insn.  */
      if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
        {
          delete_from_delay_slot (XVECEXP (pat, 0, 1));
          next = prev_active_insn (next);
          continue;
        }
      /* Now look only at the cases where we have a filled JUMP_INSN.  */
      if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
          || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
                || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
        continue;

      target_label = JUMP_LABEL (delay_insn);

      if (target_label)
        {
          /* If this jump goes to another unconditional jump, thread it, but
             don't convert a jump into a RETURN here.  */
          trial = follow_jumps (target_label);
          /* We use next_real_insn instead of next_active_insn, so that
             the special USE insns emitted by reorg won't be ignored.
             If they are ignored, then they will get deleted if target_label
             is now unreachable, and that would cause mark_target_live_regs
             to fail.  */
          trial = prev_label (next_real_insn (trial));
          if (trial == 0 && target_label != 0)
            trial = find_end_label ();

          if (trial != target_label
              && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
            {
              reorg_redirect_jump (delay_insn, trial);
              target_label = trial;
            }
          /* If the first insn at TARGET_LABEL is redundant with a previous
             insn, redirect the jump to the following insn and process
             again.  */
          trial = next_active_insn (target_label);
          if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
              && redundant_insn (trial, insn, 0))
            {
              trial = next_active_insn (trial);
              if (trial == 0)
                target_label = find_end_label ();
              else
                target_label = get_label_before (trial);
              reorg_redirect_jump (delay_insn, target_label);
              next = insn;
              continue;
            }
          /* Similarly, if it is an unconditional jump with one insn in its
             delay list and that insn is redundant, thread the jump.  */
          if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
              && XVECLEN (PATTERN (trial), 0) == 2
              && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
              && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
                  || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
              && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
            {
              target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
              if (target_label == 0)
                target_label = find_end_label ();

              if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
                                                    insn))
                {
                  reorg_redirect_jump (delay_insn, target_label);
                  next = insn;
                  continue;
                }
            }
        }
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && prev_active_insn (target_label) == insn
          && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
          /* If the last insn in the delay slot sets CC0 for some insn,
             various code assumes that it is in a delay slot.  We could
             put it back where it belonged and delete the register notes,
             but it doesn't seem worthwhile in this uncommon case.  */
          && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
                              REG_CC_USER, NULL_RTX)
#endif
          )
        {
          int i;

          /* All this insn does is execute its delay list and jump to the
             following insn.  So delete the jump and just execute the delay
             list insns.

             We do this by deleting the INSN containing the SEQUENCE, then
             re-emitting the insns separately, and then deleting the jump.
             This allows the count of the jump target to be properly
             decremented.  */

          /* Clear the from target bit, since these insns are no longer
             in delay slots.  */
          for (i = 0; i < XVECLEN (pat, 0); i++)
            INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

          trial = PREV_INSN (insn);
          delete_insn (insn);
          emit_insn_after (pat, trial);
          delete_scheduled_jump (delay_insn);
          continue;
        }
      /* See if this is an unconditional jump around a single insn which is
         identical to the one in its delay slot.  In this case, we can just
         delete the branch and the insn in its delay slot.  */
      if (next && GET_CODE (next) == INSN
          && prev_label (next_active_insn (next)) == target_label
          && simplejump_p (insn)
          && XVECLEN (pat, 0) == 2
          && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
        {
          delete_insn (insn);
          continue;
        }
      /* See if this jump (with its delay slots) branches around another
         jump (without delay slots).  If so, invert this jump and point
         it to the target of the second jump.  We cannot do this for
         annulled jumps, though.  Again, don't convert a jump to a RETURN
         here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
          && next && GET_CODE (next) == JUMP_INSN
          && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
          && next_active_insn (target_label) == next_active_insn (next)
          && no_labels_between_p (insn, next))
        {
          rtx label = JUMP_LABEL (next);
          rtx old_label = JUMP_LABEL (delay_insn);

          if (label == 0)
            label = find_end_label ();

          if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
            {
              /* Be careful how we do this to avoid deleting code or labels
                 that are momentarily dead.  See similar optimization in
                 jump.c.  */
              if (old_label)
                ++LABEL_NUSES (old_label);

              if (invert_jump (delay_insn, label))
                {
                  int i;

                  /* Must update the INSN_FROM_TARGET_P bits now that
                     the branch is reversed, so that mark_target_live_regs
                     will handle the delay slot insn correctly.  */
                  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
                    {
                      rtx slot = XVECEXP (PATTERN (insn), 0, i);
                      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
                    }

                  delete_insn (next);
                  next = insn;
                }

              if (old_label && --LABEL_NUSES (old_label) == 0)
                delete_insn (old_label);
              continue;
            }
        }
      /* If we own the thread opposite the way this insn branches, see if we
         can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
          && own_thread_p (NEXT_INSN (insn), 0, 1))
        try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
               && own_thread_p (target_label, target_label, 0))
        try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
         NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
#ifdef HAVE_return

/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */
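
/* A sketch of the conversion (added commentary, not original text):

        jmp  .Lret                      ret
         insn-in-slot           ==>      insn-in-slot

   where `.Lret' stands for the end-of-function label.  The rewrite is
   only kept when every slot insn is also valid in the RETURN's delay
   slot, as tested below.  */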
static void
make_return_insns (first)
     rtx first;
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
        real_return_label = get_label_before (insn);
        break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
         label.  */
      if (GET_CODE (insn) != INSN
          || GET_CODE (PATTERN (insn)) != SEQUENCE
          || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
          || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
        continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);

      /* If we can't make the jump into a RETURN, try to redirect it to the
         best RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
        {
          /* Make sure redirecting the jump will not invalidate the delay
             slot insns.  */
          if (redirect_with_delay_slots_safe_p (jump_insn,
                                                real_return_label,
                                                insn))
            reorg_redirect_jump (jump_insn, real_return_label);
          continue;
        }

      /* See if this RETURN can accept the insns currently in its delay slot.
         It can if it has more or an equal number of slots and the contents
         of each is valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
        {
          for (i = 1; i < XVECLEN (pat, 0); i++)
            if (! (
#ifdef ANNUL_IFFALSE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
                   ? eligible_for_annul_false (jump_insn, i - 1,
                                               XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
                   (INSN_ANNULLED_BRANCH_P (jump_insn)
                    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
                   ? eligible_for_annul_true (jump_insn, i - 1,
                                              XVECEXP (pat, 0, i), flags) :
#endif
                   eligible_for_delay (jump_insn, i - 1,
                                       XVECEXP (pat, 0, i), flags)))
              break;
        }
      else
        i = 0;

      if (i == XVECLEN (pat, 0))
        continue;

      /* We have to do something with this insn.  If it is an unconditional
         RETURN, delete the SEQUENCE and output the individual insns,
         followed by the RETURN.  Then set things up so we try to find
         insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
        {
          rtx prev = PREV_INSN (insn);

          delete_insn (insn);
          for (i = 1; i < XVECLEN (pat, 0); i++)
            prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

          insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
          emit_barrier_after (insn);

          if (slots)
            obstack_ptr_grow (&unfilled_slots_obstack, insn);
        }
      else
        /* It is probably more efficient to keep this with its current
           delay slot as a branch to a RETURN.  */
        reorg_redirect_jump (jump_insn, real_return_label);
    }

  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);

  fill_simple_delay_slots (first, 1);
  fill_simple_delay_slots (first, 0);
}
#endif
/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  register int i;
#if 0
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
#endif
  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;

  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
        max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
          && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
        epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }

  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
        continue;

      if (num_delay_slots (insn) > 0)
        obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
          && (condjump_p (insn) || condjump_in_parallel_p (insn))
          && JUMP_LABEL (insn) != 0
          && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
              != JUMP_LABEL (insn)))
        redirect_jump (insn, target);
    }
  /* Indicate what resources are required to be valid at the end of the
     current function.  The condition code never is and memory always is.
     If the frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */
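
  /* Added note (not original text): this resource set is what
     mark_target_live_regs falls back on when a thread runs off the end of
     the RTL chain, so a slot candidate that would clobber, say, the
     register holding the return value is rejected rather than scheduled
     past the end of the function.  */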
  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
        SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0
      && GET_CODE (current_function_return_rtx) == REG)
    mark_referenced_resources (current_function_return_rtx,
                               &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i])
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);
  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */
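
  /* Rough RTL shape of the situation described above (added illustration;
     the register and label names are placeholders):

        (insn (use (reg:SI <retval>)))
        (jump_insn (set (pc) (label_ref <return-label>)))

     with <return-label> at the very end of the chain.  */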
  start_of_epilogue_needs = end_of_function_needs;

  while (epilogue_insn = next_nonnote_insn (epilogue_insn))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table
    = (struct target_info **) alloca ((TARGET_HASH_PRIME
                                       * sizeof (struct target_info *)));
  bzero ((char *) target_hash_table,
         TARGET_HASH_PRIME * sizeof (struct target_info *));

  bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
  bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (first, 1);
      fill_simple_delay_slots (first, 0);
      fill_eager_delay_slots (first);
      relax_delay_slots (first);
    }

  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
          && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
        next = delete_insn (insn);
    }

  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif

  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);

  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
           reorg_pass_number < MAX_REORG_PASSES;
           reorg_pass_number++)
        {
          fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
          for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
            {
              need_comma = 0;
              fprintf (file, ";; Reorg function #%d\n", i);

              fprintf (file, ";; %d insns needing delay slots\n;; ",
                       num_insns_needing_delays[i][reorg_pass_number]);

              for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
                if (num_filled_delays[i][j][reorg_pass_number])
                  {
                    if (need_comma)
                      fprintf (file, ", ");
                    need_comma = 1;
                    fprintf (file, "%d got %d delays",
                             num_filled_delays[i][j][reorg_pass_number], j);
                  }
              fprintf (file, "\n");
            }
        }
    }
}
#endif /* DELAY_SLOTS */