/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.

   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.

   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.

   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets and uses CC0.  The insns are
   allowed to be separated by placing an insn that sets CC0 (but not an insn
   that uses CC0; we could do this, but it doesn't seem worthwhile) in a
   delay slot.  In that case, we point each insn at the other with REG_CC_USER
   and REG_CC_SETTER notes.  Note that these restrictions affect very few
   machines because most RISC machines with delay slots will not use CC0
   (the RT is the only known exception at this point).

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
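/* An illustrative sketch, for orientation only (it is not part of the pass):
   once delay slots are filled, the insn needing the slots and the insns
   chosen for them are wrapped into a single insn whose pattern is a
   SEQUENCE.  Element 0 of the vector is the branch or call itself; the
   remaining elements are the delay-slot insns.  Assuming FILLED is a
   hypothetical variable naming an insn already known to hold such a
   SEQUENCE, the slots can be walked with the accessors used throughout
   this file:

	rtx pat = PATTERN (filled);
	int i;

	for (i = 1; i < XVECLEN (pat, 0); i++)
	  {
	    rtx slot = XVECEXP (pat, 0, i);
	    ...
	  }

   For an annulling branch (INSN_ANNULLED_BRANCH_P set on element 0),
   INSN_FROM_TARGET_P on a slot insn means that insn executes only when
   the branch is taken.  */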
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-flags.h"
#include "insn-attr.h"
/* Import list of registers used as spill regs from reload.  */
extern HARD_REG_SET used_spill_regs;

/* Import highest label used in function at end of reload.  */
extern int max_label_num_after_reload;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif

/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base	\
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next	\
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
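/* A usage sketch, illustrative only: an insn whose slots could not be
   filled is recorded by growing the obstack with a pointer to it, as is
   done later in this file with

	obstack_ptr_grow (&unfilled_slots_obstack, insn);

   and the pending insns are then scanned between the two macros above,
   roughly as (P is a hypothetical cursor):

	rtx *p;

	for (p = unfilled_slots_base; p < unfilled_slots_next; p++)
	  if (*p != 0)
	    ...;

   where the loop body is whatever filling strategy is being applied.  */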
/* This structure is used to indicate which hardware resources are set or
   needed by insns so far.  */

struct resources
{
  char memory;		/* Insn sets or needs a memory location.  */
  char unch_memory;	/* Insn sets or needs a "unchanging" MEM.  */
  char volatil;		/* Insn sets or needs a volatile memory loc.  */
  char cc;		/* Insn sets or needs the condition codes.  */
  HARD_REG_SET regs;	/* Which registers are set or needed.  */
};

/* Macro to clear all resources.  */
#define CLEAR_RESOURCE(RES)	\
 do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
      CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
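/* Typical use of the resource-tracking machinery above (a sketch mirroring
   insn_references_resource_p and insn_sets_resource_p later in this file):

	struct resources insn_res;

	CLEAR_RESOURCE (&insn_res);
	mark_referenced_resources (insn, &insn_res, include_delayed_effects);
	if (resource_conflicts_p (&insn_res, res))
	  ...;

   i.e. clear a scratch record, mark what one insn touches, and then test
   it against a previously accumulated record.  */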
/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Points to the label before the end of the function.  */
static rtx end_of_function_label;

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Define the hash table itself.  */
static struct target_info **target_hash_table;
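/* Lookups hash on INSN_UID modulo TARGET_HASH_PRIME and then walk the
   bucket chain; as a sketch of the pattern used in add_to_delay_list
   later in this file (TARGET is a hypothetical insn of interest):

	struct target_info *tinfo;

	for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	     tinfo; tinfo = tinfo->next)
	  if (tinfo->uid == INSN_UID (target))
	    break;
*/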
/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */
static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */
static int max_uid;
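/* For example, get_jump_flags below treats a jump as forward when the
   ruid of its label exceeds the ruid of the jump itself (sketch only;
   INSN and LABEL are hypothetical names here):

	if (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	  ...forward branch...
	else
	  ...backward branch...
*/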
static void mark_referenced_resources PROTO((rtx, struct resources *, int));
static void mark_set_resources	PROTO((rtx, struct resources *, int, int));
static int stop_search_p	PROTO((rtx, int));
static int resource_conflicts_p	PROTO((struct resources *,
				       struct resources *));
static int insn_references_resource_p PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p	PROTO((rtx, struct resources *, int));
static rtx find_end_label	PROTO((void));
static rtx emit_delay_sequence	PROTO((rtx, rtx, int, int));
static rtx add_to_delay_list	PROTO((rtx, rtx));
static void delete_from_delay_slot PROTO((rtx));
static void delete_scheduled_jump PROTO((rtx));
static void note_delay_statistics PROTO((int, int));
static rtx optimize_skip	PROTO((rtx));
static int get_jump_flags	PROTO((rtx, rtx));
static int rare_destination	PROTO((rtx));
static int mostly_true_jump	PROTO((rtx, rtx));
static rtx get_branch_condition	PROTO((rtx, rtx));
static int condition_dominates_p PROTO((rtx, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns PROTO((rtx, rtx));
static rtx redundant_insn	PROTO((rtx, rtx, rtx));
static int own_thread_p		PROTO((rtx, rtx, int));
static int find_basic_block	PROTO((rtx));
static void update_block	PROTO((rtx, rtx));
static int reorg_redirect_jump	PROTO((rtx, rtx));
static void update_reg_dead_notes PROTO((rtx, rtx));
static void fix_reg_dead_note	PROTO((rtx, rtx));
static void update_reg_unused_notes PROTO((rtx, rtx));
static void update_live_status	PROTO((rtx, rtx));
static rtx next_insn_no_annul	PROTO((rtx));
static void mark_target_live_regs PROTO((rtx, struct resources *));
static void fill_simple_delay_slots PROTO((rtx, int));
static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
					 int, int, int, int *));
static void fill_eager_delay_slots PROTO((rtx));
static void relax_delay_slots	PROTO((rtx));
static void make_return_insns	PROTO((rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
/* Given X, some rtl, and RES, a pointer to a `struct resource', mark
   which resources are referenced by the insn.  If INCLUDE_CALLED_ROUTINE
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */
282 mark_referenced_resources (x
, res
, include_delayed_effects
)
284 register struct resources
*res
;
285 register int include_delayed_effects
;
287 register enum rtx_code code
= GET_CODE (x
);
289 register char *format_ptr
;
291 /* Handle leaf items for which we set resource flags. Also, special-case
292 CALL, SET and CLOBBER operators. */
304 if (GET_CODE (SUBREG_REG (x
)) != REG
)
305 mark_referenced_resources (SUBREG_REG (x
), res
, 0);
308 int regno
= REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
);
309 int last_regno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
));
310 for (i
= regno
; i
< last_regno
; i
++)
311 SET_HARD_REG_BIT (res
->regs
, i
);
316 for (i
= 0; i
< HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
)); i
++)
317 SET_HARD_REG_BIT (res
->regs
, REGNO (x
) + i
);
321 /* If this memory shouldn't change, it really isn't referencing
323 if (RTX_UNCHANGING_P (x
))
324 res
->unch_memory
= 1;
327 res
->volatil
= MEM_VOLATILE_P (x
);
329 /* Mark registers used to access memory. */
330 mark_referenced_resources (XEXP (x
, 0), res
, 0);
337 case UNSPEC_VOLATILE
:
340 /* Traditional asm's are always volatile. */
345 res
->volatil
= MEM_VOLATILE_P (x
);
347 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
348 We can not just fall through here since then we would be confused
349 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
350 traditional asms unlike their normal usage. */
352 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
353 mark_referenced_resources (ASM_OPERANDS_INPUT (x
, i
), res
, 0);
357 /* The first operand will be a (MEM (xxx)) but doesn't really reference
358 memory. The second operand may be referenced, though. */
359 mark_referenced_resources (XEXP (XEXP (x
, 0), 0), res
, 0);
360 mark_referenced_resources (XEXP (x
, 1), res
, 0);
364 /* Usually, the first operand of SET is set, not referenced. But
365 registers used to access memory are referenced. SET_DEST is
366 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
368 mark_referenced_resources (SET_SRC (x
), res
, 0);
371 if (GET_CODE (x
) == SIGN_EXTRACT
|| GET_CODE (x
) == ZERO_EXTRACT
)
372 mark_referenced_resources (x
, res
, 0);
373 else if (GET_CODE (x
) == SUBREG
)
375 if (GET_CODE (x
) == MEM
)
376 mark_referenced_resources (XEXP (x
, 0), res
, 0);
383 if (include_delayed_effects
)
385 /* A CALL references memory, the frame pointer if it exists, the
386 stack pointer, any global registers and any registers given in
387 USE insns immediately in front of the CALL.
389 However, we may have moved some of the parameter loading insns
390 into the delay slot of this CALL. If so, the USE's for them
391 don't count and should be skipped. */
392 rtx insn
= PREV_INSN (x
);
395 rtx next
= NEXT_INSN (x
);
398 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
399 if (NEXT_INSN (insn
) != x
)
401 next
= NEXT_INSN (NEXT_INSN (insn
));
402 sequence
= PATTERN (NEXT_INSN (insn
));
403 seq_size
= XVECLEN (sequence
, 0);
404 if (GET_CODE (sequence
) != SEQUENCE
)
409 SET_HARD_REG_BIT (res
->regs
, STACK_POINTER_REGNUM
);
410 if (frame_pointer_needed
)
412 SET_HARD_REG_BIT (res
->regs
, FRAME_POINTER_REGNUM
);
413 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
414 SET_HARD_REG_BIT (res
->regs
, HARD_FRAME_POINTER_REGNUM
);
418 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
420 SET_HARD_REG_BIT (res
->regs
, i
);
422 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
423 assume that this call can need any register.
425 This is done to be more conservative about how we handle setjmp.
426 We assume that they both use and set all registers. Using all
427 registers ensures that a register will not be considered dead
428 just because it crosses a setjmp call. A register should be
429 considered dead only if the setjmp call returns non-zero. */
430 if (next
&& GET_CODE (next
) == NOTE
431 && NOTE_LINE_NUMBER (next
) == NOTE_INSN_SETJMP
)
432 SET_HARD_REG_SET (res
->regs
);
437 for (link
= CALL_INSN_FUNCTION_USAGE (x
);
439 link
= XEXP (link
, 1))
440 if (GET_CODE (XEXP (link
, 0)) == USE
)
442 for (i
= 1; i
< seq_size
; i
++)
444 rtx slot_pat
= PATTERN (XVECEXP (sequence
, 0, i
));
445 if (GET_CODE (slot_pat
) == SET
446 && rtx_equal_p (SET_DEST (slot_pat
),
447 SET_DEST (XEXP (link
, 0))))
451 mark_referenced_resources (SET_DEST (XEXP (link
, 0)),
457 /* ... fall through to other INSN processing ... */
462 #ifdef INSN_REFERENCES_ARE_DELAYED
463 if (! include_delayed_effects
464 && INSN_REFERENCES_ARE_DELAYED (x
))
468 /* No special processing, just speed up. */
469 mark_referenced_resources (PATTERN (x
), res
, include_delayed_effects
);
473 /* Process each sub-expression and flag what it needs. */
474 format_ptr
= GET_RTX_FORMAT (code
);
475 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
476 switch (*format_ptr
++)
479 mark_referenced_resources (XEXP (x
, i
), res
, include_delayed_effects
);
483 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
484 mark_referenced_resources (XVECEXP (x
, i
, j
), res
,
485 include_delayed_effects
);
490 /* Given X, a part of an insn, and a pointer to a `struct resource', RES,
491 indicate which resources are modified by the insn. If INCLUDE_CALLED_ROUTINE
492 is nonzero, also mark resources potentially set by the called routine.
494 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
495 objects are being referenced instead of set.
497 We never mark the insn as modifying the condition code unless it explicitly
498 SETs CC0 even though this is not totally correct. The reason for this is
499 that we require a SET of CC0 to immediately precede the reference to CC0.
500 So if some other insn sets CC0 as a side-effect, we know it cannot affect
501 our computation and thus may be placed in a delay slot. */
504 mark_set_resources (x
, res
, in_dest
, include_delayed_effects
)
506 register struct resources
*res
;
508 int include_delayed_effects
;
510 register enum rtx_code code
;
512 register char *format_ptr
;
530 /* These don't set any resources. */
539 /* Called routine modifies the condition code, memory, any registers
540 that aren't saved across calls, global registers and anything
541 explicitly CLOBBERed immediately after the CALL_INSN. */
543 if (include_delayed_effects
)
545 rtx next
= NEXT_INSN (x
);
546 rtx prev
= PREV_INSN (x
);
549 res
->cc
= res
->memory
= 1;
550 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
551 if (call_used_regs
[i
] || global_regs
[i
])
552 SET_HARD_REG_BIT (res
->regs
, i
);
554 /* If X is part of a delay slot sequence, then NEXT should be
555 the first insn after the sequence. */
556 if (NEXT_INSN (prev
) != x
)
557 next
= NEXT_INSN (NEXT_INSN (prev
));
559 for (link
= CALL_INSN_FUNCTION_USAGE (x
);
560 link
; link
= XEXP (link
, 1))
561 if (GET_CODE (XEXP (link
, 0)) == CLOBBER
)
562 mark_set_resources (SET_DEST (XEXP (link
, 0)), res
, 1, 0);
564 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
565 assume that this call can clobber any register. */
566 if (next
&& GET_CODE (next
) == NOTE
567 && NOTE_LINE_NUMBER (next
) == NOTE_INSN_SETJMP
)
568 SET_HARD_REG_SET (res
->regs
);
/* ... and also what its RTL says it modifies, if anything.  */
576 /* An insn consisting of just a CLOBBER (or USE) is just for flow
577 and doesn't actually do anything, so we ignore it. */
579 #ifdef INSN_SETS_ARE_DELAYED
580 if (! include_delayed_effects
581 && INSN_SETS_ARE_DELAYED (x
))
586 if (GET_CODE (x
) != USE
&& GET_CODE (x
) != CLOBBER
)
591 /* If the source of a SET is a CALL, this is actually done by
592 the called routine. So only include it if we are to include the
593 effects of the calling routine. */
595 mark_set_resources (SET_DEST (x
), res
,
596 (include_delayed_effects
597 || GET_CODE (SET_SRC (x
)) != CALL
),
600 mark_set_resources (SET_SRC (x
), res
, 0, 0);
604 mark_set_resources (XEXP (x
, 0), res
, 1, 0);
608 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
609 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x
, 0, 0))
610 && INSN_FROM_TARGET_P (XVECEXP (x
, 0, i
))))
611 mark_set_resources (XVECEXP (x
, 0, i
), res
, 0,
612 include_delayed_effects
);
619 mark_set_resources (XEXP (x
, 0), res
, 1, 0);
623 mark_set_resources (XEXP (x
, 0), res
, in_dest
, 0);
624 mark_set_resources (XEXP (x
, 1), res
, 0, 0);
625 mark_set_resources (XEXP (x
, 2), res
, 0, 0);
632 res
->unch_memory
= RTX_UNCHANGING_P (x
);
633 res
->volatil
= MEM_VOLATILE_P (x
);
636 mark_set_resources (XEXP (x
, 0), res
, 0, 0);
642 if (GET_CODE (SUBREG_REG (x
)) != REG
)
643 mark_set_resources (SUBREG_REG (x
), res
,
644 in_dest
, include_delayed_effects
);
647 int regno
= REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
);
648 int last_regno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
));
649 for (i
= regno
; i
< last_regno
; i
++)
650 SET_HARD_REG_BIT (res
->regs
, i
);
657 for (i
= 0; i
< HARD_REGNO_NREGS (REGNO (x
), GET_MODE (x
)); i
++)
658 SET_HARD_REG_BIT (res
->regs
, REGNO (x
) + i
);
662 /* Process each sub-expression and flag what it needs. */
663 format_ptr
= GET_RTX_FORMAT (code
);
664 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
665 switch (*format_ptr
++)
668 mark_set_resources (XEXP (x
, i
), res
, in_dest
, include_delayed_effects
);
672 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
673 mark_set_resources (XVECEXP (x
, i
, j
), res
, in_dest
,
674 include_delayed_effects
);
679 /* Return TRUE if this insn should stop the search for insn to fill delay
680 slots. LABELS_P indicates that labels should terminate the search.
681 In all cases, jumps terminate the search. */
684 stop_search_p (insn
, labels_p
)
691 switch (GET_CODE (insn
))
705 /* OK unless it contains a delay slot or is an `asm' insn of some type.
706 We don't know anything about these. */
707 return (GET_CODE (PATTERN (insn
)) == SEQUENCE
708 || GET_CODE (PATTERN (insn
)) == ASM_INPUT
709 || asm_noperands (PATTERN (insn
)) >= 0);
716 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
717 resource set contains a volatile memory reference. Otherwise, return FALSE. */
720 resource_conflicts_p (res1
, res2
)
721 struct resources
*res1
, *res2
;
723 if ((res1
->cc
&& res2
->cc
) || (res1
->memory
&& res2
->memory
)
724 || (res1
->unch_memory
&& res2
->unch_memory
)
725 || res1
->volatil
|| res2
->volatil
)
729 return (res1
->regs
& res2
->regs
) != HARD_CONST (0);
734 for (i
= 0; i
< HARD_REG_SET_LONGS
; i
++)
735 if ((res1
->regs
[i
] & res2
->regs
[i
]) != 0)
742 /* Return TRUE if any resource marked in RES, a `struct resources', is
743 referenced by INSN. If INCLUDE_CALLED_ROUTINE is set, return if the called
744 routine is using those resources.
746 We compute this by computing all the resources referenced by INSN and
747 seeing if this conflicts with RES. It might be faster to directly check
748 ourselves, and this is the way it used to work, but it means duplicating
749 a large block of complex code. */
752 insn_references_resource_p (insn
, res
, include_delayed_effects
)
754 register struct resources
*res
;
755 int include_delayed_effects
;
757 struct resources insn_res
;
759 CLEAR_RESOURCE (&insn_res
);
760 mark_referenced_resources (insn
, &insn_res
, include_delayed_effects
);
761 return resource_conflicts_p (&insn_res
, res
);
764 /* Return TRUE if INSN modifies resources that are marked in RES.
765 INCLUDE_CALLED_ROUTINE is set if the actions of that routine should be
766 included. CC0 is only modified if it is explicitly set; see comments
767 in front of mark_set_resources for details. */
770 insn_sets_resource_p (insn
, res
, include_delayed_effects
)
772 register struct resources
*res
;
773 int include_delayed_effects
;
775 struct resources insn_sets
;
777 CLEAR_RESOURCE (&insn_sets
);
778 mark_set_resources (insn
, &insn_sets
, 0, include_delayed_effects
);
779 return resource_conflicts_p (&insn_sets
, res
);
782 /* Find a label at the end of the function or before a RETURN. If there is
790 /* If we found one previously, return it. */
791 if (end_of_function_label
)
792 return end_of_function_label
;
794 /* Otherwise, see if there is a label at the end of the function. If there
795 is, it must be that RETURN insns aren't needed, so that is our return
796 label and we don't have to do anything else. */
798 insn
= get_last_insn ();
799 while (GET_CODE (insn
) == NOTE
800 || (GET_CODE (insn
) == INSN
801 && (GET_CODE (PATTERN (insn
)) == USE
802 || GET_CODE (PATTERN (insn
)) == CLOBBER
)))
803 insn
= PREV_INSN (insn
);
805 /* When a target threads its epilogue we might already have a
806 suitable return insn. If so put a label before it for the
807 end_of_function_label. */
808 if (GET_CODE (insn
) == BARRIER
809 && GET_CODE (PREV_INSN (insn
)) == JUMP_INSN
810 && GET_CODE (PATTERN (PREV_INSN (insn
))) == RETURN
)
812 rtx temp
= PREV_INSN (PREV_INSN (insn
));
813 end_of_function_label
= gen_label_rtx ();
814 LABEL_NUSES (end_of_function_label
) = 0;
/* Put the label before any USE insns that may precede the RETURN insn.  */
817 while (GET_CODE (temp
) == USE
)
818 temp
= PREV_INSN (temp
);
820 emit_label_after (end_of_function_label
, temp
);
823 else if (GET_CODE (insn
) == CODE_LABEL
)
824 end_of_function_label
= insn
;
827 /* Otherwise, make a new label and emit a RETURN and BARRIER,
829 end_of_function_label
= gen_label_rtx ();
830 LABEL_NUSES (end_of_function_label
) = 0;
831 emit_label (end_of_function_label
);
835 /* The return we make may have delay slots too. */
836 rtx insn
= gen_return ();
837 insn
= emit_jump_insn (insn
);
839 if (num_delay_slots (insn
) > 0)
840 obstack_ptr_grow (&unfilled_slots_obstack
, insn
);
845 /* Show one additional use for this label so it won't go away until
847 ++LABEL_NUSES (end_of_function_label
);
849 return end_of_function_label
;
852 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
853 the pattern of INSN with the SEQUENCE.
855 Chain the insns so that NEXT_INSN of each insn in the sequence points to
856 the next and NEXT_INSN of the last insn in the sequence points to
857 the first insn after the sequence. Similarly for PREV_INSN. This makes
858 it easier to scan all insns.
860 Returns the SEQUENCE that replaces INSN. */
863 emit_delay_sequence (insn
, list
, length
, avail
)
/* Allocate the rtvec to hold the insns and the SEQUENCE.  */
874 rtvec seqv
= rtvec_alloc (length
+ 1);
875 rtx seq
= gen_rtx (SEQUENCE
, VOIDmode
, seqv
);
876 rtx seq_insn
= make_insn_raw (seq
);
877 rtx first
= get_insns ();
878 rtx last
= get_last_insn ();
880 /* Make a copy of the insn having delay slots. */
881 rtx delay_insn
= copy_rtx (insn
);
883 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
884 confuse further processing. Update LAST in case it was the last insn.
885 We will put the BARRIER back in later. */
886 if (NEXT_INSN (insn
) && GET_CODE (NEXT_INSN (insn
)) == BARRIER
)
888 delete_insn (NEXT_INSN (insn
));
889 last
= get_last_insn ();
893 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
894 NEXT_INSN (seq_insn
) = NEXT_INSN (insn
);
895 PREV_INSN (seq_insn
) = PREV_INSN (insn
);
898 set_new_first_and_last_insn (first
, seq_insn
);
900 PREV_INSN (NEXT_INSN (seq_insn
)) = seq_insn
;
903 set_new_first_and_last_insn (seq_insn
, last
);
905 NEXT_INSN (PREV_INSN (seq_insn
)) = seq_insn
;
907 /* Build our SEQUENCE and rebuild the insn chain. */
908 XVECEXP (seq
, 0, 0) = delay_insn
;
909 INSN_DELETED_P (delay_insn
) = 0;
910 PREV_INSN (delay_insn
) = PREV_INSN (seq_insn
);
912 for (li
= list
; li
; li
= XEXP (li
, 1), i
++)
914 rtx tem
= XEXP (li
, 0);
917 /* Show that this copy of the insn isn't deleted. */
918 INSN_DELETED_P (tem
) = 0;
920 XVECEXP (seq
, 0, i
) = tem
;
921 PREV_INSN (tem
) = XVECEXP (seq
, 0, i
- 1);
922 NEXT_INSN (XVECEXP (seq
, 0, i
- 1)) = tem
;
924 /* Remove any REG_DEAD notes because we can't rely on them now
925 that the insn has been moved. */
926 for (note
= REG_NOTES (tem
); note
; note
= XEXP (note
, 1))
927 if (REG_NOTE_KIND (note
) == REG_DEAD
)
928 XEXP (note
, 0) = const0_rtx
;
931 NEXT_INSN (XVECEXP (seq
, 0, length
)) = NEXT_INSN (seq_insn
);
933 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
934 last insn in that SEQUENCE to point to us. Similarly for the first
935 insn in the following insn if it is a SEQUENCE. */
937 if (PREV_INSN (seq_insn
) && GET_CODE (PREV_INSN (seq_insn
)) == INSN
938 && GET_CODE (PATTERN (PREV_INSN (seq_insn
))) == SEQUENCE
)
939 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn
)), 0,
940 XVECLEN (PATTERN (PREV_INSN (seq_insn
)), 0) - 1))
943 if (NEXT_INSN (seq_insn
) && GET_CODE (NEXT_INSN (seq_insn
)) == INSN
944 && GET_CODE (PATTERN (NEXT_INSN (seq_insn
))) == SEQUENCE
)
945 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn
)), 0, 0)) = seq_insn
;
947 /* If there used to be a BARRIER, put it back. */
949 emit_barrier_after (seq_insn
);
957 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
958 be in the order in which the insns are to be executed. */
961 add_to_delay_list (insn
, delay_list
)
965 /* If we have an empty list, just make a new list element. If
INSN has its block number recorded, clear it since we may
967 be moving the insn to a new block. */
971 struct target_info
*tinfo
;
973 for (tinfo
= target_hash_table
[INSN_UID (insn
) % TARGET_HASH_PRIME
];
974 tinfo
; tinfo
= tinfo
->next
)
975 if (tinfo
->uid
== INSN_UID (insn
))
981 return gen_rtx (INSN_LIST
, VOIDmode
, insn
, NULL_RTX
);
984 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
986 XEXP (delay_list
, 1) = add_to_delay_list (insn
, XEXP (delay_list
, 1));
/* Delete INSN from the delay slot of the insn that it is in.  This may
992 produce an insn without anything in its delay slots. */
995 delete_from_delay_slot (insn
)
998 rtx trial
, seq_insn
, seq
, prev
;
1002 /* We first must find the insn containing the SEQUENCE with INSN in its
1003 delay slot. Do this by finding an insn, TRIAL, where
1004 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
1007 PREV_INSN (NEXT_INSN (trial
)) == trial
;
1008 trial
= NEXT_INSN (trial
))
1011 seq_insn
= PREV_INSN (NEXT_INSN (trial
));
1012 seq
= PATTERN (seq_insn
);
1014 /* Create a delay list consisting of all the insns other than the one
1015 we are deleting (unless we were the only one). */
1016 if (XVECLEN (seq
, 0) > 2)
1017 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
1018 if (XVECEXP (seq
, 0, i
) != insn
)
1019 delay_list
= add_to_delay_list (XVECEXP (seq
, 0, i
), delay_list
);
1021 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
1022 list, and rebuild the delay list if non-empty. */
1023 prev
= PREV_INSN (seq_insn
);
1024 trial
= XVECEXP (seq
, 0, 0);
1025 delete_insn (seq_insn
);
1026 add_insn_after (trial
, prev
);
1028 if (GET_CODE (trial
) == JUMP_INSN
1029 && (simplejump_p (trial
) || GET_CODE (PATTERN (trial
)) == RETURN
))
1030 emit_barrier_after (trial
);
/* If there are any delay insns, re-emit them.  Otherwise clear the
   annul flag.  */
1035 trial
= emit_delay_sequence (trial
, delay_list
, XVECLEN (seq
, 0) - 2, 0);
1037 INSN_ANNULLED_BRANCH_P (trial
) = 0;
1039 INSN_FROM_TARGET_P (insn
) = 0;
1041 /* Show we need to fill this insn again. */
1042 obstack_ptr_grow (&unfilled_slots_obstack
, trial
);
1045 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
1046 the insn that sets CC0 for it and delete it too. */
1049 delete_scheduled_jump (insn
)
1052 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
1053 delete the insn that sets the condition code, but it is hard to find it.
1054 Since this case is rare anyway, don't bother trying; there would likely
1055 be other insns that became dead anyway, which we wouldn't know to
1059 if (reg_mentioned_p (cc0_rtx
, insn
))
1061 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
1063 /* If a reg-note was found, it points to an insn to set CC0. This
1064 insn is in the delay list of some other insn. So delete it from
1065 the delay list it was in. */
1068 if (! FIND_REG_INC_NOTE (XEXP (note
, 0), NULL_RTX
)
1069 && sets_cc0_p (PATTERN (XEXP (note
, 0))) == 1)
1070 delete_from_delay_slot (XEXP (note
, 0));
1074 /* The insn setting CC0 is our previous insn, but it may be in
1075 a delay slot. It will be the last insn in the delay slot, if
1077 rtx trial
= previous_insn (insn
);
1078 if (GET_CODE (trial
) == NOTE
)
1079 trial
= prev_nonnote_insn (trial
);
1080 if (sets_cc0_p (PATTERN (trial
)) != 1
1081 || FIND_REG_INC_NOTE (trial
, 0))
1083 if (PREV_INSN (NEXT_INSN (trial
)) == trial
)
1084 delete_insn (trial
);
1086 delete_from_delay_slot (trial
);
1094 /* Counters for delay-slot filling. */
1096 #define NUM_REORG_FUNCTIONS 2
1097 #define MAX_DELAY_HISTOGRAM 3
1098 #define MAX_REORG_PASSES 2
1100 static int num_insns_needing_delays
[NUM_REORG_FUNCTIONS
][MAX_REORG_PASSES
];
1102 static int num_filled_delays
[NUM_REORG_FUNCTIONS
][MAX_DELAY_HISTOGRAM
+1][MAX_REORG_PASSES
];
1104 static int reorg_pass_number
;
1107 note_delay_statistics (slots_filled
, index
)
1108 int slots_filled
, index
;
1110 num_insns_needing_delays
[index
][reorg_pass_number
]++;
1111 if (slots_filled
> MAX_DELAY_HISTOGRAM
)
1112 slots_filled
= MAX_DELAY_HISTOGRAM
;
1113 num_filled_delays
[index
][slots_filled
][reorg_pass_number
]++;
1116 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
1118 /* Optimize the following cases:
1120 1. When a conditional branch skips over only one instruction,
1121 use an annulling branch and put that insn in the delay slot.
Use either a branch that annuls when the condition is true or
1123 invert the test with a branch that annuls when the condition is
1124 false. This saves insns, since otherwise we must copy an insn
1127 (orig) (skip) (otherwise)
1128 Bcc.n L1 Bcc',a L1 Bcc,a L1'
1135 2. When a conditional branch skips over only one instruction,
1136 and after that, it unconditionally branches somewhere else,
1137 perform the similar optimization. This saves executing the
1138 second branch in the case where the inverted condition is true.
1145 INSN is a JUMP_INSN.
1147 This should be expanded to skip over N insns, where N is the number
1148 of delay slots required. */
1151 optimize_skip (insn
)
1154 register rtx trial
= next_nonnote_insn (insn
);
1155 rtx next_trial
= next_active_insn (trial
);
1160 flags
= get_jump_flags (insn
, JUMP_LABEL (insn
));
1163 || GET_CODE (trial
) != INSN
1164 || GET_CODE (PATTERN (trial
)) == SEQUENCE
1165 || recog_memoized (trial
) < 0
1166 || (! eligible_for_annul_false (insn
, 0, trial
, flags
)
1167 && ! eligible_for_annul_true (insn
, 0, trial
, flags
)))
1170 /* There are two cases where we are just executing one insn (we assume
1171 here that a branch requires only one insn; this should be generalized
1172 at some point): Where the branch goes around a single insn or where
1173 we have one insn followed by a branch to the same label we branch to.
1174 In both of these cases, inverting the jump and annulling the delay
1175 slot give the same effect in fewer insns. */
1176 if ((next_trial
== next_active_insn (JUMP_LABEL (insn
)))
1178 && GET_CODE (next_trial
) == JUMP_INSN
1179 && JUMP_LABEL (insn
) == JUMP_LABEL (next_trial
)
1180 && (simplejump_p (next_trial
)
1181 || GET_CODE (PATTERN (next_trial
)) == RETURN
)))
1183 if (eligible_for_annul_false (insn
, 0, trial
, flags
))
1185 if (invert_jump (insn
, JUMP_LABEL (insn
)))
1186 INSN_FROM_TARGET_P (trial
) = 1;
1187 else if (! eligible_for_annul_true (insn
, 0, trial
, flags
))
1191 delay_list
= add_to_delay_list (trial
, NULL_RTX
);
1192 next_trial
= next_active_insn (trial
);
1193 update_block (trial
, trial
);
1194 delete_insn (trial
);
1196 /* Also, if we are targeting an unconditional
1197 branch, thread our jump to the target of that branch. Don't
1198 change this into a RETURN here, because it may not accept what
1199 we have in the delay slot. We'll fix this up later. */
1200 if (next_trial
&& GET_CODE (next_trial
) == JUMP_INSN
1201 && (simplejump_p (next_trial
)
1202 || GET_CODE (PATTERN (next_trial
)) == RETURN
))
1204 target_label
= JUMP_LABEL (next_trial
);
1205 if (target_label
== 0)
1206 target_label
= find_end_label ();
1208 /* Recompute the flags based on TARGET_LABEL since threading
1209 the jump to TARGET_LABEL may change the direction of the
1210 jump (which may change the circumstances in which the
1211 delay slot is nullified). */
1212 flags
= get_jump_flags (insn
, target_label
);
1213 if (eligible_for_annul_true (insn
, 0, trial
, flags
))
1214 reorg_redirect_jump (insn
, target_label
);
1217 INSN_ANNULLED_BRANCH_P (insn
) = 1;
1225 /* Encode and return branch direction and prediction information for
1226 INSN assuming it will jump to LABEL.
1228 Non conditional branches return no direction information and
1229 are predicted as very likely taken. */
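/* The value built here is a bitmask of the ATTR_FLAG_* constants used
   below; as a sketch (hypothetical example only), a forward conditional
   branch judged likely taken would end up with

	flags = ATTR_FLAG_forward | ATTR_FLAG_likely;
*/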
1232 get_jump_flags (insn
, label
)
1237 /* get_jump_flags can be passed any insn with delay slots, these may
1238 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
1239 direction information, and only if they are conditional jumps.
1241 If LABEL is zero, then there is no way to determine the branch
1243 if (GET_CODE (insn
) == JUMP_INSN
1244 && (condjump_p (insn
) || condjump_in_parallel_p (insn
))
1245 && INSN_UID (insn
) <= max_uid
1247 && INSN_UID (label
) <= max_uid
)
1249 = (uid_to_ruid
[INSN_UID (label
)] > uid_to_ruid
[INSN_UID (insn
)])
1250 ? ATTR_FLAG_forward
: ATTR_FLAG_backward
;
1251 /* No valid direction information. */
/* If insn is a conditional branch, call mostly_true_jump to determine
   the branch prediction.
1258 Non conditional branches are predicted as very likely taken. */
1259 if (GET_CODE (insn
) == JUMP_INSN
1260 && (condjump_p (insn
) || condjump_in_parallel_p (insn
)))
1264 prediction
= mostly_true_jump (insn
, get_branch_condition (insn
, label
));
1268 flags
|= (ATTR_FLAG_very_likely
| ATTR_FLAG_likely
);
1271 flags
|= ATTR_FLAG_likely
;
1274 flags
|= ATTR_FLAG_unlikely
;
1277 flags
|= (ATTR_FLAG_very_unlikely
| ATTR_FLAG_unlikely
);
1285 flags
|= (ATTR_FLAG_very_likely
| ATTR_FLAG_likely
);
1290 /* Return 1 if INSN is a destination that will be branched to rarely (the
1291 return point of a function); return 2 if DEST will be branched to very
1292 rarely (a call to a function that doesn't return). Otherwise,
1296 rare_destination (insn
)
1302 for (; insn
; insn
= next
)
1304 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == SEQUENCE
)
1305 insn
= XVECEXP (PATTERN (insn
), 0, 0);
1307 next
= NEXT_INSN (insn
);
1309 switch (GET_CODE (insn
))
1314 /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
1315 don't scan past JUMP_INSNs, so any barrier we find here must
1316 have been after a CALL_INSN and hence mean the call doesn't
1320 if (GET_CODE (PATTERN (insn
)) == RETURN
)
1322 else if (simplejump_p (insn
)
1323 && jump_count
++ < 10)
1324 next
= JUMP_LABEL (insn
);
1330 /* If we got here it means we hit the end of the function. So this
1331 is an unlikely destination. */
1336 /* Return truth value of the statement that this branch
1337 is mostly taken. If we think that the branch is extremely likely
1338 to be taken, we return 2. If the branch is slightly more likely to be
1339 taken, return 1. If the branch is slightly less likely to be taken,
1340 return 0 and if the branch is highly unlikely to be taken, return -1.
1342 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
1345 mostly_true_jump (jump_insn
, condition
)
1346 rtx jump_insn
, condition
;
1348 rtx target_label
= JUMP_LABEL (jump_insn
);
1350 int rare_dest
= rare_destination (target_label
);
1351 int rare_fallthrough
= rare_destination (NEXT_INSN (jump_insn
));
1353 /* If branch probabilities are available, then use that number since it
1354 always gives a correct answer. */
1355 if (flag_branch_probabilities
)
1357 rtx note
= find_reg_note (jump_insn
, REG_BR_PROB
, 0);;
1360 int prob
= XINT (note
, 0);
1362 if (prob
>= REG_BR_PROB_BASE
* 9 / 10)
1364 else if (prob
>= REG_BR_PROB_BASE
/ 2)
1366 else if (prob
>= REG_BR_PROB_BASE
/ 10)
1373 /* If this is a branch outside a loop, it is highly unlikely. */
1374 if (GET_CODE (PATTERN (jump_insn
)) == SET
1375 && GET_CODE (SET_SRC (PATTERN (jump_insn
))) == IF_THEN_ELSE
1376 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn
)), 1)) == LABEL_REF
1377 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn
)), 1)))
1378 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn
)), 2)) == LABEL_REF
1379 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn
)), 2)))))
1384 /* If this is the test of a loop, it is very likely true. We scan
1385 backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
1386 before the next real insn, we assume the branch is to the top of
1388 for (insn
= PREV_INSN (target_label
);
1389 insn
&& GET_CODE (insn
) == NOTE
;
1390 insn
= PREV_INSN (insn
))
1391 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
)
1394 /* If this is a jump to the test of a loop, it is likely true. We scan
1395 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
1396 before the next real insn, we assume the branch is to the loop branch
1398 for (insn
= NEXT_INSN (target_label
);
1399 insn
&& GET_CODE (insn
) == NOTE
;
1400 insn
= PREV_INSN (insn
))
1401 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
)
1405 /* Look at the relative rarities of the fallthrough and destination. If
1406 they differ, we can predict the branch that way. */
1408 switch (rare_fallthrough
- rare_dest
)
1422 /* If we couldn't figure out what this jump was, assume it won't be
1423 taken. This should be rare. */
1427 /* EQ tests are usually false and NE tests are usually true. Also,
1428 most quantities are positive, so we can make the appropriate guesses
1429 about signed comparisons against zero. */
1430 switch (GET_CODE (condition
))
1433 /* Unconditional branch. */
1441 if (XEXP (condition
, 1) == const0_rtx
)
1446 if (XEXP (condition
, 1) == const0_rtx
)
1451 /* Predict backward branches usually take, forward branches usually not. If
1452 we don't know whether this is forward or backward, assume the branch
1453 will be taken, since most are. */
1454 return (target_label
== 0 || INSN_UID (jump_insn
) > max_uid
1455 || INSN_UID (target_label
) > max_uid
1456 || (uid_to_ruid
[INSN_UID (jump_insn
)]
1457 > uid_to_ruid
[INSN_UID (target_label
)]));;
1460 /* Return the condition under which INSN will branch to TARGET. If TARGET
1461 is zero, return the condition under which INSN will return. If INSN is
1462 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1463 type of jump, or it doesn't go to TARGET, return 0. */
1466 get_branch_condition (insn
, target
)
1470 rtx pat
= PATTERN (insn
);
1473 if (condjump_in_parallel_p (insn
))
1474 pat
= XVECEXP (pat
, 0, 0);
1476 if (GET_CODE (pat
) == RETURN
)
1477 return target
== 0 ? const_true_rtx
: 0;
1479 else if (GET_CODE (pat
) != SET
|| SET_DEST (pat
) != pc_rtx
)
1482 src
= SET_SRC (pat
);
1483 if (GET_CODE (src
) == LABEL_REF
&& XEXP (src
, 0) == target
)
1484 return const_true_rtx
;
1486 else if (GET_CODE (src
) == IF_THEN_ELSE
1487 && ((target
== 0 && GET_CODE (XEXP (src
, 1)) == RETURN
)
1488 || (GET_CODE (XEXP (src
, 1)) == LABEL_REF
1489 && XEXP (XEXP (src
, 1), 0) == target
))
1490 && XEXP (src
, 2) == pc_rtx
)
1491 return XEXP (src
, 0);
1493 else if (GET_CODE (src
) == IF_THEN_ELSE
1494 && ((target
== 0 && GET_CODE (XEXP (src
, 2)) == RETURN
)
1495 || (GET_CODE (XEXP (src
, 2)) == LABEL_REF
1496 && XEXP (XEXP (src
, 2), 0) == target
))
1497 && XEXP (src
, 1) == pc_rtx
)
1498 return gen_rtx (reverse_condition (GET_CODE (XEXP (src
, 0))),
1499 GET_MODE (XEXP (src
, 0)),
1500 XEXP (XEXP (src
, 0), 0), XEXP (XEXP (src
, 0), 1));
1505 /* Return non-zero if CONDITION is more strict than the condition of
1506 INSN, i.e., if INSN will always branch if CONDITION is true. */
1509 condition_dominates_p (condition
, insn
)
1513 rtx other_condition
= get_branch_condition (insn
, JUMP_LABEL (insn
));
1514 enum rtx_code code
= GET_CODE (condition
);
1515 enum rtx_code other_code
;
1517 if (rtx_equal_p (condition
, other_condition
)
1518 || other_condition
== const_true_rtx
)
1521 else if (condition
== const_true_rtx
|| other_condition
== 0)
1524 other_code
= GET_CODE (other_condition
);
1525 if (GET_RTX_LENGTH (code
) != 2 || GET_RTX_LENGTH (other_code
) != 2
1526 || ! rtx_equal_p (XEXP (condition
, 0), XEXP (other_condition
, 0))
1527 || ! rtx_equal_p (XEXP (condition
, 1), XEXP (other_condition
, 1)))
1530 return comparison_dominates_p (code
, other_code
);
1533 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1534 any insns already in the delay slot of JUMP. */
1537 redirect_with_delay_slots_safe_p (jump
, newlabel
, seq
)
1538 rtx jump
, newlabel
, seq
;
1540 int flags
, slots
, i
;
1541 rtx pat
= PATTERN (seq
);
1543 /* Make sure all the delay slots of this jump would still
1544 be valid after threading the jump. If they are still
1545 valid, then return non-zero. */
1547 flags
= get_jump_flags (jump
, newlabel
);
1548 for (i
= 1; i
< XVECLEN (pat
, 0); i
++)
1550 #ifdef ANNUL_IFFALSE_SLOTS
1551 (INSN_ANNULLED_BRANCH_P (jump
)
1552 && INSN_FROM_TARGET_P (XVECEXP (pat
, 0, i
)))
1553 ? eligible_for_annul_false (jump
, i
- 1,
1554 XVECEXP (pat
, 0, i
), flags
) :
1556 #ifdef ANNUL_IFTRUE_SLOTS
1557 (INSN_ANNULLED_BRANCH_P (jump
)
1558 && ! INSN_FROM_TARGET_P (XVECEXP (pat
, 0, i
)))
1559 ? eligible_for_annul_true (jump
, i
- 1,
1560 XVECEXP (pat
, 0, i
), flags
) :
1562 eligible_for_delay (jump
, i
-1, XVECEXP (pat
, 0, i
), flags
)))
1565 return (i
== XVECLEN (pat
, 0));
1568 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1569 any insns we wish to place in the delay slot of JUMP. */
1572 redirect_with_delay_list_safe_p (jump
, newlabel
, delay_list
)
1573 rtx jump
, newlabel
, delay_list
;
1578 /* Make sure all the insns in DELAY_LIST would still be
1579 valid after threading the jump. If they are still
1580 valid, then return non-zero. */
1582 flags
= get_jump_flags (jump
, newlabel
);
1583 for (li
= delay_list
, i
= 0; li
; li
= XEXP (li
, 1), i
++)
1585 #ifdef ANNUL_IFFALSE_SLOTS
1586 (INSN_ANNULLED_BRANCH_P (jump
)
1587 && INSN_FROM_TARGET_P (XEXP (li
, 0)))
1588 ? eligible_for_annul_false (jump
, i
, XEXP (li
, 0), flags
) :
1590 #ifdef ANNUL_IFTRUE_SLOTS
1591 (INSN_ANNULLED_BRANCH_P (jump
)
1592 && ! INSN_FROM_TARGET_P (XEXP (li
, 0)))
1593 ? eligible_for_annul_true (jump
, i
, XEXP (li
, 0), flags
) :
1595 eligible_for_delay (jump
, i
, XEXP (li
, 0), flags
)))
1598 return (li
== NULL
);
1602 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1603 the condition tested by INSN is CONDITION and the resources shown in
1604 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1605 from SEQ's delay list, in addition to whatever insns it may execute
(in DELAY_LIST).  SETS and NEEDED denote resources already set and
1607 needed while searching for delay slot insns. Return the concatenated
1608 delay list if possible, otherwise, return 0.
1610 SLOTS_TO_FILL is the total number of slots required by INSN, and
1611 PSLOTS_FILLED points to the number filled so far (also the number of
1612 insns in DELAY_LIST). It is updated with the number that have been
1613 filled from the SEQUENCE, if any.
1615 PANNUL_P points to a non-zero value if we already know that we need
1616 to annul INSN. If this routine determines that annulling is needed,
1617 it may set that value non-zero.
1619 PNEW_THREAD points to a location that is to receive the place at which
1620 execution should continue. */
1623 steal_delay_list_from_target (insn
, condition
, seq
, delay_list
,
1624 sets
, needed
, other_needed
,
1625 slots_to_fill
, pslots_filled
, pannul_p
,
1627 rtx insn
, condition
;
1630 struct resources
*sets
, *needed
, *other_needed
;
1637 int slots_remaining
= slots_to_fill
- *pslots_filled
;
1638 int total_slots_filled
= *pslots_filled
;
1639 rtx new_delay_list
= 0;
1640 int must_annul
= *pannul_p
;
1643 /* We can't do anything if there are more delay slots in SEQ than we
1644 can handle, or if we don't know that it will be a taken branch.
1645 We know that it will be a taken branch if it is either an unconditional
1646 branch or a conditional branch with a stricter branch condition.
1648 Also, exit if the branch has more than one set, since then it is computing
1649 other results that can't be ignored, e.g. the HPPA mov&branch instruction.
1650 ??? It may be possible to move other sets into INSN in addition to
1651 moving the instructions in the delay slots. */
1653 if (XVECLEN (seq
, 0) - 1 > slots_remaining
1654 || ! condition_dominates_p (condition
, XVECEXP (seq
, 0, 0))
1655 || ! single_set (XVECEXP (seq
, 0, 0)))
1658 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
1660 rtx trial
= XVECEXP (seq
, 0, i
);
1663 if (insn_references_resource_p (trial
, sets
, 0)
1664 || insn_sets_resource_p (trial
, needed
, 0)
1665 || insn_sets_resource_p (trial
, sets
, 0)
1667 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1669 || find_reg_note (trial
, REG_CC_USER
, NULL_RTX
)
1671 /* If TRIAL is from the fallthrough code of an annulled branch insn
1672 in SEQ, we cannot use it. */
1673 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq
, 0, 0))
1674 && ! INSN_FROM_TARGET_P (trial
)))
1677 /* If this insn was already done (usually in a previous delay slot),
1678 pretend we put it in our delay slot. */
1679 if (redundant_insn (trial
, insn
, new_delay_list
))
1682 /* We will end up re-vectoring this branch, so compute flags
1683 based on jumping to the new label. */
1684 flags
= get_jump_flags (insn
, JUMP_LABEL (XVECEXP (seq
, 0, 0)));
1687 && ((condition
== const_true_rtx
1688 || (! insn_sets_resource_p (trial
, other_needed
, 0)
1689 && ! may_trap_p (PATTERN (trial
)))))
1690 ? eligible_for_delay (insn
, total_slots_filled
, trial
, flags
)
1692 eligible_for_annul_false (insn
, total_slots_filled
, trial
, flags
)))
1694 temp
= copy_rtx (trial
);
1695 INSN_FROM_TARGET_P (temp
) = 1;
1696 new_delay_list
= add_to_delay_list (temp
, new_delay_list
);
1697 total_slots_filled
++;
1699 if (--slots_remaining
== 0)
1706 /* Show the place to which we will be branching. */
1707 *pnew_thread
= next_active_insn (JUMP_LABEL (XVECEXP (seq
, 0, 0)));
1709 /* Add any new insns to the delay list and update the count of the
1710 number of slots filled. */
1711 *pslots_filled
= total_slots_filled
;
1712 *pannul_p
= must_annul
;
1714 if (delay_list
== 0)
1715 return new_delay_list
;
1717 for (temp
= new_delay_list
; temp
; temp
= XEXP (temp
, 1))
1718 delay_list
= add_to_delay_list (XEXP (temp
, 0), delay_list
);
1723 /* Similar to steal_delay_list_from_target except that SEQ is on the
1724 fallthrough path of INSN. Here we only do something if the delay insn
1725 of SEQ is an unconditional branch. In that case we steal its delay slot
1726 for INSN since unconditional branches are much easier to fill. */
1729 steal_delay_list_from_fallthrough (insn
, condition
, seq
,
1730 delay_list
, sets
, needed
, other_needed
,
1731 slots_to_fill
, pslots_filled
, pannul_p
)
1732 rtx insn
, condition
;
1735 struct resources
*sets
, *needed
, *other_needed
;
1743 flags
= get_jump_flags (insn
, JUMP_LABEL (insn
));
1745 /* We can't do anything if SEQ's delay insn isn't an
1746 unconditional branch. */
1748 if (! simplejump_p (XVECEXP (seq
, 0, 0))
1749 && GET_CODE (PATTERN (XVECEXP (seq
, 0, 0))) != RETURN
)
1752 for (i
= 1; i
< XVECLEN (seq
, 0); i
++)
1754 rtx trial
= XVECEXP (seq
, 0, i
);
1756 /* If TRIAL sets CC0, stealing it will move it too far from the use
1758 if (insn_references_resource_p (trial
, sets
, 0)
1759 || insn_sets_resource_p (trial
, needed
, 0)
1760 || insn_sets_resource_p (trial
, sets
, 0)
1762 || sets_cc0_p (PATTERN (trial
))
1768 /* If this insn was already done, we don't need it. */
1769 if (redundant_insn (trial
, insn
, delay_list
))
1771 delete_from_delay_slot (trial
);
1776 && ((condition
== const_true_rtx
1777 || (! insn_sets_resource_p (trial
, other_needed
, 0)
1778 && ! may_trap_p (PATTERN (trial
)))))
1779 ? eligible_for_delay (insn
, *pslots_filled
, trial
, flags
)
1781 eligible_for_annul_true (insn
, *pslots_filled
, trial
, flags
)))
1783 delete_from_delay_slot (trial
);
1784 delay_list
= add_to_delay_list (trial
, delay_list
);
1786 if (++(*pslots_filled
) == slots_to_fill
)
1796 /* Try merging insns starting at THREAD which match exactly the insns in
1799 If all insns were matched and the insn was previously annulling, the
1800 annul bit will be cleared.
1802 For each insn that is merged, if the branch is or will be non-annulling,
1803 we delete the merged insn. */
1806 try_merge_delay_insns (insn
, thread
)
1809 rtx trial
, next_trial
;
1810 rtx delay_insn
= XVECEXP (PATTERN (insn
), 0, 0);
1811 int annul_p
= INSN_ANNULLED_BRANCH_P (delay_insn
);
1812 int slot_number
= 1;
1813 int num_slots
= XVECLEN (PATTERN (insn
), 0);
1814 rtx next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1815 struct resources set
, needed
;
1816 rtx merged_insns
= 0;
1820 flags
= get_jump_flags (delay_insn
, JUMP_LABEL (delay_insn
));
1822 CLEAR_RESOURCE (&needed
);
1823 CLEAR_RESOURCE (&set
);
1825 /* If this is not an annulling branch, take into account anything needed in
1826 NEXT_TO_MATCH. This prevents two increments from being incorrectly
1827 folded into one. If we are annulling, this would be the correct
1828 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH
1829 will essentially disable this optimization. This method is somewhat of
1830 a kludge, but I don't see a better way.) */
1832 mark_referenced_resources (next_to_match
, &needed
, 1);
1834 for (trial
= thread
; !stop_search_p (trial
, 1); trial
= next_trial
)
1836 rtx pat
= PATTERN (trial
);
1837 rtx oldtrial
= trial
;
1839 next_trial
= next_nonnote_insn (trial
);
1841 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1842 if (GET_CODE (trial
) == INSN
1843 && (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
))
1846 if (GET_CODE (next_to_match
) == GET_CODE (trial
)
1848 /* We can't share an insn that sets cc0. */
1849 && ! sets_cc0_p (pat
)
1851 && ! insn_references_resource_p (trial
, &set
, 1)
1852 && ! insn_sets_resource_p (trial
, &set
, 1)
1853 && ! insn_sets_resource_p (trial
, &needed
, 1)
1854 && (trial
= try_split (pat
, trial
, 0)) != 0
1855 /* Update next_trial, in case try_split succeeded. */
1856 && (next_trial
= next_nonnote_insn (trial
))
1857 /* Likewise THREAD. */
1858 && (thread
= oldtrial
== thread
? trial
: thread
)
1859 && rtx_equal_p (PATTERN (next_to_match
), PATTERN (trial
))
1860 /* Have to test this condition if annul condition is different
1861 from (and less restrictive than) non-annulling one. */
1862 && eligible_for_delay (delay_insn
, slot_number
- 1, trial
, flags
))
1867 update_block (trial
, thread
);
1868 if (trial
== thread
)
1869 thread
= next_active_insn (thread
);
1871 delete_insn (trial
);
1872 INSN_FROM_TARGET_P (next_to_match
) = 0;
1875 merged_insns
= gen_rtx (INSN_LIST
, VOIDmode
, trial
, merged_insns
);
1877 if (++slot_number
== num_slots
)
1880 next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
1882 mark_referenced_resources (next_to_match
, &needed
, 1);
1885 mark_set_resources (trial
, &set
, 0, 1);
1886 mark_referenced_resources (trial
, &needed
, 1);
1889 /* See if we stopped on a filled insn. If we did, try to see if its
1890 delay slots match. */
1891 if (slot_number
!= num_slots
1892 && trial
&& GET_CODE (trial
) == INSN
1893 && GET_CODE (PATTERN (trial
)) == SEQUENCE
1894 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial
), 0, 0)))
1896 rtx pat
= PATTERN (trial
);
1897 rtx filled_insn
= XVECEXP (pat
, 0, 0);
1899 /* Account for resources set/needed by the filled insn. */
1900 mark_set_resources (filled_insn
, &set
, 0, 1);
1901 mark_referenced_resources (filled_insn
, &needed
, 1);
1903 for (i
= 1; i
< XVECLEN (pat
, 0); i
++)
1905 rtx dtrial
= XVECEXP (pat
, 0, i
);
1907 if (! insn_references_resource_p (dtrial
, &set
, 1)
1908 && ! insn_sets_resource_p (dtrial
, &set
, 1)
1909 && ! insn_sets_resource_p (dtrial
, &needed
, 1)
1911 && ! sets_cc0_p (PATTERN (dtrial
))
1913 && rtx_equal_p (PATTERN (next_to_match
), PATTERN (dtrial
))
1914 && eligible_for_delay (delay_insn
, slot_number
- 1, dtrial
, flags
))
1918 update_block (dtrial
, thread
);
1919 delete_from_delay_slot (dtrial
);
1920 INSN_FROM_TARGET_P (next_to_match
) = 0;
1923 merged_insns
= gen_rtx (INSN_LIST
, SImode
, dtrial
,
1926 if (++slot_number
== num_slots
)
1929 next_to_match
= XVECEXP (PATTERN (insn
), 0, slot_number
);
/* If all insns in the delay slot have been matched and we were previously
   annulling the branch, we need not annul it any more.  In that case delete
   all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each insn
   in the delay list so that we know that it isn't only being used at the
   target.  */
1939 if (slot_number
== num_slots
&& annul_p
)
1941 for (; merged_insns
; merged_insns
= XEXP (merged_insns
, 1))
1943 if (GET_MODE (merged_insns
) == SImode
)
1945 update_block (XEXP (merged_insns
, 0), thread
);
1946 delete_from_delay_slot (XEXP (merged_insns
, 0));
1950 update_block (XEXP (merged_insns
, 0), thread
);
1951 delete_insn (XEXP (merged_insns
, 0));
1955 INSN_ANNULLED_BRANCH_P (delay_insn
) = 0;
1957 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1958 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn
), 0, i
)) = 0;
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   resource conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
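/* (A sketch of the "series of branches to the same label" case described
   above, with a made-up mnemonic: suppose two branches both jump to L1 and
   the first insn at L1 is "mov r2,r3".  That insn is a delay-slot candidate
   for both branches, and this routine is what lets the pass notice, while
   filling one of them, that an identical copy already sits in a slot filled
   earlier, so the candidate is redundant.)  */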
1982 redundant_insn (insn
, target
, delay_list
)
1987 rtx target_main
= target
;
1988 rtx ipat
= PATTERN (insn
);
1990 struct resources needed
, set
;
1993 /* Scan backwards looking for a match. */
1994 for (trial
= PREV_INSN (target
); trial
; trial
= PREV_INSN (trial
))
1996 if (GET_CODE (trial
) == CODE_LABEL
)
1999 if (GET_RTX_CLASS (GET_CODE (trial
)) != 'i')
2002 pat
= PATTERN (trial
);
2003 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
2006 if (GET_CODE (pat
) == SEQUENCE
)
2008 /* Stop for a CALL and its delay slots because it is difficult to
2009 track its resource needs correctly. */
2010 if (GET_CODE (XVECEXP (pat
, 0, 0)) == CALL_INSN
)
	  /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
	     slots because it is difficult to track its resource needs
	     correctly.  */
2017 #ifdef INSN_SETS_ARE_DELAYED
2018 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat
, 0, 0)))
2022 #ifdef INSN_REFERENCES_ARE_DELAYED
2023 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat
, 0, 0)))
2027 /* See if any of the insns in the delay slot match, updating
2028 resource requirements as we go. */
2029 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; i
--)
2030 if (GET_CODE (XVECEXP (pat
, 0, i
)) == GET_CODE (insn
)
2031 && rtx_equal_p (PATTERN (XVECEXP (pat
, 0, i
)), ipat
))
2034 /* If found a match, exit this loop early. */
2039 else if (GET_CODE (trial
) == GET_CODE (insn
) && rtx_equal_p (pat
, ipat
))
2043 /* If we didn't find an insn that matches, return 0. */
2047 /* See what resources this insn sets and needs. If they overlap, or
2048 if this insn references CC0, it can't be redundant. */
2050 CLEAR_RESOURCE (&needed
);
2051 CLEAR_RESOURCE (&set
);
2052 mark_set_resources (insn
, &set
, 0, 1);
2053 mark_referenced_resources (insn
, &needed
, 1);
2055 /* If TARGET is a SEQUENCE, get the main insn. */
2056 if (GET_CODE (target
) == INSN
&& GET_CODE (PATTERN (target
)) == SEQUENCE
)
2057 target_main
= XVECEXP (PATTERN (target
), 0, 0);
2059 if (resource_conflicts_p (&needed
, &set
)
2061 || reg_mentioned_p (cc0_rtx
, ipat
)
2063 /* The insn requiring the delay may not set anything needed or set by
2065 || insn_sets_resource_p (target_main
, &needed
, 1)
2066 || insn_sets_resource_p (target_main
, &set
, 1))
  /* Insns we pass may not set either NEEDED or SET, so merge them for
     simpler tests.  */
2071 needed
.memory
|= set
.memory
;
2072 needed
.unch_memory
|= set
.unch_memory
;
2073 IOR_HARD_REG_SET (needed
.regs
, set
.regs
);
2075 /* This insn isn't redundant if it conflicts with an insn that either is
2076 or will be in a delay slot of TARGET. */
2080 if (insn_sets_resource_p (XEXP (delay_list
, 0), &needed
, 1))
2082 delay_list
= XEXP (delay_list
, 1);
2085 if (GET_CODE (target
) == INSN
&& GET_CODE (PATTERN (target
)) == SEQUENCE
)
2086 for (i
= 1; i
< XVECLEN (PATTERN (target
), 0); i
++)
2087 if (insn_sets_resource_p (XVECEXP (PATTERN (target
), 0, i
), &needed
, 1))
  /* Scan backwards until we reach a label or an insn that uses something
     INSN sets or sets something INSN uses or sets.  */
2093 for (trial
= PREV_INSN (target
);
2094 trial
&& GET_CODE (trial
) != CODE_LABEL
;
2095 trial
= PREV_INSN (trial
))
2097 if (GET_CODE (trial
) != INSN
&& GET_CODE (trial
) != CALL_INSN
2098 && GET_CODE (trial
) != JUMP_INSN
)
2101 pat
= PATTERN (trial
);
2102 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
2105 if (GET_CODE (pat
) == SEQUENCE
)
2107 /* If this is a CALL_INSN and its delay slots, it is hard to track
2108 the resource needs properly, so give up. */
2109 if (GET_CODE (XVECEXP (pat
, 0, 0)) == CALL_INSN
)
	  /* If this is an INSN or JUMP_INSN with delayed effects, it
	     is hard to track the resource needs properly, so give up.  */
2115 #ifdef INSN_SETS_ARE_DELAYED
2116 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat
, 0, 0)))
2120 #ifdef INSN_REFERENCES_ARE_DELAYED
2121 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat
, 0, 0)))
2125 /* See if any of the insns in the delay slot match, updating
2126 resource requirements as we go. */
2127 for (i
= XVECLEN (pat
, 0) - 1; i
> 0; i
--)
2129 rtx candidate
= XVECEXP (pat
, 0, i
);
2131 /* If an insn will be annulled if the branch is false, it isn't
2132 considered as a possible duplicate insn. */
2133 if (rtx_equal_p (PATTERN (candidate
), ipat
)
2134 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat
, 0, 0))
2135 && INSN_FROM_TARGET_P (candidate
)))
2137 /* Show that this insn will be used in the sequel. */
2138 INSN_FROM_TARGET_P (candidate
) = 0;
2142 /* Unless this is an annulled insn from the target of a branch,
2143 we must stop if it sets anything needed or set by INSN. */
2144 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat
, 0, 0))
2145 || ! INSN_FROM_TARGET_P (candidate
))
2146 && insn_sets_resource_p (candidate
, &needed
, 1))
	  /* If the insn requiring the delay slot conflicts with INSN, we
	     must stop.  */
2153 if (insn_sets_resource_p (XVECEXP (pat
, 0, 0), &needed
, 1))
2158 /* See if TRIAL is the same as INSN. */
2159 pat
= PATTERN (trial
);
2160 if (rtx_equal_p (pat
, ipat
))
2163 /* Can't go any further if TRIAL conflicts with INSN. */
2164 if (insn_sets_resource_p (trial
, &needed
, 1))
/* Return 1 if THREAD can only be executed in one way.  If LABEL is non-zero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is non-zero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once, or we pass a label other than LABEL before
   finding an active insn, we do not own this thread.  */

static int
own_thread_p (thread, label, allow_fallthrough)
     rtx thread;
     rtx label;
     int allow_fallthrough;
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end.  */
  if (thread == 0)
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
	&& (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    if (insn == 0
	|| GET_CODE (insn) == CODE_LABEL
	|| (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}
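/* (Note added for clarity: "owning" a thread in the sense tested above is
   what allows fill_slots_from_thread to delete an insn from the thread after
   moving it into a delay slot; when the thread is not owned -- other code can
   also reach it -- insns are only copied, never removed.)  */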
/* Find the number of the basic block that starts closest to INSN.  Return -1
   if we couldn't find such a basic block.  */

static int
find_basic_block (insn)
     rtx insn;
{
  int i;

  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */

  for (insn = prev_nonnote_insn (insn);
       insn && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    ;

  /* The start of the function is basic block zero.  */
  if (insn == 0)
    return 0;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && GET_CODE (insn) == CODE_LABEL;
       insn = next_nonnote_insn (insn))
    {
      for (i = 0; i < n_basic_blocks; i++)
	if (insn == basic_block_head[i])
	  return i;
    }

  return -1;
}
/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front
   of WHERE for mark_target_live_regs.  These markers will be deleted when
   reorg finishes.

   We used to try to update the live status of registers if WHERE is at
   the start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots.  */

static void
update_block (insn, where)
     rtx insn;
     rtx where;
{
  int b;

  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */

  b = find_basic_block (insn);
  if (b != -1)
    bb_ticks[b]++;
}
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
   the basic block containing the jump.  */

static int
reorg_redirect_jump (jump, nlabel)
     rtx jump;
     rtx nlabel;
{
  int b = find_basic_block (jump);

  if (b != -1)
    bb_ticks[b]++;

  return redirect_jump (jump, nlabel);
}
/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
   We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
   that reference values used in INSN.  If we find one, then we move the
   REG_DEAD note to INSN.

   This is needed to handle the case where a later insn (after INSN) has a
   REG_DEAD note for a register used by INSN, and this later insn subsequently
   gets moved before a CODE_LABEL because it is a redundant insn.  In this
   case, mark_target_live_regs may be confused into thinking the register
   is dead because it sees a REG_DEAD note immediately before a CODE_LABEL.  */

static void
update_reg_dead_notes (insn, delayed_insn)
     rtx insn, delayed_insn;
{
  rtx p, link, next;

  for (p = next_nonnote_insn (insn); p != delayed_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || GET_CODE (XEXP (link, 0)) != REG)
	  continue;

	if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
	  {
	    /* Move the REG_DEAD note from P to INSN.  */
	    remove_note (p, link);
	    XEXP (link, 1) = REG_NOTES (insn);
	    REG_NOTES (insn) = link;
	  }
      }
}
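/* (Example added for clarity, with made-up register numbers: if INSN is
   "(set (reg 2) (reg 1))" and some insn between INSN and DELAYED_INSN carries
   a "REG_DEAD (reg 1)" note, the loop above detaches that note and attaches
   it to INSN, since INSN references reg 1.)  */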
/* Called when an insn redundant with start_insn is deleted.  If there
   is a REG_DEAD note for the target of start_insn between start_insn
   and stop_insn, then the REG_DEAD note needs to be deleted since the
   value no longer dies there.

   If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
   confused into thinking the register is dead.  */

static void
fix_reg_dead_note (start_insn, stop_insn)
     rtx start_insn, stop_insn;
{
  rtx p, link, next;

  for (p = next_nonnote_insn (start_insn); p != stop_insn;
       p = next_nonnote_insn (p))
    for (link = REG_NOTES (p); link; link = next)
      {
	next = XEXP (link, 1);

	if (REG_NOTE_KIND (link) != REG_DEAD
	    || GET_CODE (XEXP (link, 0)) != REG)
	  continue;

	if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
	  {
	    remove_note (p, link);
	    return;
	  }
      }
}
/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.

   This handles the case of udivmodXi4 instructions which optimize their
   output depending on whether any REG_UNUSED notes are present.  We must
   make sure that INSN calculates as many results as REDUNDANT_INSN does.  */

static void
update_reg_unused_notes (insn, redundant_insn)
     rtx insn, redundant_insn;
{
  rtx link, next;

  for (link = REG_NOTES (insn); link; link = next)
    {
      next = XEXP (link, 1);

      if (REG_NOTE_KIND (link) != REG_UNUSED
	  || GET_CODE (XEXP (link, 0)) != REG)
	continue;

      if (! find_regno_note (redundant_insn, REG_UNUSED,
			     REGNO (XEXP (link, 0))))
	remove_note (insn, link);
    }
}
/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Used only by the next two functions.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (dest, x)
     rtx dest;
     rtx x;
{
  int first_regno, last_regno;
  int i;

  if (GET_CODE (dest) != REG
      && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
    return;

  if (GET_CODE (dest) == SUBREG)
    first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
  else
    first_regno = REGNO (dest);

  last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}
/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */

static rtx
next_insn_no_annul (insn)
     rtx insn;
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
	  insn = NEXT_INSN (insn);

      insn = NEXT_INSN (insn);
      if (insn && GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */
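/* (Illustration added for clarity, with made-up mnemonics: if the code at
   TARGET begins

	mov  r3,0		;# r3 is set before any use
	add  r4,r4,r3		;# r4 is used before being set

   then r3 may be dropped from the live set computed for TARGET, while r4
   must stay in it.)  */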
2470 find_dead_or_set_registers (target
, res
, jump_target
, jump_count
, set
, needed
)
2472 struct resources
*res
;
2475 struct resources set
, needed
;
2477 HARD_REG_SET scratch
;
2482 for (insn
= target
; insn
; insn
= next
)
2484 rtx this_jump_insn
= insn
;
2486 next
= NEXT_INSN (insn
);
2487 switch (GET_CODE (insn
))
2490 /* After a label, any pending dead registers that weren't yet
2491 used can be made dead. */
2492 AND_COMPL_HARD_REG_SET (pending_dead_regs
, needed
.regs
);
2493 AND_COMPL_HARD_REG_SET (res
->regs
, pending_dead_regs
);
2494 CLEAR_HARD_REG_SET (pending_dead_regs
);
2496 if (CODE_LABEL_NUMBER (insn
) < max_label_num_after_reload
)
2498 /* All spill registers are dead at a label, so kill all of the
2499 ones that aren't needed also. */
2500 COPY_HARD_REG_SET (scratch
, used_spill_regs
);
2501 AND_COMPL_HARD_REG_SET (scratch
, needed
.regs
);
2502 AND_COMPL_HARD_REG_SET (res
->regs
, scratch
);
2511 if (GET_CODE (PATTERN (insn
)) == USE
)
2513 /* If INSN is a USE made by update_block, we care about the
2514 underlying insn. Any registers set by the underlying insn
2515 are live since the insn is being done somewhere else. */
2516 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn
), 0))) == 'i')
2517 mark_set_resources (XEXP (PATTERN (insn
), 0), res
, 0, 1);
2519 /* All other USE insns are to be ignored. */
2522 else if (GET_CODE (PATTERN (insn
)) == CLOBBER
)
2524 else if (GET_CODE (PATTERN (insn
)) == SEQUENCE
)
2526 /* An unconditional jump can be used to fill the delay slot
2527 of a call, so search for a JUMP_INSN in any position. */
2528 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
2530 this_jump_insn
= XVECEXP (PATTERN (insn
), 0, i
);
2531 if (GET_CODE (this_jump_insn
) == JUMP_INSN
)
2537 if (GET_CODE (this_jump_insn
) == JUMP_INSN
)
2539 if (jump_count
++ < 10)
2541 if (simplejump_p (this_jump_insn
)
2542 || GET_CODE (PATTERN (this_jump_insn
)) == RETURN
)
2544 next
= JUMP_LABEL (this_jump_insn
);
2549 *jump_target
= JUMP_LABEL (this_jump_insn
);
2552 else if (condjump_p (this_jump_insn
)
2553 || condjump_in_parallel_p (this_jump_insn
))
2555 struct resources target_set
, target_res
;
2556 struct resources fallthrough_res
;
2558 /* We can handle conditional branches here by following
2559 both paths, and then IOR the results of the two paths
2560 together, which will give us registers that are dead
2561 on both paths. Since this is expensive, we give it
2562 a much higher cost than unconditional branches. The
2563 cost was chosen so that we will follow at most 1
2564 conditional branch. */
2567 if (jump_count
>= 10)
2570 mark_referenced_resources (insn
, &needed
, 1);
2572 /* For an annulled branch, mark_set_resources ignores slots
2573 filled by instructions from the target. This is correct
2574 if the branch is not taken. Since we are following both
2575 paths from the branch, we must also compute correct info
2576 if the branch is taken. We do this by inverting all of
2577 the INSN_FROM_TARGET_P bits, calling mark_set_resources,
2578 and then inverting the INSN_FROM_TARGET_P bits again. */
2580 if (GET_CODE (PATTERN (insn
)) == SEQUENCE
2581 && INSN_ANNULLED_BRANCH_P (this_jump_insn
))
2583 for (i
= 1; i
< XVECLEN (PATTERN (insn
), 0); i
++)
2584 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn
), 0, i
))
2585 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn
), 0, i
));
2588 mark_set_resources (insn
, &target_set
, 0, 1);
2590 for (i
= 1; i
< XVECLEN (PATTERN (insn
), 0); i
++)
2591 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn
), 0, i
))
2592 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn
), 0, i
));
2594 mark_set_resources (insn
, &set
, 0, 1);
2598 mark_set_resources (insn
, &set
, 0, 1);
2603 COPY_HARD_REG_SET (scratch
, target_set
.regs
);
2604 AND_COMPL_HARD_REG_SET (scratch
, needed
.regs
);
2605 AND_COMPL_HARD_REG_SET (target_res
.regs
, scratch
);
2607 fallthrough_res
= *res
;
2608 COPY_HARD_REG_SET (scratch
, set
.regs
);
2609 AND_COMPL_HARD_REG_SET (scratch
, needed
.regs
);
2610 AND_COMPL_HARD_REG_SET (fallthrough_res
.regs
, scratch
);
2612 find_dead_or_set_registers (JUMP_LABEL (this_jump_insn
),
2613 &target_res
, 0, jump_count
,
2614 target_set
, needed
);
2615 find_dead_or_set_registers (next
,
2616 &fallthrough_res
, 0, jump_count
,
2618 IOR_HARD_REG_SET (fallthrough_res
.regs
, target_res
.regs
);
2619 AND_HARD_REG_SET (res
->regs
, fallthrough_res
.regs
);
2627 /* Don't try this optimization if we expired our jump count
2628 above, since that would mean there may be an infinite loop
2629 in the function being compiled. */
2635 mark_referenced_resources (insn
, &needed
, 1);
2636 mark_set_resources (insn
, &set
, 0, 1);
2638 COPY_HARD_REG_SET (scratch
, set
.regs
);
2639 AND_COMPL_HARD_REG_SET (scratch
, needed
.regs
);
2640 AND_COMPL_HARD_REG_SET (res
->regs
, scratch
);
/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status from basic_block_live_regs and
   reg_renumber.  (By starting at a label following a BARRIER, we are immune
   to actions taken by reload and jump.)  Then we scan all insns between
   that point and our target.  For each CLOBBER (or for call-clobbered regs
   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
   a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  */
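/* (Note added for clarity: each cached entry also records the bb_ticks value
   of its basic block; update_block and reorg_redirect_jump bump that counter
   when they disturb a block, which is how the code below detects a stale
   entry and recomputes it.)  */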
2685 mark_target_live_regs (target
, res
)
2687 struct resources
*res
;
2691 struct target_info
*tinfo
;
2695 HARD_REG_SET scratch
;
2696 struct resources set
, needed
;
2699 /* Handle end of function. */
2702 *res
= end_of_function_needs
;
2706 /* We have to assume memory is needed, but the CC isn't. */
2708 res
->volatil
= res
->unch_memory
= 0;
2711 /* See if we have computed this value already. */
2712 for (tinfo
= target_hash_table
[INSN_UID (target
) % TARGET_HASH_PRIME
];
2713 tinfo
; tinfo
= tinfo
->next
)
2714 if (tinfo
->uid
== INSN_UID (target
))
2717 /* Start by getting the basic block number. If we have saved information,
2718 we can get it from there unless the insn at the start of the basic block
2719 has been deleted. */
2720 if (tinfo
&& tinfo
->block
!= -1
2721 && ! INSN_DELETED_P (basic_block_head
[tinfo
->block
]))
2725 b
= find_basic_block (target
);
2729 /* If the information is up-to-date, use it. Otherwise, we will
2731 if (b
== tinfo
->block
&& b
!= -1 && tinfo
->bb_tick
== bb_ticks
[b
])
2733 COPY_HARD_REG_SET (res
->regs
, tinfo
->live_regs
);
  /* Allocate a place to put our results and chain it into the hash table.  */
2741 tinfo
= (struct target_info
*) oballoc (sizeof (struct target_info
));
2742 tinfo
->uid
= INSN_UID (target
);
2744 tinfo
->next
= target_hash_table
[INSN_UID (target
) % TARGET_HASH_PRIME
];
2745 target_hash_table
[INSN_UID (target
) % TARGET_HASH_PRIME
] = tinfo
;
2748 CLEAR_HARD_REG_SET (pending_dead_regs
);
2750 /* If we found a basic block, get the live registers from it and update
2751 them with anything set or killed between its start and the insn before
2752 TARGET. Otherwise, we must assume everything is live. */
2755 regset regs_live
= basic_block_live_at_start
[b
];
2757 REGSET_ELT_TYPE bit
;
2759 rtx start_insn
, stop_insn
;
      /* Compute hard regs live at start of block -- this is the real hard regs
	 marked live, plus live pseudo regs that have been renumbered to
	 hard regs.  */
2765 REG_SET_TO_HARD_REG_SET (current_live_regs
, regs_live
);
2767 EXECUTE_IF_SET_IN_REG_SET (regs_live
, 0, i
,
2769 if ((regno
= reg_renumber
[i
]) >= 0)
2771 j
< regno
+ HARD_REGNO_NREGS (regno
,
2772 PSEUDO_REGNO_MODE (i
));
2774 SET_HARD_REG_BIT (current_live_regs
, j
);
2777 /* Get starting and ending insn, handling the case where each might
2779 start_insn
= (b
== 0 ? get_insns () : basic_block_head
[b
]);
2782 if (GET_CODE (start_insn
) == INSN
2783 && GET_CODE (PATTERN (start_insn
)) == SEQUENCE
)
2784 start_insn
= XVECEXP (PATTERN (start_insn
), 0, 0);
2786 if (GET_CODE (stop_insn
) == INSN
2787 && GET_CODE (PATTERN (stop_insn
)) == SEQUENCE
)
2788 stop_insn
= next_insn (PREV_INSN (stop_insn
));
2790 for (insn
= start_insn
; insn
!= stop_insn
;
2791 insn
= next_insn_no_annul (insn
))
2794 rtx real_insn
= insn
;
2796 /* If this insn is from the target of a branch, it isn't going to
2797 be used in the sequel. If it is used in both cases, this
2798 test will not be true. */
2799 if (INSN_FROM_TARGET_P (insn
))
2802 /* If this insn is a USE made by update_block, we care about the
2804 if (GET_CODE (insn
) == INSN
&& GET_CODE (PATTERN (insn
)) == USE
2805 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn
), 0))) == 'i')
2806 real_insn
= XEXP (PATTERN (insn
), 0);
2808 if (GET_CODE (real_insn
) == CALL_INSN
)
2810 /* CALL clobbers all call-used regs that aren't fixed except
2811 sp, ap, and fp. Do this before setting the result of the
2813 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2814 if (call_used_regs
[i
]
2815 && i
!= STACK_POINTER_REGNUM
&& i
!= FRAME_POINTER_REGNUM
2816 && i
!= ARG_POINTER_REGNUM
2817 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2818 && i
!= HARD_FRAME_POINTER_REGNUM
2820 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2821 && ! (i
== ARG_POINTER_REGNUM
&& fixed_regs
[i
])
2823 #ifdef PIC_OFFSET_TABLE_REGNUM
2824 && ! (i
== PIC_OFFSET_TABLE_REGNUM
&& flag_pic
)
2827 CLEAR_HARD_REG_BIT (current_live_regs
, i
);
2829 /* A CALL_INSN sets any global register live, since it may
2830 have been modified by the call. */
2831 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2833 SET_HARD_REG_BIT (current_live_regs
, i
);
2836 /* Mark anything killed in an insn to be deadened at the next
2837 label. Ignore USE insns; the only REG_DEAD notes will be for
2838 parameters. But they might be early. A CALL_INSN will usually
2839 clobber registers used for parameters. It isn't worth bothering
2840 with the unlikely case when it won't. */
2841 if ((GET_CODE (real_insn
) == INSN
2842 && GET_CODE (PATTERN (real_insn
)) != USE
2843 && GET_CODE (PATTERN (real_insn
)) != CLOBBER
)
2844 || GET_CODE (real_insn
) == JUMP_INSN
2845 || GET_CODE (real_insn
) == CALL_INSN
)
2847 for (link
= REG_NOTES (real_insn
); link
; link
= XEXP (link
, 1))
2848 if (REG_NOTE_KIND (link
) == REG_DEAD
2849 && GET_CODE (XEXP (link
, 0)) == REG
2850 && REGNO (XEXP (link
, 0)) < FIRST_PSEUDO_REGISTER
)
2852 int first_regno
= REGNO (XEXP (link
, 0));
2855 + HARD_REGNO_NREGS (first_regno
,
2856 GET_MODE (XEXP (link
, 0))));
2858 for (i
= first_regno
; i
< last_regno
; i
++)
2859 SET_HARD_REG_BIT (pending_dead_regs
, i
);
2862 note_stores (PATTERN (real_insn
), update_live_status
);
2864 /* If any registers were unused after this insn, kill them.
2865 These notes will always be accurate. */
2866 for (link
= REG_NOTES (real_insn
); link
; link
= XEXP (link
, 1))
2867 if (REG_NOTE_KIND (link
) == REG_UNUSED
2868 && GET_CODE (XEXP (link
, 0)) == REG
2869 && REGNO (XEXP (link
, 0)) < FIRST_PSEUDO_REGISTER
)
2871 int first_regno
= REGNO (XEXP (link
, 0));
2874 + HARD_REGNO_NREGS (first_regno
,
2875 GET_MODE (XEXP (link
, 0))));
2877 for (i
= first_regno
; i
< last_regno
; i
++)
2878 CLEAR_HARD_REG_BIT (current_live_regs
, i
);
2882 else if (GET_CODE (real_insn
) == CODE_LABEL
)
2884 /* A label clobbers the pending dead registers since neither
2885 reload nor jump will propagate a value across a label. */
2886 AND_COMPL_HARD_REG_SET (current_live_regs
, pending_dead_regs
);
2887 CLEAR_HARD_REG_SET (pending_dead_regs
);
2890 /* The beginning of the epilogue corresponds to the end of the
2891 RTL chain when there are no epilogue insns. Certain resources
2892 are implicitly required at that point. */
2893 else if (GET_CODE (real_insn
) == NOTE
2894 && NOTE_LINE_NUMBER (real_insn
) == NOTE_INSN_EPILOGUE_BEG
)
2895 IOR_HARD_REG_SET (current_live_regs
, start_of_epilogue_needs
.regs
);
2898 COPY_HARD_REG_SET (res
->regs
, current_live_regs
);
2900 tinfo
->bb_tick
= bb_ticks
[b
];
2903 /* We didn't find the start of a basic block. Assume everything
2904 in use. This should happen only extremely rarely. */
2905 SET_HARD_REG_SET (res
->regs
);
2907 CLEAR_RESOURCE (&set
);
2908 CLEAR_RESOURCE (&needed
);
2910 jump_insn
= find_dead_or_set_registers (target
, res
, &jump_target
, 0,
2913 /* If we hit an unconditional branch, we have another way of finding out
2914 what is live: we can see what is live at the branch target and include
2915 anything used but not set before the branch. The only things that are
2916 live are those that are live using the above test and the test below. */
2920 struct resources new_resources
;
2921 rtx stop_insn
= next_active_insn (jump_insn
);
2923 mark_target_live_regs (next_active_insn (jump_target
), &new_resources
);
2924 CLEAR_RESOURCE (&set
);
2925 CLEAR_RESOURCE (&needed
);
2927 /* Include JUMP_INSN in the needed registers. */
2928 for (insn
= target
; insn
!= stop_insn
; insn
= next_active_insn (insn
))
2930 mark_referenced_resources (insn
, &needed
, 1);
2932 COPY_HARD_REG_SET (scratch
, needed
.regs
);
2933 AND_COMPL_HARD_REG_SET (scratch
, set
.regs
);
2934 IOR_HARD_REG_SET (new_resources
.regs
, scratch
);
2936 mark_set_resources (insn
, &set
, 0, 1);
2939 AND_HARD_REG_SET (res
->regs
, new_resources
.regs
);
2942 COPY_HARD_REG_SET (tinfo
->live_regs
, res
->regs
);
/* Scan a function looking for insns that need a delay slot and find insns to
   put into the delay slot.

   NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
   as calls).  We do these first since we don't want jump insns (that are
   easier to fill) to get the only insns that could be used for non-jump insns.
   When it is zero, only try to fill JUMP_INSNs.

   When slots are filled in this manner, the insns (including the
   delay_insn) are put together in a SEQUENCE rtx.  In this fashion,
   it is possible to tell whether a delay slot has really been filled
   or not.  `final' knows how to deal with this, by communicating
   through FINAL_SEQUENCE.  */
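/* (A sketch of the rtl produced when a slot is filled -- shape only,
   operands omitted:

	(insn (sequence [(call_insn ...)	;# insn needing the slot
			 (insn ...)]))		;# insn filling the slot

   element 0 of the SEQUENCE is always the insn needing the delay slots; the
   remaining elements are the insns placed in them.)  */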
2960 fill_simple_delay_slots (first
, non_jumps_p
)
2964 register rtx insn
, pat
, trial
, next_trial
;
2966 int num_unfilled_slots
= unfilled_slots_next
- unfilled_slots_base
;
2967 struct resources needed
, set
;
2968 int slots_to_fill
, slots_filled
;
2971 for (i
= 0; i
< num_unfilled_slots
; i
++)
2974 /* Get the next insn to fill. If it has already had any slots assigned,
2975 we can't do anything with it. Maybe we'll improve this later. */
2977 insn
= unfilled_slots_base
[i
];
2979 || INSN_DELETED_P (insn
)
2980 || (GET_CODE (insn
) == INSN
2981 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
2982 || (GET_CODE (insn
) == JUMP_INSN
&& non_jumps_p
)
2983 || (GET_CODE (insn
) != JUMP_INSN
&& ! non_jumps_p
))
2986 if (GET_CODE (insn
) == JUMP_INSN
)
2987 flags
= get_jump_flags (insn
, JUMP_LABEL (insn
));
2989 flags
= get_jump_flags (insn
, NULL_RTX
);
2990 slots_to_fill
= num_delay_slots (insn
);
2991 if (slots_to_fill
== 0)
      /* This insn needs, or can use, some delay slots.  SLOTS_TO_FILL
	 says how many.  After initialization, first try optimizing

	 call _foo		call _foo
	 nop			add %o7,.-L1,%o7
	 b,a L1
	 nop

	 If this case applies, the delay slot of the call is filled with
	 the unconditional jump.  This is done first to avoid having the
	 delay slot of the call filled in the backward scan.  Also, since
	 the unconditional jump is likely to also have a delay slot, that
	 insn must exist when it is subsequently scanned.

	 This is tried on each insn with delay slots as some machines
	 have insns which perform calls, but are not represented as
	 CALL_INSNs.  */
3015 if ((trial
= next_active_insn (insn
))
3016 && GET_CODE (trial
) == JUMP_INSN
3017 && simplejump_p (trial
)
3018 && eligible_for_delay (insn
, slots_filled
, trial
, flags
)
3019 && no_labels_between_p (insn
, trial
))
3023 delay_list
= add_to_delay_list (trial
, delay_list
);
3025 /* TRIAL may have had its delay slot filled, then unfilled. When
3026 the delay slot is unfilled, TRIAL is placed back on the unfilled
3027 slots obstack. Unfortunately, it is placed on the end of the
3028 obstack, not in its original location. Therefore, we must search
3029 from entry i + 1 to the end of the unfilled slots obstack to
3030 try and find TRIAL. */
3031 tmp
= &unfilled_slots_base
[i
+ 1];
3032 while (*tmp
!= trial
&& tmp
!= unfilled_slots_next
)
3035 /* Remove the unconditional jump from consideration for delay slot
3036 filling and unthread it. */
3040 rtx next
= NEXT_INSN (trial
);
3041 rtx prev
= PREV_INSN (trial
);
3043 NEXT_INSN (prev
) = next
;
3045 PREV_INSN (next
) = prev
;
3049 /* Now, scan backwards from the insn to search for a potential
3050 delay-slot candidate. Stop searching when a label or jump is hit.
3052 For each candidate, if it is to go into the delay slot (moved
3053 forward in execution sequence), it must not need or set any resources
3054 that were set by later insns and must not set any resources that
3055 are needed for those insns.
3057 The delay slot insn itself sets resources unless it is a call
3058 (in which case the called routine, not the insn itself, is doing
3061 if (slots_filled
< slots_to_fill
)
3063 CLEAR_RESOURCE (&needed
);
3064 CLEAR_RESOURCE (&set
);
3065 mark_set_resources (insn
, &set
, 0, 0);
3066 mark_referenced_resources (insn
, &needed
, 0);
3068 for (trial
= prev_nonnote_insn (insn
); ! stop_search_p (trial
, 1);
3071 next_trial
= prev_nonnote_insn (trial
);
3073 /* This must be an INSN or CALL_INSN. */
3074 pat
= PATTERN (trial
);
3076 /* USE and CLOBBER at this level was just for flow; ignore it. */
3077 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
3080 /* Check for resource conflict first, to avoid unnecessary
3082 if (! insn_references_resource_p (trial
, &set
, 1)
3083 && ! insn_sets_resource_p (trial
, &set
, 1)
3084 && ! insn_sets_resource_p (trial
, &needed
, 1)
3086 /* Can't separate set of cc0 from its use. */
3087 && ! (reg_mentioned_p (cc0_rtx
, pat
)
3088 && ! sets_cc0_p (cc0_rtx
, pat
))
3092 trial
= try_split (pat
, trial
, 1);
3093 next_trial
= prev_nonnote_insn (trial
);
3094 if (eligible_for_delay (insn
, slots_filled
, trial
, flags
))
3096 /* In this case, we are searching backward, so if we
3097 find insns to put on the delay list, we want
3098 to put them at the head, rather than the
3099 tail, of the list. */
3101 update_reg_dead_notes (trial
, insn
);
3102 delay_list
= gen_rtx (INSN_LIST
, VOIDmode
,
3104 update_block (trial
, trial
);
3105 delete_insn (trial
);
3106 if (slots_to_fill
== ++slots_filled
)
3112 mark_set_resources (trial
, &set
, 0, 1);
3113 mark_referenced_resources (trial
, &needed
, 1);
3117 /* If all needed slots haven't been filled, we come here. */
3119 /* Try to optimize case of jumping around a single insn. */
3120 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
3121 if (slots_filled
!= slots_to_fill
3123 && GET_CODE (insn
) == JUMP_INSN
3124 && (condjump_p (insn
) || condjump_in_parallel_p (insn
)))
3126 delay_list
= optimize_skip (insn
);
      /* Try to get insns from beyond the insn needing the delay slot.
	 These insns can neither set nor reference resources set in insns
	 being skipped, cannot set resources in the insn being skipped,
	 and, if this is a CALL_INSN (or a CALL_INSN is passed), cannot
	 trap (because the call might not return).

	 There used to be code which continued past the target label if
	 we saw all uses of the target label.  This code did not work,
	 because it failed to account for some instructions which were
	 both annulled and marked as from the target.  This can happen as
	 a result of optimize_skip.  Since this code was redundant with
	 fill_eager_delay_slots anyway, it was just deleted.  */
3145 if (slots_filled
!= slots_to_fill
3146 && (GET_CODE (insn
) != JUMP_INSN
3147 || ((condjump_p (insn
) || condjump_in_parallel_p (insn
))
3148 && ! simplejump_p (insn
)
3149 && JUMP_LABEL (insn
) != 0)))
3152 int maybe_never
= 0;
3153 struct resources needed_at_jump
;
3155 CLEAR_RESOURCE (&needed
);
3156 CLEAR_RESOURCE (&set
);
3158 if (GET_CODE (insn
) == CALL_INSN
)
3160 mark_set_resources (insn
, &set
, 0, 1);
3161 mark_referenced_resources (insn
, &needed
, 1);
3166 mark_set_resources (insn
, &set
, 0, 1);
3167 mark_referenced_resources (insn
, &needed
, 1);
3168 if (GET_CODE (insn
) == JUMP_INSN
)
3169 target
= JUMP_LABEL (insn
);
3172 for (trial
= next_nonnote_insn (insn
); trial
; trial
= next_trial
)
3174 rtx pat
, trial_delay
;
3176 next_trial
= next_nonnote_insn (trial
);
3178 if (GET_CODE (trial
) == CODE_LABEL
3179 || GET_CODE (trial
) == BARRIER
)
3182 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
3183 pat
= PATTERN (trial
);
3185 /* Stand-alone USE and CLOBBER are just for flow. */
3186 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
3189 /* If this already has filled delay slots, get the insn needing
3191 if (GET_CODE (pat
) == SEQUENCE
)
3192 trial_delay
= XVECEXP (pat
, 0, 0);
3194 trial_delay
= trial
;
3196 /* If this is a jump insn to our target, indicate that we have
3197 seen another jump to it. If we aren't handling a conditional
3198 jump, stop our search. Otherwise, compute the needs at its
3199 target and add them to NEEDED. */
3200 if (GET_CODE (trial_delay
) == JUMP_INSN
)
3204 else if (JUMP_LABEL (trial_delay
) != target
)
3206 mark_target_live_regs
3207 (next_active_insn (JUMP_LABEL (trial_delay
)),
3209 needed
.memory
|= needed_at_jump
.memory
;
3210 needed
.unch_memory
|= needed_at_jump
.unch_memory
;
3211 IOR_HARD_REG_SET (needed
.regs
, needed_at_jump
.regs
);
3215 /* See if we have a resource problem before we try to
3218 && GET_CODE (pat
) != SEQUENCE
3219 && ! insn_references_resource_p (trial
, &set
, 1)
3220 && ! insn_sets_resource_p (trial
, &set
, 1)
3221 && ! insn_sets_resource_p (trial
, &needed
, 1)
3223 && ! (reg_mentioned_p (cc0_rtx
, pat
) && ! sets_cc0_p (pat
))
3225 && ! (maybe_never
&& may_trap_p (pat
))
3226 && (trial
= try_split (pat
, trial
, 0))
3227 && eligible_for_delay (insn
, slots_filled
, trial
, flags
))
3229 next_trial
= next_nonnote_insn (trial
);
3230 delay_list
= add_to_delay_list (trial
, delay_list
);
3233 if (reg_mentioned_p (cc0_rtx
, pat
))
3234 link_cc0_insns (trial
);
3237 delete_insn (trial
);
3238 if (slots_to_fill
== ++slots_filled
)
3243 mark_set_resources (trial
, &set
, 0, 1);
3244 mark_referenced_resources (trial
, &needed
, 1);
3246 /* Ensure we don't put insns between the setting of cc and the
3247 comparison by moving a setting of cc into an earlier delay
3248 slot since these insns could clobber the condition code. */
3251 /* If this is a call or jump, we might not get here. */
3252 if (GET_CODE (trial_delay
) == CALL_INSN
3253 || GET_CODE (trial_delay
) == JUMP_INSN
)
3257 /* If there are slots left to fill and our search was stopped by an
3258 unconditional branch, try the insn at the branch target. We can
3259 redirect the branch if it works.
3261 Don't do this if the insn at the branch target is a branch. */
3262 if (slots_to_fill
!= slots_filled
3264 && GET_CODE (trial
) == JUMP_INSN
3265 && simplejump_p (trial
)
3266 && (target
== 0 || JUMP_LABEL (trial
) == target
)
3267 && (next_trial
= next_active_insn (JUMP_LABEL (trial
))) != 0
3268 && ! (GET_CODE (next_trial
) == INSN
3269 && GET_CODE (PATTERN (next_trial
)) == SEQUENCE
)
3270 && GET_CODE (next_trial
) != JUMP_INSN
3271 && ! insn_references_resource_p (next_trial
, &set
, 1)
3272 && ! insn_sets_resource_p (next_trial
, &set
, 1)
3273 && ! insn_sets_resource_p (next_trial
, &needed
, 1)
3275 && ! reg_mentioned_p (cc0_rtx
, PATTERN (next_trial
))
3277 && ! (maybe_never
&& may_trap_p (PATTERN (next_trial
)))
3278 && (next_trial
= try_split (PATTERN (next_trial
), next_trial
, 0))
3279 && eligible_for_delay (insn
, slots_filled
, next_trial
, flags
))
3281 rtx new_label
= next_active_insn (next_trial
);
3284 new_label
= get_label_before (new_label
);
3286 new_label
= find_end_label ();
3289 = add_to_delay_list (copy_rtx (next_trial
), delay_list
);
3291 reorg_redirect_jump (trial
, new_label
);
3293 /* If we merged because we both jumped to the same place,
3294 redirect the original insn also. */
3296 reorg_redirect_jump (insn
, new_label
);
3300 /* If this is an unconditional jump, then try to get insns from the
3301 target of the jump. */
3302 if (GET_CODE (insn
) == JUMP_INSN
3303 && simplejump_p (insn
)
3304 && slots_filled
!= slots_to_fill
)
3306 = fill_slots_from_thread (insn
, const_true_rtx
,
3307 next_active_insn (JUMP_LABEL (insn
)),
3309 own_thread_p (JUMP_LABEL (insn
),
3310 JUMP_LABEL (insn
), 0),
3311 0, slots_to_fill
, &slots_filled
);
3314 unfilled_slots_base
[i
]
3315 = emit_delay_sequence (insn
, delay_list
,
3316 slots_filled
, slots_to_fill
);
3318 if (slots_to_fill
== slots_filled
)
3319 unfilled_slots_base
[i
] = 0;
3321 note_delay_statistics (slots_filled
, 0);
3324 #ifdef DELAY_SLOTS_FOR_EPILOGUE
  /* See if the epilogue needs any delay slots.  Try to fill them if so.
     The only thing we can do is scan backwards from the end of the
     function.  If we did this in a previous pass, it is incorrect to do it
     again.  */
3329 if (current_function_epilogue_delay_list
)
3332 slots_to_fill
= DELAY_SLOTS_FOR_EPILOGUE
;
3333 if (slots_to_fill
== 0)
3337 CLEAR_RESOURCE (&set
);
3339 /* The frame pointer and stack pointer are needed at the beginning of
3340 the epilogue, so instructions setting them can not be put in the
3341 epilogue delay slot. However, everything else needed at function
3342 end is safe, so we don't want to use end_of_function_needs here. */
3343 CLEAR_RESOURCE (&needed
);
3344 if (frame_pointer_needed
)
3346 SET_HARD_REG_BIT (needed
.regs
, FRAME_POINTER_REGNUM
);
3347 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3348 SET_HARD_REG_BIT (needed
.regs
, HARD_FRAME_POINTER_REGNUM
);
3350 #ifdef EXIT_IGNORE_STACK
3351 if (! EXIT_IGNORE_STACK
)
3353 SET_HARD_REG_BIT (needed
.regs
, STACK_POINTER_REGNUM
);
3356 SET_HARD_REG_BIT (needed
.regs
, STACK_POINTER_REGNUM
);
3358 #ifdef EPILOGUE_USES
3359 for (i
= 0; i
<FIRST_PSEUDO_REGISTER
; i
++)
3361 if (EPILOGUE_USES (i
))
3362 SET_HARD_REG_BIT (needed
.regs
, i
);
3366 for (trial
= get_last_insn (); ! stop_search_p (trial
, 1);
3367 trial
= PREV_INSN (trial
))
3369 if (GET_CODE (trial
) == NOTE
)
3371 pat
= PATTERN (trial
);
3372 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
3375 if (! insn_references_resource_p (trial
, &set
, 1)
3376 && ! insn_sets_resource_p (trial
, &needed
, 1)
3377 && ! insn_sets_resource_p (trial
, &set
, 1)
3379 /* Don't want to mess with cc0 here. */
3380 && ! reg_mentioned_p (cc0_rtx
, pat
)
3384 trial
= try_split (pat
, trial
, 1);
3385 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial
, slots_filled
))
3387 /* Here as well we are searching backward, so put the
3388 insns we find on the head of the list. */
3390 current_function_epilogue_delay_list
3391 = gen_rtx (INSN_LIST
, VOIDmode
, trial
,
3392 current_function_epilogue_delay_list
);
3393 mark_referenced_resources (trial
, &end_of_function_needs
, 1);
3394 update_block (trial
, trial
);
3395 delete_insn (trial
);
3397 /* Clear deleted bit so final.c will output the insn. */
3398 INSN_DELETED_P (trial
) = 0;
3400 if (slots_to_fill
== ++slots_filled
)
3406 mark_set_resources (trial
, &set
, 0, 1);
3407 mark_referenced_resources (trial
, &needed
, 1);
3410 note_delay_statistics (slots_filled
, 0);
/* Try to find insns to place in delay slots.

   INSN is the jump needing SLOTS_TO_FILL delay slots.  It tests CONDITION
   or is an unconditional branch if CONDITION is const_true_rtx.
   *PSLOTS_FILLED is updated with the number of slots that we have filled.

   THREAD is a flow-of-control thread: either the insns to be executed if
   the branch is true or those to be executed if it is false;
   THREAD_IF_TRUE says which.

   OPPOSITE_THREAD is the thread in the opposite direction.  It is used
   to see if any potential delay slot insns set things needed there.

   LIKELY is non-zero if it is extremely likely that the branch will be
   taken and THREAD_IF_TRUE is set.  This is used for the branch at the
   end of a loop back up to the top.

   OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
   thread.  I.e., it is the fallthrough code of our jump or the target of the
   jump when we are the only jump going there.

   If OWN_THREAD is false, it must be the "true" thread of a jump.  In that
   case, we can only take insns from the head of the thread for our delay
   slot.  We then adjust the jump to point after the insns we have taken.  */
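/* (Note added for clarity on the non-owned case above: since the thread may
   also be reached from elsewhere, its insns are copied into the delay slots
   with INSN_FROM_TARGET_P set rather than deleted, and INSN is later
   redirected past the insns that were duplicated -- see the new_thread
   handling near the end of this function.)  */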
3439 fill_slots_from_thread (insn
, condition
, thread
, opposite_thread
, likely
,
3440 thread_if_true
, own_thread
, own_opposite_thread
,
3441 slots_to_fill
, pslots_filled
)
3444 rtx thread
, opposite_thread
;
3447 int own_thread
, own_opposite_thread
;
3448 int slots_to_fill
, *pslots_filled
;
3452 struct resources opposite_needed
, set
, needed
;
3458 /* Validate our arguments. */
3459 if ((condition
== const_true_rtx
&& ! thread_if_true
)
3460 || (! own_thread
&& ! thread_if_true
))
3463 flags
= get_jump_flags (insn
, JUMP_LABEL (insn
));
3465 /* If our thread is the end of subroutine, we can't get any delay
3470 /* If this is an unconditional branch, nothing is needed at the
3471 opposite thread. Otherwise, compute what is needed there. */
3472 if (condition
== const_true_rtx
)
3473 CLEAR_RESOURCE (&opposite_needed
);
3475 mark_target_live_regs (opposite_thread
, &opposite_needed
);
3477 /* If the insn at THREAD can be split, do it here to avoid having to
3478 update THREAD and NEW_THREAD if it is done in the loop below. Also
3479 initialize NEW_THREAD. */
3481 new_thread
= thread
= try_split (PATTERN (thread
), thread
, 0);
  /* Scan insns at THREAD.  We are looking for an insn that can be removed
     from THREAD (it neither sets nor references resources that were set
     ahead of it and it doesn't set anything needed by the insns ahead of
     it) and that either can be placed in an annulling insn or isn't
     needed at OPPOSITE_THREAD.  */
3489 CLEAR_RESOURCE (&needed
);
3490 CLEAR_RESOURCE (&set
);
3492 /* If we do not own this thread, we must stop as soon as we find
3493 something that we can't put in a delay slot, since all we can do
3494 is branch into THREAD at a later point. Therefore, labels stop
3495 the search if this is not the `true' thread. */
3497 for (trial
= thread
;
3498 ! stop_search_p (trial
, ! thread_if_true
) && (! lose
|| own_thread
);
3499 trial
= next_nonnote_insn (trial
))
3503 /* If we have passed a label, we no longer own this thread. */
3504 if (GET_CODE (trial
) == CODE_LABEL
)
3510 pat
= PATTERN (trial
);
3511 if (GET_CODE (pat
) == USE
|| GET_CODE (pat
) == CLOBBER
)
3514 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
3515 don't separate or copy insns that set and use CC0. */
3516 if (! insn_references_resource_p (trial
, &set
, 1)
3517 && ! insn_sets_resource_p (trial
, &set
, 1)
3518 && ! insn_sets_resource_p (trial
, &needed
, 1)
3520 && ! (reg_mentioned_p (cc0_rtx
, pat
)
3521 && (! own_thread
|| ! sets_cc0_p (pat
)))
	  /* If TRIAL is redundant with some insn before INSN, we don't
	     actually need to add it to the delay list; we can merely pretend
	     we did.  */
3530 if (prior_insn
= redundant_insn (trial
, insn
, delay_list
))
3532 fix_reg_dead_note (prior_insn
, insn
);
3535 update_block (trial
, thread
);
3536 if (trial
== thread
)
3538 thread
= next_active_insn (thread
);
3539 if (new_thread
== trial
)
3540 new_thread
= thread
;
3543 delete_insn (trial
);
3547 update_reg_unused_notes (prior_insn
, trial
);
3548 new_thread
= next_active_insn (trial
);
3554 /* There are two ways we can win: If TRIAL doesn't set anything
3555 needed at the opposite thread and can't trap, or if it can
3556 go into an annulled delay slot. */
3557 if (condition
== const_true_rtx
3558 || (! insn_sets_resource_p (trial
, &opposite_needed
, 1)
3559 && ! may_trap_p (pat
)))
3562 trial
= try_split (pat
, trial
, 0);
3563 if (new_thread
== old_trial
)
3565 if (thread
== old_trial
)
3567 pat
= PATTERN (trial
);
3568 if (eligible_for_delay (insn
, *pslots_filled
, trial
, flags
))
3572 #ifdef ANNUL_IFTRUE_SLOTS
3575 #ifdef ANNUL_IFFALSE_SLOTS
3581 trial
= try_split (pat
, trial
, 0);
3582 if (new_thread
== old_trial
)
3584 if (thread
== old_trial
)
3586 pat
= PATTERN (trial
);
3588 ? eligible_for_annul_false (insn
, *pslots_filled
, trial
, flags
)
3589 : eligible_for_annul_true (insn
, *pslots_filled
, trial
, flags
)))
3597 if (reg_mentioned_p (cc0_rtx
, pat
))
3598 link_cc0_insns (trial
);
3601 /* If we own this thread, delete the insn. If this is the
3602 destination of a branch, show that a basic block status
3603 may have been updated. In any case, mark the new
3604 starting point of this thread. */
3607 update_block (trial
, thread
);
3608 if (trial
== thread
)
3610 thread
= next_active_insn (thread
);
3611 if (new_thread
== trial
)
3612 new_thread
= thread
;
3614 delete_insn (trial
);
3617 new_thread
= next_active_insn (trial
);
3619 temp
= own_thread
? trial
: copy_rtx (trial
);
3621 INSN_FROM_TARGET_P (temp
) = 1;
3623 delay_list
= add_to_delay_list (temp
, delay_list
);
3625 if (slots_to_fill
== ++(*pslots_filled
))
3627 /* Even though we have filled all the slots, we
3628 may be branching to a location that has a
3629 redundant insn. Skip any if so. */
3630 while (new_thread
&& ! own_thread
3631 && ! insn_sets_resource_p (new_thread
, &set
, 1)
3632 && ! insn_sets_resource_p (new_thread
, &needed
, 1)
3633 && ! insn_references_resource_p (new_thread
,
3635 && redundant_insn (new_thread
, insn
, delay_list
))
3636 new_thread
= next_active_insn (new_thread
);
3645 /* This insn can't go into a delay slot. */
3647 mark_set_resources (trial
, &set
, 0, 1);
3648 mark_referenced_resources (trial
, &needed
, 1);
3650 /* Ensure we don't put insns between the setting of cc and the comparison
3651 by moving a setting of cc into an earlier delay slot since these insns
3652 could clobber the condition code. */
      /* If this insn is a register-register copy and the next insn has
	 a use of our destination, change it to use our source.  That way,
	 it will become a candidate for our delay slot the next time
	 through this loop.  This case occurs commonly in loops that
	 scan a list.

	 We could check for more complex cases than those tested below,
	 but it doesn't seem worth it.  It might also be a good idea to try
	 to swap the two insns.  That might do better.

	 We can't do this if the next insn modifies our destination, because
	 that would make the replacement into the insn invalid.  We also can't
	 do this if it modifies our source, because it might be an earlyclobber
	 operand.  This latter test also prevents updating the contents of
	 a PRE_INC.  */
3671 if (GET_CODE (trial
) == INSN
&& GET_CODE (pat
) == SET
3672 && GET_CODE (SET_SRC (pat
)) == REG
3673 && GET_CODE (SET_DEST (pat
)) == REG
)
3675 rtx next
= next_nonnote_insn (trial
);
3677 if (next
&& GET_CODE (next
) == INSN
3678 && GET_CODE (PATTERN (next
)) != USE
3679 && ! reg_set_p (SET_DEST (pat
), next
)
3680 && ! reg_set_p (SET_SRC (pat
), next
)
3681 && reg_referenced_p (SET_DEST (pat
), PATTERN (next
)))
3682 validate_replace_rtx (SET_DEST (pat
), SET_SRC (pat
), next
);
3686 /* If we stopped on a branch insn that has delay slots, see if we can
3687 steal some of the insns in those slots. */
3688 if (trial
&& GET_CODE (trial
) == INSN
3689 && GET_CODE (PATTERN (trial
)) == SEQUENCE
3690 && GET_CODE (XVECEXP (PATTERN (trial
), 0, 0)) == JUMP_INSN
)
3692 /* If this is the `true' thread, we will want to follow the jump,
3693 so we can only do this if we have taken everything up to here. */
3694 if (thread_if_true
&& trial
== new_thread
)
3696 = steal_delay_list_from_target (insn
, condition
, PATTERN (trial
),
3697 delay_list
, &set
, &needed
,
3698 &opposite_needed
, slots_to_fill
,
3699 pslots_filled
, &must_annul
,
3701 else if (! thread_if_true
)
3703 = steal_delay_list_from_fallthrough (insn
, condition
,
3705 delay_list
, &set
, &needed
,
3706 &opposite_needed
, slots_to_fill
,
3707 pslots_filled
, &must_annul
);
3710 /* If we haven't found anything for this delay slot and it is very
3711 likely that the branch will be taken, see if the insn at our target
3712 increments or decrements a register with an increment that does not
3713 depend on the destination register. If so, try to place the opposite
3714 arithmetic insn after the jump insn and put the arithmetic insn in the
3715 delay slot. If we can't do this, return. */
3716 if (delay_list
== 0 && likely
&& new_thread
3717 && GET_CODE (new_thread
) == INSN
3718 && GET_CODE (PATTERN (new_thread
)) != ASM_INPUT
3719 && asm_noperands (PATTERN (new_thread
)) < 0)
3721 rtx pat
= PATTERN (new_thread
);
3726 pat
= PATTERN (trial
);
3728 if (GET_CODE (trial
) != INSN
|| GET_CODE (pat
) != SET
3729 || ! eligible_for_delay (insn
, 0, trial
, flags
))
3732 dest
= SET_DEST (pat
), src
= SET_SRC (pat
);
3733 if ((GET_CODE (src
) == PLUS
|| GET_CODE (src
) == MINUS
)
3734 && rtx_equal_p (XEXP (src
, 0), dest
)
3735 && ! reg_overlap_mentioned_p (dest
, XEXP (src
, 1)))
3737 rtx other
= XEXP (src
, 1);
	  /* If this is a constant adjustment, use the same code with
	     the negated constant.  Otherwise, reverse the sense of the
	     arithmetic.  */
3744 if (GET_CODE (other
) == CONST_INT
)
3745 new_arith
= gen_rtx (GET_CODE (src
), GET_MODE (src
), dest
,
3746 negate_rtx (GET_MODE (src
), other
));
3748 new_arith
= gen_rtx (GET_CODE (src
) == PLUS
? MINUS
: PLUS
,
3749 GET_MODE (src
), dest
, other
);
3751 ninsn
= emit_insn_after (gen_rtx (SET
, VOIDmode
, dest
, new_arith
),
3754 if (recog_memoized (ninsn
) < 0
3755 || (insn_extract (ninsn
),
3756 ! constrain_operands (INSN_CODE (ninsn
), 1)))
3758 delete_insn (ninsn
);
3764 update_block (trial
, thread
);
3765 if (trial
== thread
)
3767 thread
= next_active_insn (thread
);
3768 if (new_thread
== trial
)
3769 new_thread
= thread
;
3771 delete_insn (trial
);
3774 new_thread
= next_active_insn (trial
);
3776 ninsn
= own_thread
? trial
: copy_rtx (trial
);
3778 INSN_FROM_TARGET_P (ninsn
) = 1;
3780 delay_list
= add_to_delay_list (ninsn
, NULL_RTX
);
3785 if (delay_list
&& must_annul
)
3786 INSN_ANNULLED_BRANCH_P (insn
) = 1;
3788 /* If we are to branch into the middle of this thread, find an appropriate
3789 label or make a new one if none, and redirect INSN to it. If we hit the
3790 end of the function, use the end-of-function label. */
3791 if (new_thread
!= thread
)
3795 if (! thread_if_true
)
3798 if (new_thread
&& GET_CODE (new_thread
) == JUMP_INSN
3799 && (simplejump_p (new_thread
)
3800 || GET_CODE (PATTERN (new_thread
)) == RETURN
)
3801 && redirect_with_delay_list_safe_p (insn
,
3802 JUMP_LABEL (new_thread
),
3804 new_thread
= follow_jumps (JUMP_LABEL (new_thread
));
3806 if (new_thread
== 0)
3807 label
= find_end_label ();
3808 else if (GET_CODE (new_thread
) == CODE_LABEL
)
3811 label
= get_label_before (new_thread
);
3813 reorg_redirect_jump (insn
, label
);
/* Make another attempt to find insns to place in delay slots.

   We previously looked for insns located in front of the delay insn
   and, for non-jump delay insns, located behind the delay insn.

   Here only try to schedule jump insns and try to move insns from either
   the target or the following insns into the delay slot.  If annulling is
   supported, we will be likely to do this.  Otherwise, we can do this only
   if safe.  */
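/* (Note added for clarity: mostly_true_jump, consulted below, decides whether
   the branch target or the fallthrough code is tried first; insns taken from
   the thread that may not execute must either be safe for the other path or
   be placed in annulling slots, which is why this pass wins most often on
   machines with annulled branches.)  */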
3830 fill_eager_delay_slots (first
)
3835 int num_unfilled_slots
= unfilled_slots_next
- unfilled_slots_base
;
3837 for (i
= 0; i
< num_unfilled_slots
; i
++)
3840 rtx target_label
, insn_at_target
, fallthrough_insn
;
3843 int own_fallthrough
;
3844 int prediction
, slots_to_fill
, slots_filled
;
3846 insn
= unfilled_slots_base
[i
];
3848 || INSN_DELETED_P (insn
)
3849 || GET_CODE (insn
) != JUMP_INSN
3850 || ! (condjump_p (insn
) || condjump_in_parallel_p (insn
)))
3853 slots_to_fill
= num_delay_slots (insn
);
3854 if (slots_to_fill
== 0)
3858 target_label
= JUMP_LABEL (insn
);
3859 condition
= get_branch_condition (insn
, target_label
);
3864 /* Get the next active fallthrough and target insns and see if we own
3865 them. Then see whether the branch is likely true. We don't need
3866 to do a lot of this for unconditional branches. */
3868 insn_at_target
= next_active_insn (target_label
);
3869 own_target
= own_thread_p (target_label
, target_label
, 0);
3871 if (condition
== const_true_rtx
)
3873 own_fallthrough
= 0;
3874 fallthrough_insn
= 0;
3879 fallthrough_insn
= next_active_insn (insn
);
3880 own_fallthrough
= own_thread_p (NEXT_INSN (insn
), NULL_RTX
, 1);
3881 prediction
= mostly_true_jump (insn
, condition
);
      /* If this insn is expected to branch, first try to get insns from our
	 target, then our fallthrough insns.  If it is not expected to branch,
	 try the other order.  */
      if (prediction > 0)
	{
	  delay_list
	    = fill_slots_from_thread (insn, condition, insn_at_target,
				      fallthrough_insn, prediction == 2, 1,
				      own_target, own_fallthrough,
				      slots_to_fill, &slots_filled);

	  if (delay_list == 0 && own_fallthrough)
	    {
	      /* Even though we didn't find anything for delay slots,
		 we might have found a redundant insn which we deleted
		 from the thread that was filled.  So we have to recompute
		 the next insn at the target.  */
	      target_label = JUMP_LABEL (insn);
	      insn_at_target = next_active_insn (target_label);

	      delay_list
		= fill_slots_from_thread (insn, condition, fallthrough_insn,
					  insn_at_target, 0, 0,
					  own_fallthrough, own_target,
					  slots_to_fill, &slots_filled);
	    }
	}
      else
	{
	  if (own_fallthrough)
	    delay_list
	      = fill_slots_from_thread (insn, condition, fallthrough_insn,
					insn_at_target, 0, 0,
					own_fallthrough, own_target,
					slots_to_fill, &slots_filled);

	  if (delay_list == 0)
	    delay_list
	      = fill_slots_from_thread (insn, condition, insn_at_target,
					next_active_insn (insn), 0, 1,
					own_target, own_fallthrough,
					slots_to_fill, &slots_filled);
	}
      if (delay_list)
	{
	  unfilled_slots_base[i]
	    = emit_delay_sequence (insn, delay_list,
				   slots_filled, slots_to_fill);

	  if (slots_to_fill == slots_filled)
	    unfilled_slots_base[i] = 0;

	  note_delay_statistics (slots_filled, 1);
	}
    }
}
/* Once we have tried two ways to fill a delay slot, make a pass over the
   code to try to improve the results and to do such things as more jump
   threading.  */
static void
relax_delay_slots (first)
     rtx first;
{
  register rtx insn, next, pat;
  register rtx trial, delay_insn, target_label;

  /* Look at every JUMP_INSN and see if we can improve it.  */
  for (insn = first; insn; insn = next)
    {
      rtx other;

      next = next_active_insn (insn);
      /* If this is a jump insn, see if it now jumps to a jump, jumps to
	 the next insn, or jumps to a label that is not the last of a
	 group of consecutive labels.  */
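      /* Illustration, not from the original source: if the target label is
	 immediately followed by an unconditional "jump L2", follow_jumps
	 lets this branch be redirected straight to L2; and when several
	 labels fall on the same insn, prev_label of the next active insn
	 picks the last of them, so every branch ends up with one canonical
	 target.  */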
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && (target_label = JUMP_LABEL (insn)) != 0)
	{
	  target_label = follow_jumps (target_label);
	  target_label = prev_label (next_active_insn (target_label));

	  if (target_label == 0)
	    target_label = find_end_label ();

	  if (next_active_insn (target_label) == next
	      && ! condjump_in_parallel_p (insn))
	    {
	      delete_jump (insn);
	      continue;
	    }

	  if (target_label != JUMP_LABEL (insn))
	    reorg_redirect_jump (insn, target_label);
	  /* See if this jump branches around an unconditional jump.
	     If so, invert this jump and point it to the target of the
	     second jump.  */
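	  /* Illustrative sketch, not part of the original code:

		(cc) jump L1;  jump L2;  L1: ...

	     becomes "(!cc) jump L2;  L1: ..." once the inversion succeeds,
	     so the unconditional jump dies; the only cost is the label
	     reference bookkeeping done below.  */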
	  if (next && GET_CODE (next) == JUMP_INSN
	      && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	      && next_active_insn (target_label) == next_active_insn (next)
	      && no_labels_between_p (insn, next))
	    {
	      rtx label = JUMP_LABEL (next);

	      /* Be careful how we do this to avoid deleting code or
		 labels that are momentarily dead.  See similar optimization
		 in jump.c.

		 We also need to ensure we properly handle the case when
		 invert_jump fails.  */

	      ++LABEL_NUSES (target_label);
	      if (label)
		++LABEL_NUSES (label);

	      if (invert_jump (insn, label))
		{
		  delete_insn (next);
		  next = insn;
		}

	      if (label)
		--LABEL_NUSES (label);

	      if (--LABEL_NUSES (target_label) == 0)
		delete_insn (target_label);
	    }
      /* If this is an unconditional jump and the previous insn is a
	 conditional jump, try reversing the condition of the previous
	 insn and swapping our targets.  The next pass might be able to
	 fill the slots.

	 Don't do this if we expect the conditional branch to be true, because
	 we would then be making the more common case longer.  */
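      /* Illustration, not from the original source: for

		(cc) jump L1;  jump L2;  ...

	 where the conditional branch is almost never taken, reversing its
	 condition and swapping the two targets gives "(!cc) jump L2;
	 jump L1;", so the common path takes a single branch straight to L2
	 instead of falling through to the separate unconditional jump, and
	 only the rare path pays for the extra jump.  */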
      if (GET_CODE (insn) == JUMP_INSN
	  && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
	  && (other = prev_active_insn (insn)) != 0
	  && (condjump_p (other) || condjump_in_parallel_p (other))
	  && no_labels_between_p (other, insn)
	  && 0 > mostly_true_jump (other,
				   get_branch_condition (other,
							 JUMP_LABEL (other))))
	{
	  rtx other_target = JUMP_LABEL (other);
	  target_label = JUMP_LABEL (insn);

	  /* Increment the count of OTHER_TARGET, so it doesn't get deleted
	     as we move the label.  */
	  ++LABEL_NUSES (other_target);

	  if (invert_jump (other, target_label))
	    reorg_redirect_jump (insn, other_target);

	  --LABEL_NUSES (other_target);
	}
      /* Now look only at cases where we have filled a delay slot.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE)
	continue;

      pat = PATTERN (insn);
      delay_insn = XVECEXP (pat, 0, 0);
      /* See if the first insn in the delay slot is redundant with some
	 previous insn.  Remove it from the delay slot if so; then set up
	 to reprocess this insn.  */
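      /* Illustrative note, not in the original source: "redundant" here
	 means an equivalent insn has already been executed on the way to
	 this point with nothing in between disturbing its result, so the
	 copy sitting in the delay slot does no useful work and can simply
	 be dropped, freeing the slot for a later pass.  */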
      if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
	{
	  delete_from_delay_slot (XVECEXP (pat, 0, 1));
	  next = prev_active_insn (next);
	  continue;
	}
      /* Now look only at the cases where we have a filled JUMP_INSN.  */
      if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
		|| condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
	continue;

      target_label = JUMP_LABEL (delay_insn);
      /* If this jump goes to another unconditional jump, thread it, but
	 don't convert a jump into a RETURN here.  */
      trial = follow_jumps (target_label);
      /* We use next_real_insn instead of next_active_insn, so that
	 the special USE insns emitted by reorg won't be ignored.
	 If they are ignored, then they will get deleted if target_label
	 is now unreachable, and that would cause mark_target_live_regs
	 to fail.  */
      trial = prev_label (next_real_insn (trial));
      if (trial == 0 && target_label != 0)
	trial = find_end_label ();

      if (trial != target_label
	  && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
	{
	  reorg_redirect_jump (delay_insn, trial);
	  target_label = trial;
	}
      /* If the first insn at TARGET_LABEL is redundant with a previous
	 insn, redirect the jump to the following insn and process again.  */
      trial = next_active_insn (target_label);
      if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
	  && redundant_insn (trial, insn, 0))
	{
	  rtx tmp;

	  /* Figure out where to emit the special USE insn so we don't
	     later incorrectly compute register live/death info.  */
	  tmp = next_active_insn (trial);
	  if (tmp == 0)
	    tmp = find_end_label ();

	  /* Insert the special USE insn and update dataflow info.  */
	  update_block (trial, tmp);

	  /* Now emit a label before the special USE insn, and
	     redirect our jump to the new label.  */
	  target_label = get_label_before (PREV_INSN (tmp));
	  reorg_redirect_jump (delay_insn, target_label);
	}
      /* Similarly, if it is an unconditional jump with one insn in its
	 delay list and that insn is redundant, thread the jump.  */
      if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
	  && XVECLEN (PATTERN (trial), 0) == 2
	  && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
	  && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
	      || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
	  && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
	{
	  target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
	  if (target_label == 0)
	    target_label = find_end_label ();

	  if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
						insn))
	    reorg_redirect_jump (delay_insn, target_label);
	}
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && prev_active_insn (target_label) == insn
	  && ! condjump_in_parallel_p (delay_insn)
	  /* If the last insn in the delay slot sets CC0 for some insn,
	     various code assumes that it is in a delay slot.  We could
	     put it back where it belonged and delete the register notes,
	     but it doesn't seem worthwhile in this uncommon case.  */
	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
			      REG_CC_USER, NULL_RTX))
	{
	  int i;

	  /* All this insn does is execute its delay list and jump to the
	     following insn.  So delete the jump and just execute the delay
	     list insns.

	     We do this by deleting the INSN containing the SEQUENCE, then
	     re-emitting the insns separately, and then deleting the jump.
	     This allows the count of the jump target to be properly
	     decremented.  */

	  /* Clear the from target bit, since these insns are no longer
	     in delay slots.  */
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

	  trial = PREV_INSN (insn);
	  delete_insn (insn);
	  emit_insn_after (pat, trial);
	  delete_scheduled_jump (delay_insn);
	  continue;
	}
      /* See if this is an unconditional jump around a single insn which is
	 identical to the one in its delay slot.  In this case, we can just
	 delete the branch and the insn in its delay slot.  */
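      /* Illustrative sketch, not part of the original code:

		jump L1  { delay slot: insn A };  insn A;  L1: ...

	 The jump executes A from its slot and lands just past the duplicate
	 copy of A, so deleting the jump together with its slot leaves the
	 straight-line copy of A with exactly the same effect.  */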
      if (next && GET_CODE (next) == INSN
	  && prev_label (next_active_insn (next)) == target_label
	  && simplejump_p (insn)
	  && XVECLEN (pat, 0) == 2
	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
	{
	  delete_insn (insn);
	  continue;
	}
      /* See if this jump (with its delay slots) branches around another
	 jump (without delay slots).  If so, invert this jump and point
	 it to the target of the second jump.  We cannot do this for
	 annulled jumps, though.  Again, don't convert a jump to a RETURN
	 here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && next && GET_CODE (next) == JUMP_INSN
	  && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	  && next_active_insn (target_label) == next_active_insn (next)
	  && no_labels_between_p (insn, next))
	{
	  rtx label = JUMP_LABEL (next);
	  rtx old_label = JUMP_LABEL (delay_insn);

	  if (label == 0)
	    label = find_end_label ();

	  if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
	    {
	      /* Be careful how we do this to avoid deleting code or labels
		 that are momentarily dead.  See similar optimization in
		 jump.c.  */
	      if (old_label)
		++LABEL_NUSES (old_label);

	      if (invert_jump (delay_insn, label))
		{
		  int i;

		  /* Must update the INSN_FROM_TARGET_P bits now that
		     the branch is reversed, so that mark_target_live_regs
		     will handle the delay slot insn correctly.  */
		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
		    {
		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
		    }

		  delete_insn (next);
		  next = insn;
		}

	      if (old_label && --LABEL_NUSES (old_label) == 0)
		delete_insn (old_label);
	    }
	}
      /* If we own the thread opposite the way this insn branches, see if we
	 can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	  && own_thread_p (NEXT_INSN (insn), 0, 1))
	try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	       && own_thread_p (target_label, target_label, 0))
	try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
	 NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}
/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */
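/* Illustrative note, not in the original source: a filled jump to the
   end-of-function label whose slot insns are also acceptable in the delay
   slots of a RETURN can be rewritten as a RETURN carrying the same delay
   list, removing the jump to the common exit; jumps that cannot be
   converted are instead pointed at the best RETURN found
   (real_return_label below).  */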
static void
make_return_insns (first)
     rtx first;
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;

  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
	real_return_label = get_label_before (insn);
	break;
      }

  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;
  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
	 label.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE
	  || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
	continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);
      /* If we can't make the jump into a RETURN, try to redirect it to the
	 best RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
	{
	  /* Make sure redirecting the jump will not invalidate the delay
	     slot insns.  */
	  if (redirect_with_delay_slots_safe_p (jump_insn,
						real_return_label,
						insn))
	    reorg_redirect_jump (jump_insn, real_return_label);
	  continue;
	}
      /* See if this RETURN can accept the insns currently in its delay slot.
	 It can if it has more or an equal number of slots and the contents
	 of each is valid.  */
      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
	{
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_false (jump_insn, i - 1,
					       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_true (jump_insn, i - 1,
					      XVECEXP (pat, 0, i), flags) :
#endif
		   eligible_for_delay (jump_insn, i - 1,
				       XVECEXP (pat, 0, i), flags)))
	      break;
	}
      else
	i = 0;

      if (i == XVECLEN (pat, 0))
	continue;
      /* We have to do something with this insn.  If it is an unconditional
	 RETURN, delete the SEQUENCE and output the individual insns,
	 followed by the RETURN.  Then set things up so we try to find
	 insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
	{
	  rtx prev = PREV_INSN (insn);

	  delete_insn (insn);
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
	  emit_barrier_after (insn);

	  if (slots)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
      else
	/* It is probably more efficient to keep this with its current
	   delay slot as a branch to a RETURN.  */
	reorg_redirect_jump (jump_insn, real_return_label);
    }
  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);

  fill_simple_delay_slots (first, 1);
  fill_simple_delay_slots (first, 0);
}
/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int i;
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;
  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
	epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int *));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }
  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && JUMP_LABEL (insn) != 0
	  && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
	      != JUMP_LABEL (insn)))
	redirect_jump (insn, target);
    }
  /* Indicate what resources are required to be valid at the end of the
     current function.  The condition code never is and memory always is.
     If the frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */
  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0
      && GET_CODE (current_function_return_rtx) == REG)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);
  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */
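  /* Illustrative sketch, not from the original source, of the shape
     described above, for a hypothetical return register R and return
     label RET:

	(insn (use (reg R)))
	(jump_insn (set (pc) (label_ref RET)))
	...
	RET:			;; formerly the end of the RTL chain

     Once an epilogue is emitted after RET, the chain no longer ends there,
     so the liveness of R must be reconstructed as explained above.  */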
  start_of_epilogue_needs = end_of_function_needs;

  while (epilogue_insn = next_nonnote_insn (epilogue_insn))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);

  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;
  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table
    = (struct target_info **) alloca ((TARGET_HASH_PRIME
				       * sizeof (struct target_info *)));
  bzero ((char *) target_hash_table,
	 TARGET_HASH_PRIME * sizeof (struct target_info *));

  bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
  bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));

  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);
  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */
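  /* Illustrative note, not in the original source: a slot left empty on the
     first pass can become fillable after relax_delay_slots threads or
     deletes a jump, which is why the sequence below is repeated up to
     MAX_REORG_PASSES times.  */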
  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (first, 1);
      fill_simple_delay_slots (first, 0);
      fill_eager_delay_slots (first);
      relax_delay_slots (first);
    }
  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	  && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	next = delete_insn (insn);
    }
  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);
  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      fprintf (file, ";; Reorg function #%d\n", i);

	      fprintf (file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      need_comma = 0;
	      for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (file, ", ");
		    need_comma = 1;
		    fprintf (file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }

	      fprintf (file, "\n");
	    }
	}
    }
}
#endif /* DELAY_SLOTS */