1 /* Perform instruction reorganizations for delay slot filling.
2 Copyright (C) 1992, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
3 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
4 Hacked by Michael Tiemann (tiemann@cygnus.com).
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
23 /* Instruction reorganization pass.
25 This pass runs after register allocation and final jump
26 optimization. It should be the last pass to run before peephole.
27 It serves primarily to fill delay slots of insns, typically branch
28 and call insns. Other insns typically involve more complicated
29 interactions of data dependencies and resource constraints, and
30 are better handled by scheduling before register allocation (by the
31 function `schedule_insns').
33 The Branch Penalty is the number of extra cycles that are needed to
34 execute a branch insn. On an ideal machine, branches take a single
35 cycle, and the Branch Penalty is 0. Several RISC machines approach
36 branch delays differently:
38 The MIPS and AMD 29000 have a single branch delay slot. Most insns
39 (except other branches) can be used to fill this slot. When the
40 slot is filled, two insns execute in two cycles, reducing the
41 branch penalty to zero.
43 The Motorola 88000 conditionally exposes its branch delay slot,
44 so code is shorter when it is turned off, but will run faster
45 when useful insns are scheduled there.
47 The IBM ROMP has two forms of branch and call insns, both with and
48 without a delay slot. Much like the 88k, insns not using the delay
49 slot can be shorter (2 bytes vs. 4 bytes), but will run slower.
51 The SPARC always has a branch delay slot, but its effects can be
52 annulled when the branch is not taken. This means that failing to
53 find other sources of insns, we can hoist an insn from the branch
54 target that would only be safe to execute knowing that the branch
55 is taken.
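
   As a purely illustrative sketch (generic pseudo-assembly, not the
   syntax of any particular target), suppose the slot after a conditional
   branch could not be filled any other way:

	bcc	L1
	nop		; unfilled delay slot
	...
   L1:	insn1
	insn2

   With an annulling branch we can copy insn1 into the slot and, when
   possible, retarget the branch just past the copy; the copy is annulled
   (has no effect) when the branch falls through:

	bcc,a	L1'
	insn1		; copied from the branch target, annulled if not taken
	...
   L1:	insn1
   L1':	insn2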
57 The HP-PA always has a branch delay slot. For unconditional branches
58 its effects can be annulled when the branch is taken. The effects
59 of the delay slot in a conditional branch can be nullified for forward
60 taken branches, or for untaken backward branches. This means
61 we can hoist insns from the fall-through path for forward branches or
62 steal insns from the target of backward branches.
64 Three techniques for filling delay slots have been implemented so far:
66 (1) `fill_simple_delay_slots' is the simplest, most efficient way
67 to fill delay slots. This pass first looks for insns which come
68 from before the branch and which are safe to execute after the
69 branch. Then it searches after the insn requiring delay slots or,
70 in the case of a branch, for insns that are after the point at
71 which the branch merges into the fallthrough code, if such a point
72 exists. When such insns are found, the branch penalty decreases
73 and no code expansion takes place.
75 (2) `fill_eager_delay_slots' is more complicated: it is used for
76 scheduling conditional jumps, or for scheduling jumps which cannot
77 be filled using (1). A machine need not have annulled jumps to use
78 this strategy, but it helps (by keeping more options open).
79 `fill_eager_delay_slots' tries to guess the direction the branch
80 will go; if it guesses right 100% of the time, it can reduce the
81 branch penalty as much as `fill_simple_delay_slots' does. If it
82 guesses wrong 100% of the time, it might as well schedule nops (or
83 on the m88k, unexpose the branch slot). When
84 `fill_eager_delay_slots' takes insns from the fall-through path of
85 the jump, usually there is no code expansion; when it takes insns
86 from the branch target, there is code expansion if it is not the
87 only way to reach that target.
89 (3) `relax_delay_slots' uses a set of rules to simplify code that
90 has been reorganized by (1) and (2). It finds cases where
91 a conditional test can be eliminated, jumps can be threaded, extra
92 insns can be eliminated, etc. It is the job of (1) and (2) to do a
93 good job of scheduling locally; `relax_delay_slots' takes care of
94 making the various individual schedules work well together. It is
95 especially tuned to handle the control flow interactions of branch
96 insns. It does nothing for insns with delay slots that do not
97 branch.
99 On machines that use CC0, we are very conservative. We will not make
100 a copy of an insn involving CC0 since we want to maintain a 1-1
101 correspondence between the insn that sets CC0 and the one that uses it. The insns are
102 allowed to be separated by placing an insn that sets CC0 (but not an insn
103 that uses CC0; we could do this, but it doesn't seem worthwhile) in a
104 delay slot. In that case, we point each insn at the other with REG_CC_USER
105 and REG_CC_SETTER notes. Note that these restrictions affect very few
106 machines because most RISC machines with delay slots will not use CC0
107 (the RT is the only known exception at this point).
109 Not yet implemented:
111 The Acorn Risc Machine can conditionally execute most insns, so
112 it is profitable to move single insns into a position to execute
113 based on the condition code of the previous insn.
115 The HP-PA can conditionally nullify insns, providing a similar
116 effect to the ARM, differing mostly in which insn is "in charge". */
118 #include <stdio.h>
119 #include "config.h"
120 #include "rtl.h"
121 #include "insn-config.h"
122 #include "conditions.h"
123 #include "hard-reg-set.h"
124 #include "basic-block.h"
125 #include "regs.h"
126 #include "insn-flags.h"
127 #include "recog.h"
128 #include "flags.h"
129 #include "output.h"
130 #include "obstack.h"
131 #include "insn-attr.h"
133 /* Import list of registers used as spill regs from reload. */
134 extern HARD_REG_SET used_spill_regs;
136 /* Import highest label used in function at end of reload. */
137 extern int max_label_num_after_reload;
140 #ifdef DELAY_SLOTS
142 #define obstack_chunk_alloc xmalloc
143 #define obstack_chunk_free free
145 #ifndef ANNUL_IFTRUE_SLOTS
146 #define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
147 #endif
148 #ifndef ANNUL_IFFALSE_SLOTS
149 #define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
150 #endif
152 /* Insns which have delay slots that have not yet been filled. */
154 static struct obstack unfilled_slots_obstack;
155 static rtx *unfilled_firstobj;
157 /* Define macros to refer to the first and last slot containing unfilled
158 insns. These are used because the list may move and its address
159 should be recomputed at each use. */
161 #define unfilled_slots_base \
162 ((rtx *) obstack_base (&unfilled_slots_obstack))
164 #define unfilled_slots_next \
165 ((rtx *) obstack_next_free (&unfilled_slots_obstack))
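
/* For illustration only (a paraphrase, not a quotation of code elsewhere in
   this file): insns needing delay slots are recorded with obstack_ptr_grow
   and later walked between the two macros above, roughly as in

	rtx *pslot;

	obstack_ptr_grow (&unfilled_slots_obstack, insn);
	...
	for (pslot = unfilled_slots_base; pslot < unfilled_slots_next; pslot++)
	  if (*pslot != 0)
	    ... try to fill the delay slots of *pslot ...  */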
167 /* This structure is used to indicate which hardware resources are set or
168 needed by insns so far. */
170 struct resources
172 char memory; /* Insn sets or needs a memory location. */
173 char unch_memory; /* Insn sets or needs an "unchanging" MEM. */
174 char volatil; /* Insn sets or needs a volatile memory loc. */
175 char cc; /* Insn sets or needs the condition codes. */
176 HARD_REG_SET regs; /* Which registers are set or needed. */
179 /* Macro to clear all resources. */
180 #define CLEAR_RESOURCE(RES) \
181 do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
182 CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
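
/* For illustration, the usual pattern (as in insn_references_resource_p and
   insn_sets_resource_p below) is to clear a resource set, accumulate what an
   insn touches, and then test for conflicts:

	struct resources insn_res;

	CLEAR_RESOURCE (&insn_res);
	mark_referenced_resources (insn, &insn_res, 1);
	if (resource_conflicts_p (&insn_res, res))
	  ...  */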
184 /* Indicates what resources are required at the beginning of the epilogue. */
185 static struct resources start_of_epilogue_needs;
187 /* Indicates what resources are required at function end. */
188 static struct resources end_of_function_needs;
190 /* Points to the label before the end of the function. */
191 static rtx end_of_function_label;
193 /* This structure is used to record liveness information at the targets or
194 fallthrough insns of branches. We will most likely need the information
195 at targets again, so save them in a hash table rather than recomputing them
196 each time. */
198 struct target_info
200 int uid; /* INSN_UID of target. */
201 struct target_info *next; /* Next info for same hash bucket. */
202 HARD_REG_SET live_regs; /* Registers live at target. */
203 int block; /* Basic block number containing target. */
204 int bb_tick; /* Generation count of basic block info. */
207 #define TARGET_HASH_PRIME 257
209 /* Define the hash table itself. */
210 static struct target_info **target_hash_table;
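
/* For illustration, an entry is found by hashing the target insn's UID and
   walking the bucket chain (the same pattern appears in add_to_delay_list
   below):

	struct target_info *tinfo;

	for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	     tinfo; tinfo = tinfo->next)
	  if (tinfo->uid == INSN_UID (insn))
	    break;  */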
212 /* For each basic block, we maintain a generation number of its basic
213 block info, which is updated each time we move an insn from the
214 target of a jump. This is the generation number indexed by block
215 number. */
217 static int *bb_ticks;
219 /* Mapping between INSN_UID's and position in the code since INSN_UID's do
220 not always monotonically increase. */
221 static int *uid_to_ruid;
223 /* Highest valid index in `uid_to_ruid'. */
224 static int max_uid;
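
/* For illustration, uid_to_ruid gives a total ordering of the insns, so the
   direction of a branch can be decided by comparing positions (this is what
   get_jump_flags does below); a sketch:

	flags = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
		? ATTR_FLAG_forward : ATTR_FLAG_backward;  */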
226 static void mark_referenced_resources PROTO((rtx, struct resources *, int));
227 static void mark_set_resources PROTO((rtx, struct resources *, int, int));
228 static int stop_search_p PROTO((rtx, int));
229 static int resource_conflicts_p PROTO((struct resources *,
230 struct resources *));
231 static int insn_references_resource_p PROTO((rtx, struct resources *, int));
232 static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
233 static rtx find_end_label PROTO((void));
234 static rtx emit_delay_sequence PROTO((rtx, rtx, int, int));
235 static rtx add_to_delay_list PROTO((rtx, rtx));
236 static void delete_from_delay_slot PROTO((rtx));
237 static void delete_scheduled_jump PROTO((rtx));
238 static void note_delay_statistics PROTO((int, int));
239 static rtx optimize_skip PROTO((rtx));
240 static int get_jump_flags PROTO((rtx, rtx));
241 static int rare_destination PROTO((rtx));
242 static int mostly_true_jump PROTO((rtx, rtx));
243 static rtx get_branch_condition PROTO((rtx, rtx));
244 static int condition_dominates_p PROTO((rtx, rtx));
245 static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
246 struct resources *,
247 struct resources *,
248 struct resources *,
249 int, int *, int *, rtx *));
250 static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
251 struct resources *,
252 struct resources *,
253 struct resources *,
254 int, int *, int *));
255 static void try_merge_delay_insns PROTO((rtx, rtx));
256 static rtx redundant_insn PROTO((rtx, rtx, rtx));
257 static int own_thread_p PROTO((rtx, rtx, int));
258 static int find_basic_block PROTO((rtx));
259 static void update_block PROTO((rtx, rtx));
260 static int reorg_redirect_jump PROTO((rtx, rtx));
261 static void update_reg_dead_notes PROTO((rtx, rtx));
262 static void fix_reg_dead_note PROTO((rtx, rtx));
263 static void update_reg_unused_notes PROTO((rtx, rtx));
264 static void update_live_status PROTO((rtx, rtx));
265 static rtx next_insn_no_annul PROTO((rtx));
266 static void mark_target_live_regs PROTO((rtx, struct resources *));
267 static void fill_simple_delay_slots PROTO((rtx, int));
268 static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
269 int, int, int, int *));
270 static void fill_eager_delay_slots PROTO((rtx));
271 static void relax_delay_slots PROTO((rtx));
272 static void make_return_insns PROTO((rtx));
273 static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
274 static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
276 /* Given X, some rtl, and RES, a pointer to a `struct resource', mark
277 which resources are referenced by the insn. If INCLUDE_DELAYED_EFFECTS
278 is TRUE, resources used by the called routine will be included for
279 CALL_INSNs. */
281 static void
282 mark_referenced_resources (x, res, include_delayed_effects)
283 register rtx x;
284 register struct resources *res;
285 register int include_delayed_effects;
287 register enum rtx_code code = GET_CODE (x);
288 register int i, j;
289 register char *format_ptr;
291 /* Handle leaf items for which we set resource flags. Also, special-case
292 CALL, SET and CLOBBER operators. */
293 switch (code)
295 case CONST:
296 case CONST_INT:
297 case CONST_DOUBLE:
298 case PC:
299 case SYMBOL_REF:
300 case LABEL_REF:
301 return;
303 case SUBREG:
304 if (GET_CODE (SUBREG_REG (x)) != REG)
305 mark_referenced_resources (SUBREG_REG (x), res, 0);
306 else
308 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
309 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
310 for (i = regno; i < last_regno; i++)
311 SET_HARD_REG_BIT (res->regs, i);
313 return;
315 case REG:
316 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
317 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
318 return;
320 case MEM:
321 /* If this memory shouldn't change, it really isn't referencing
322 memory. */
323 if (RTX_UNCHANGING_P (x))
324 res->unch_memory = 1;
325 else
326 res->memory = 1;
327 res->volatil = MEM_VOLATILE_P (x);
329 /* Mark registers used to access memory. */
330 mark_referenced_resources (XEXP (x, 0), res, 0);
331 return;
333 case CC0:
334 res->cc = 1;
335 return;
337 case UNSPEC_VOLATILE:
338 case ASM_INPUT:
339 case TRAP_IF:
340 /* Traditional asm's are always volatile. */
341 res->volatil = 1;
342 return;
344 case ASM_OPERANDS:
345 res->volatil = MEM_VOLATILE_P (x);
347 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
348 We can not just fall through here since then we would be confused
349 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
350 traditional asms unlike their normal usage. */
352 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
353 mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
354 return;
356 case CALL:
357 /* The first operand will be a (MEM (xxx)) but doesn't really reference
358 memory. The second operand may be referenced, though. */
359 mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
360 mark_referenced_resources (XEXP (x, 1), res, 0);
361 return;
363 case SET:
364 /* Usually, the first operand of SET is set, not referenced. But
365 registers used to access memory are referenced. SET_DEST is
366 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
368 mark_referenced_resources (SET_SRC (x), res, 0);
370 x = SET_DEST (x);
371 if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
372 mark_referenced_resources (x, res, 0);
373 else if (GET_CODE (x) == SUBREG)
374 x = SUBREG_REG (x);
375 if (GET_CODE (x) == MEM)
376 mark_referenced_resources (XEXP (x, 0), res, 0);
377 return;
379 case CLOBBER:
380 return;
382 case CALL_INSN:
383 if (include_delayed_effects)
385 /* A CALL references memory, the frame pointer if it exists, the
386 stack pointer, any global registers and any registers given in
387 USE insns immediately in front of the CALL.
389 However, we may have moved some of the parameter loading insns
390 into the delay slot of this CALL. If so, the USE's for them
391 don't count and should be skipped. */
392 rtx insn = PREV_INSN (x);
393 rtx sequence = 0;
394 int seq_size = 0;
395 rtx next = NEXT_INSN (x);
396 int i;
398 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
399 if (NEXT_INSN (insn) != x)
401 next = NEXT_INSN (NEXT_INSN (insn));
402 sequence = PATTERN (NEXT_INSN (insn));
403 seq_size = XVECLEN (sequence, 0);
404 if (GET_CODE (sequence) != SEQUENCE)
405 abort ();
408 res->memory = 1;
409 SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
410 if (frame_pointer_needed)
412 SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
413 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
414 SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
415 #endif
418 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
419 if (global_regs[i])
420 SET_HARD_REG_BIT (res->regs, i);
422 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
423 assume that this call can need any register.
425 This is done to be more conservative about how we handle setjmp.
426 We assume that they both use and set all registers. Using all
427 registers ensures that a register will not be considered dead
428 just because it crosses a setjmp call. A register should be
429 considered dead only if the setjmp call returns non-zero. */
430 if (next && GET_CODE (next) == NOTE
431 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
432 SET_HARD_REG_SET (res->regs);
435 rtx link;
437 for (link = CALL_INSN_FUNCTION_USAGE (x);
438 link;
439 link = XEXP (link, 1))
440 if (GET_CODE (XEXP (link, 0)) == USE)
442 for (i = 1; i < seq_size; i++)
444 rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
445 if (GET_CODE (slot_pat) == SET
446 && rtx_equal_p (SET_DEST (slot_pat),
447 SET_DEST (XEXP (link, 0))))
448 break;
450 if (i >= seq_size)
451 mark_referenced_resources (SET_DEST (XEXP (link, 0)),
452 res, 0);
457 /* ... fall through to other INSN processing ... */
459 case INSN:
460 case JUMP_INSN:
462 #ifdef INSN_REFERENCES_ARE_DELAYED
463 if (! include_delayed_effects
464 && INSN_REFERENCES_ARE_DELAYED (x))
465 return;
466 #endif
468 /* No special processing, just speed up. */
469 mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
470 return;
473 /* Process each sub-expression and flag what it needs. */
474 format_ptr = GET_RTX_FORMAT (code);
475 for (i = 0; i < GET_RTX_LENGTH (code); i++)
476 switch (*format_ptr++)
478 case 'e':
479 mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
480 break;
482 case 'E':
483 for (j = 0; j < XVECLEN (x, i); j++)
484 mark_referenced_resources (XVECEXP (x, i, j), res,
485 include_delayed_effects);
486 break;
490 /* Given X, a part of an insn, and a pointer to a `struct resource', RES,
491 indicate which resources are modified by the insn. If INCLUDE_DELAYED_EFFECTS
492 is nonzero, also mark resources potentially set by the called routine.
494 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
495 objects are being referenced instead of set.
497 We never mark the insn as modifying the condition code unless it explicitly
498 SETs CC0 even though this is not totally correct. The reason for this is
499 that we require a SET of CC0 to immediately precede the reference to CC0.
500 So if some other insn sets CC0 as a side-effect, we know it cannot affect
501 our computation and thus may be placed in a delay slot. */
503 static void
504 mark_set_resources (x, res, in_dest, include_delayed_effects)
505 register rtx x;
506 register struct resources *res;
507 int in_dest;
508 int include_delayed_effects;
510 register enum rtx_code code;
511 register int i, j;
512 register char *format_ptr;
514 restart:
516 code = GET_CODE (x);
518 switch (code)
520 case NOTE:
521 case BARRIER:
522 case CODE_LABEL:
523 case USE:
524 case CONST_INT:
525 case CONST_DOUBLE:
526 case LABEL_REF:
527 case SYMBOL_REF:
528 case CONST:
529 case PC:
530 /* These don't set any resources. */
531 return;
533 case CC0:
534 if (in_dest)
535 res->cc = 1;
536 return;
538 case CALL_INSN:
539 /* Called routine modifies the condition code, memory, any registers
540 that aren't saved across calls, global registers and anything
541 explicitly CLOBBERed immediately after the CALL_INSN. */
543 if (include_delayed_effects)
545 rtx next = NEXT_INSN (x);
546 rtx prev = PREV_INSN (x);
547 rtx link;
549 res->cc = res->memory = 1;
550 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
551 if (call_used_regs[i] || global_regs[i])
552 SET_HARD_REG_BIT (res->regs, i);
554 /* If X is part of a delay slot sequence, then NEXT should be
555 the first insn after the sequence. */
556 if (NEXT_INSN (prev) != x)
557 next = NEXT_INSN (NEXT_INSN (prev));
559 for (link = CALL_INSN_FUNCTION_USAGE (x);
560 link; link = XEXP (link, 1))
561 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
562 mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);
564 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
565 assume that this call can clobber any register. */
566 if (next && GET_CODE (next) == NOTE
567 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
568 SET_HARD_REG_SET (res->regs);
571 /* ... and also what its RTL says it modifies, if anything. */
573 case JUMP_INSN:
574 case INSN:
576 /* An insn consisting of just a CLOBBER (or USE) is just for flow
577 and doesn't actually do anything, so we ignore it. */
579 #ifdef INSN_SETS_ARE_DELAYED
580 if (! include_delayed_effects
581 && INSN_SETS_ARE_DELAYED (x))
582 return;
583 #endif
585 x = PATTERN (x);
586 if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
587 goto restart;
588 return;
590 case SET:
591 /* If the source of a SET is a CALL, this is actually done by
592 the called routine. So only include it if we are to include the
593 effects of the called routine. */
595 mark_set_resources (SET_DEST (x), res,
596 (include_delayed_effects
597 || GET_CODE (SET_SRC (x)) != CALL),
600 mark_set_resources (SET_SRC (x), res, 0, 0);
601 return;
603 case CLOBBER:
604 mark_set_resources (XEXP (x, 0), res, 1, 0);
605 return;
607 case SEQUENCE:
608 for (i = 0; i < XVECLEN (x, 0); i++)
609 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
610 && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
611 mark_set_resources (XVECEXP (x, 0, i), res, 0,
612 include_delayed_effects);
613 return;
615 case POST_INC:
616 case PRE_INC:
617 case POST_DEC:
618 case PRE_DEC:
619 mark_set_resources (XEXP (x, 0), res, 1, 0);
620 return;
622 case ZERO_EXTRACT:
623 mark_set_resources (XEXP (x, 0), res, in_dest, 0);
624 mark_set_resources (XEXP (x, 1), res, 0, 0);
625 mark_set_resources (XEXP (x, 2), res, 0, 0);
626 return;
628 case MEM:
629 if (in_dest)
631 res->memory = 1;
632 res->unch_memory = RTX_UNCHANGING_P (x);
633 res->volatil = MEM_VOLATILE_P (x);
636 mark_set_resources (XEXP (x, 0), res, 0, 0);
637 return;
639 case SUBREG:
640 if (in_dest)
642 if (GET_CODE (SUBREG_REG (x)) != REG)
643 mark_set_resources (SUBREG_REG (x), res,
644 in_dest, include_delayed_effects);
645 else
647 int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
648 int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
649 for (i = regno; i < last_regno; i++)
650 SET_HARD_REG_BIT (res->regs, i);
653 return;
655 case REG:
656 if (in_dest)
657 for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
658 SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
659 return;
662 /* Process each sub-expression and flag what it needs. */
663 format_ptr = GET_RTX_FORMAT (code);
664 for (i = 0; i < GET_RTX_LENGTH (code); i++)
665 switch (*format_ptr++)
667 case 'e':
668 mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
669 break;
671 case 'E':
672 for (j = 0; j < XVECLEN (x, i); j++)
673 mark_set_resources (XVECEXP (x, i, j), res, in_dest,
674 include_delayed_effects);
675 break;
679 /* Return TRUE if this insn should stop the search for insns to fill delay
680 slots. LABELS_P indicates that labels should terminate the search.
681 In all cases, jumps terminate the search. */
683 static int
684 stop_search_p (insn, labels_p)
685 rtx insn;
686 int labels_p;
688 if (insn == 0)
689 return 1;
691 switch (GET_CODE (insn))
693 case NOTE:
694 case CALL_INSN:
695 return 0;
697 case CODE_LABEL:
698 return labels_p;
700 case JUMP_INSN:
701 case BARRIER:
702 return 1;
704 case INSN:
705 /* OK unless it contains a delay slot or is an `asm' insn of some type.
706 We don't know anything about these. */
707 return (GET_CODE (PATTERN (insn)) == SEQUENCE
708 || GET_CODE (PATTERN (insn)) == ASM_INPUT
709 || asm_noperands (PATTERN (insn)) >= 0);
711 default:
712 abort ();
716 /* Return TRUE if any resources are marked in both RES1 and RES2 or if either
717 resource set contains a volatile memory reference. Otherwise, return FALSE. */
719 static int
720 resource_conflicts_p (res1, res2)
721 struct resources *res1, *res2;
723 if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
724 || (res1->unch_memory && res2->unch_memory)
725 || res1->volatil || res2->volatil)
726 return 1;
728 #ifdef HARD_REG_SET
729 return (res1->regs & res2->regs) != HARD_CONST (0);
730 #else
732 int i;
734 for (i = 0; i < HARD_REG_SET_LONGS; i++)
735 if ((res1->regs[i] & res2->regs[i]) != 0)
736 return 1;
737 return 0;
739 #endif
742 /* Return TRUE if any resource marked in RES, a `struct resources', is
743 referenced by INSN. If INCLUDE_DELAYED_EFFECTS is set, resources used by
744 the called routine are also taken into account.
746 We compute this by computing all the resources referenced by INSN and
747 seeing if this conflicts with RES. It might be faster to directly check
748 ourselves, and this is the way it used to work, but it means duplicating
749 a large block of complex code. */
751 static int
752 insn_references_resource_p (insn, res, include_delayed_effects)
753 register rtx insn;
754 register struct resources *res;
755 int include_delayed_effects;
757 struct resources insn_res;
759 CLEAR_RESOURCE (&insn_res);
760 mark_referenced_resources (insn, &insn_res, include_delayed_effects);
761 return resource_conflicts_p (&insn_res, res);
764 /* Return TRUE if INSN modifies resources that are marked in RES.
765 INCLUDE_DELAYED_EFFECTS is set if the actions of the called routine should be
766 included. CC0 is only modified if it is explicitly set; see comments
767 in front of mark_set_resources for details. */
769 static int
770 insn_sets_resource_p (insn, res, include_delayed_effects)
771 register rtx insn;
772 register struct resources *res;
773 int include_delayed_effects;
775 struct resources insn_sets;
777 CLEAR_RESOURCE (&insn_sets);
778 mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
779 return resource_conflicts_p (&insn_sets, res);
782 /* Find a label at the end of the function or before a RETURN. If there is
783 none, make one. */
785 static rtx
786 find_end_label ()
788 rtx insn;
790 /* If we found one previously, return it. */
791 if (end_of_function_label)
792 return end_of_function_label;
794 /* Otherwise, see if there is a label at the end of the function. If there
795 is, it must be that RETURN insns aren't needed, so that is our return
796 label and we don't have to do anything else. */
798 insn = get_last_insn ();
799 while (GET_CODE (insn) == NOTE
800 || (GET_CODE (insn) == INSN
801 && (GET_CODE (PATTERN (insn)) == USE
802 || GET_CODE (PATTERN (insn)) == CLOBBER)))
803 insn = PREV_INSN (insn);
805 /* When a target threads its epilogue we might already have a
806 suitable return insn. If so put a label before it for the
807 end_of_function_label. */
808 if (GET_CODE (insn) == BARRIER
809 && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
810 && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
812 rtx temp = PREV_INSN (PREV_INSN (insn));
813 end_of_function_label = gen_label_rtx ();
814 LABEL_NUSES (end_of_function_label) = 0;
816 /* Put the label before any USE insns that may precede the RETURN insn. */
817 while (GET_CODE (temp) == USE)
818 temp = PREV_INSN (temp);
820 emit_label_after (end_of_function_label, temp);
823 else if (GET_CODE (insn) == CODE_LABEL)
824 end_of_function_label = insn;
825 else
827 /* Otherwise, make a new label and emit a RETURN and BARRIER,
828 if needed. */
829 end_of_function_label = gen_label_rtx ();
830 LABEL_NUSES (end_of_function_label) = 0;
831 emit_label (end_of_function_label);
832 #ifdef HAVE_return
833 if (HAVE_return)
835 /* The return we make may have delay slots too. */
836 rtx insn = gen_return ();
837 insn = emit_jump_insn (insn);
838 emit_barrier ();
839 if (num_delay_slots (insn) > 0)
840 obstack_ptr_grow (&unfilled_slots_obstack, insn);
842 #endif
845 /* Show one additional use for this label so it won't go away until
846 we are done. */
847 ++LABEL_NUSES (end_of_function_label);
849 return end_of_function_label;
852 /* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
853 the pattern of INSN with the SEQUENCE.
855 Chain the insns so that NEXT_INSN of each insn in the sequence points to
856 the next and NEXT_INSN of the last insn in the sequence points to
857 the first insn after the sequence. Similarly for PREV_INSN. This makes
858 it easier to scan all insns.
860 Returns the SEQUENCE that replaces INSN. */
862 static rtx
863 emit_delay_sequence (insn, list, length, avail)
864 rtx insn;
865 rtx list;
866 int length;
867 int avail;
869 register int i = 1;
870 register rtx li;
871 int had_barrier = 0;
873 /* Allocate the rtvec to hold the insns and the SEQUENCE. */
874 rtvec seqv = rtvec_alloc (length + 1);
875 rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
876 rtx seq_insn = make_insn_raw (seq);
877 rtx first = get_insns ();
878 rtx last = get_last_insn ();
880 /* Make a copy of the insn having delay slots. */
881 rtx delay_insn = copy_rtx (insn);
883 /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
884 confuse further processing. Update LAST in case it was the last insn.
885 We will put the BARRIER back in later. */
886 if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
888 delete_insn (NEXT_INSN (insn));
889 last = get_last_insn ();
890 had_barrier = 1;
893 /* Splice our SEQUENCE into the insn stream where INSN used to be. */
894 NEXT_INSN (seq_insn) = NEXT_INSN (insn);
895 PREV_INSN (seq_insn) = PREV_INSN (insn);
897 if (insn == last)
898 set_new_first_and_last_insn (first, seq_insn);
899 else
900 PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;
902 if (insn == first)
903 set_new_first_and_last_insn (seq_insn, last);
904 else
905 NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;
907 /* Build our SEQUENCE and rebuild the insn chain. */
908 XVECEXP (seq, 0, 0) = delay_insn;
909 INSN_DELETED_P (delay_insn) = 0;
910 PREV_INSN (delay_insn) = PREV_INSN (seq_insn);
912 for (li = list; li; li = XEXP (li, 1), i++)
914 rtx tem = XEXP (li, 0);
915 rtx note;
917 /* Show that this copy of the insn isn't deleted. */
918 INSN_DELETED_P (tem) = 0;
920 XVECEXP (seq, 0, i) = tem;
921 PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
922 NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;
924 /* Remove any REG_DEAD notes because we can't rely on them now
925 that the insn has been moved. */
926 for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
927 if (REG_NOTE_KIND (note) == REG_DEAD)
928 XEXP (note, 0) = const0_rtx;
931 NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);
933 /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
934 last insn in that SEQUENCE to point to us. Similarly for the first
935 insn in the following insn if it is a SEQUENCE. */
937 if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
938 && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
939 NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
940 XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
941 = seq_insn;
943 if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
944 && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
945 PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
947 /* If there used to be a BARRIER, put it back. */
948 if (had_barrier)
949 emit_barrier_after (seq_insn);
951 if (i != length + 1)
952 abort ();
954 return seq_insn;
957 /* Add INSN to DELAY_LIST and return the head of the new list. The list must
958 be in the order in which the insns are to be executed. */
960 static rtx
961 add_to_delay_list (insn, delay_list)
962 rtx insn;
963 rtx delay_list;
965 /* If we have an empty list, just make a new list element. If
966 INSN has its block number recorded, clear it since we may
967 be moving the insn to a new block. */
969 if (delay_list == 0)
971 struct target_info *tinfo;
973 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
974 tinfo; tinfo = tinfo->next)
975 if (tinfo->uid == INSN_UID (insn))
976 break;
978 if (tinfo)
979 tinfo->block = -1;
981 return gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
984 /* Otherwise this must be an INSN_LIST. Add INSN to the end of the
985 list. */
986 XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
988 return delay_list;
991 /* Delete INSN from the delay slot of the insn that it is in. This may
992 produce an insn without anything in its delay slots. */
994 static void
995 delete_from_delay_slot (insn)
996 rtx insn;
998 rtx trial, seq_insn, seq, prev;
999 rtx delay_list = 0;
1000 int i;
1002 /* We first must find the insn containing the SEQUENCE with INSN in its
1003 delay slot. Do this by finding an insn, TRIAL, where
1004 PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL. */
1006 for (trial = insn;
1007 PREV_INSN (NEXT_INSN (trial)) == trial;
1008 trial = NEXT_INSN (trial))
1011 seq_insn = PREV_INSN (NEXT_INSN (trial));
1012 seq = PATTERN (seq_insn);
1014 /* Create a delay list consisting of all the insns other than the one
1015 we are deleting (unless we were the only one). */
1016 if (XVECLEN (seq, 0) > 2)
1017 for (i = 1; i < XVECLEN (seq, 0); i++)
1018 if (XVECEXP (seq, 0, i) != insn)
1019 delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);
1021 /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
1022 list, and rebuild the delay list if non-empty. */
1023 prev = PREV_INSN (seq_insn);
1024 trial = XVECEXP (seq, 0, 0);
1025 delete_insn (seq_insn);
1026 add_insn_after (trial, prev);
1028 if (GET_CODE (trial) == JUMP_INSN
1029 && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
1030 emit_barrier_after (trial);
1032 /* If there are any delay insns, re-emit them. Otherwise clear the
1033 annul flag. */
1034 if (delay_list)
1035 trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
1036 else
1037 INSN_ANNULLED_BRANCH_P (trial) = 0;
1039 INSN_FROM_TARGET_P (insn) = 0;
1041 /* Show we need to fill this insn again. */
1042 obstack_ptr_grow (&unfilled_slots_obstack, trial);
1045 /* Delete INSN, a JUMP_INSN. If it is a conditional jump, we must track down
1046 the insn that sets CC0 for it and delete it too. */
1048 static void
1049 delete_scheduled_jump (insn)
1050 rtx insn;
1052 /* Delete the insn that sets cc0 for us. On machines without cc0, we could
1053 delete the insn that sets the condition code, but it is hard to find it.
1054 Since this case is rare anyway, don't bother trying; there would likely
1055 be other insns that became dead anyway, which we wouldn't know to
1056 delete. */
1058 #ifdef HAVE_cc0
1059 if (reg_mentioned_p (cc0_rtx, insn))
1061 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
1063 /* If a reg-note was found, it points to an insn to set CC0. This
1064 insn is in the delay list of some other insn. So delete it from
1065 the delay list it was in. */
1066 if (note)
1068 if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
1069 && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
1070 delete_from_delay_slot (XEXP (note, 0));
1072 else
1074 /* The insn setting CC0 is our previous insn, but it may be in
1075 a delay slot. It will be the last insn in the delay slot, if
1076 it is. */
1077 rtx trial = previous_insn (insn);
1078 if (GET_CODE (trial) == NOTE)
1079 trial = prev_nonnote_insn (trial);
1080 if (sets_cc0_p (PATTERN (trial)) != 1
1081 || FIND_REG_INC_NOTE (trial, 0))
1082 return;
1083 if (PREV_INSN (NEXT_INSN (trial)) == trial)
1084 delete_insn (trial);
1085 else
1086 delete_from_delay_slot (trial);
1089 #endif
1091 delete_insn (insn);
1094 /* Counters for delay-slot filling. */
1096 #define NUM_REORG_FUNCTIONS 2
1097 #define MAX_DELAY_HISTOGRAM 3
1098 #define MAX_REORG_PASSES 2
1100 static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
1102 static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
1104 static int reorg_pass_number;
1106 static void
1107 note_delay_statistics (slots_filled, index)
1108 int slots_filled, index;
1110 num_insns_needing_delays[index][reorg_pass_number]++;
1111 if (slots_filled > MAX_DELAY_HISTOGRAM)
1112 slots_filled = MAX_DELAY_HISTOGRAM;
1113 num_filled_delays[index][slots_filled][reorg_pass_number]++;
1116 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
1118 /* Optimize the following cases:
1120 1. When a conditional branch skips over only one instruction,
1121 use an annulling branch and put that insn in the delay slot.
1122 Use either a branch that annuls when the condition is true or
1123 invert the test with a branch that annuls when the condition is
1124 false. This saves insns, since otherwise we must copy an insn
1125 from the L1 target.
1127 	(orig)			(skip)			(otherwise)
1128 	Bcc.n L1		Bcc',a L1		Bcc,a L1'
1129 	insn			insn			insn2
1130 L1:			L1:			L1:
1131 	insn2			insn2			insn2
1132 	insn3			insn3			L1':
1133 						insn3
1135 2. When a conditional branch skips over only one instruction,
1136 and after that, it unconditionally branches somewhere else,
1137 perform the similar optimization. This saves executing the
1138 second branch in the case where the inverted condition is true.
1140 	Bcc.n L1		Bcc',a L2
1141 	insn			insn
1142 L1:			L1:
1143 	Bra L2			Bra L2
1145 INSN is a JUMP_INSN.
1147 This should be expanded to skip over N insns, where N is the number
1148 of delay slots required. */
1150 static rtx
1151 optimize_skip (insn)
1152 register rtx insn;
1154 register rtx trial = next_nonnote_insn (insn);
1155 rtx next_trial = next_active_insn (trial);
1156 rtx delay_list = 0;
1157 rtx target_label;
1158 int flags;
1160 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1162 if (trial == 0
1163 || GET_CODE (trial) != INSN
1164 || GET_CODE (PATTERN (trial)) == SEQUENCE
1165 || recog_memoized (trial) < 0
1166 || (! eligible_for_annul_false (insn, 0, trial, flags)
1167 && ! eligible_for_annul_true (insn, 0, trial, flags)))
1168 return 0;
1170 /* There are two cases where we are just executing one insn (we assume
1171 here that a branch requires only one insn; this should be generalized
1172 at some point): Where the branch goes around a single insn or where
1173 we have one insn followed by a branch to the same label we branch to.
1174 In both of these cases, inverting the jump and annulling the delay
1175 slot give the same effect in fewer insns. */
1176 if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
1177 || (next_trial != 0
1178 && GET_CODE (next_trial) == JUMP_INSN
1179 && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
1180 && (simplejump_p (next_trial)
1181 || GET_CODE (PATTERN (next_trial)) == RETURN)))
1183 if (eligible_for_annul_false (insn, 0, trial, flags))
1185 if (invert_jump (insn, JUMP_LABEL (insn)))
1186 INSN_FROM_TARGET_P (trial) = 1;
1187 else if (! eligible_for_annul_true (insn, 0, trial, flags))
1188 return 0;
1191 delay_list = add_to_delay_list (trial, NULL_RTX);
1192 next_trial = next_active_insn (trial);
1193 update_block (trial, trial);
1194 delete_insn (trial);
1196 /* Also, if we are targeting an unconditional
1197 branch, thread our jump to the target of that branch. Don't
1198 change this into a RETURN here, because it may not accept what
1199 we have in the delay slot. We'll fix this up later. */
1200 if (next_trial && GET_CODE (next_trial) == JUMP_INSN
1201 && (simplejump_p (next_trial)
1202 || GET_CODE (PATTERN (next_trial)) == RETURN))
1204 target_label = JUMP_LABEL (next_trial);
1205 if (target_label == 0)
1206 target_label = find_end_label ();
1208 /* Recompute the flags based on TARGET_LABEL since threading
1209 the jump to TARGET_LABEL may change the direction of the
1210 jump (which may change the circumstances in which the
1211 delay slot is nullified). */
1212 flags = get_jump_flags (insn, target_label);
1213 if (eligible_for_annul_true (insn, 0, trial, flags))
1214 reorg_redirect_jump (insn, target_label);
1217 INSN_ANNULLED_BRANCH_P (insn) = 1;
1220 return delay_list;
1222 #endif
1225 /* Encode and return branch direction and prediction information for
1226 INSN assuming it will jump to LABEL.
1228 Non conditional branches return no direction information and
1229 are predicted as very likely taken. */
1231 static int
1232 get_jump_flags (insn, label)
1233 rtx insn, label;
1235 int flags;
1237 /* get_jump_flags can be passed any insn with delay slots, these may
1238 be INSNs, CALL_INSNs, or JUMP_INSNs. Only JUMP_INSNs have branch
1239 direction information, and only if they are conditional jumps.
1241 If LABEL is zero, then there is no way to determine the branch
1242 direction. */
1243 if (GET_CODE (insn) == JUMP_INSN
1244 && (condjump_p (insn) || condjump_in_parallel_p (insn))
1245 && INSN_UID (insn) <= max_uid
1246 && label != 0
1247 && INSN_UID (label) <= max_uid)
1248 flags
1249 = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
1250 ? ATTR_FLAG_forward : ATTR_FLAG_backward;
1251 /* No valid direction information. */
1252 else
1253 flags = 0;
1255 /* If insn is a conditional branch, call mostly_true_jump to
1256 determine the branch prediction.
1258 Non conditional branches are predicted as very likely taken. */
1259 if (GET_CODE (insn) == JUMP_INSN
1260 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
1262 int prediction;
1264 prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
1265 switch (prediction)
1267 case 2:
1268 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1269 break;
1270 case 1:
1271 flags |= ATTR_FLAG_likely;
1272 break;
1273 case 0:
1274 flags |= ATTR_FLAG_unlikely;
1275 break;
1276 case -1:
1277 flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
1278 break;
1280 default:
1281 abort ();
1284 else
1285 flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
1287 return flags;
1290 /* Return 1 if INSN is a destination that will be branched to rarely (the
1291 return point of a function); return 2 if INSN will be branched to very
1292 rarely (a call to a function that doesn't return). Otherwise,
1293 return 0. */
1295 static int
1296 rare_destination (insn)
1297 rtx insn;
1299 int jump_count = 0;
1300 rtx next;
1302 for (; insn; insn = next)
1304 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
1305 insn = XVECEXP (PATTERN (insn), 0, 0);
1307 next = NEXT_INSN (insn);
1309 switch (GET_CODE (insn))
1311 case CODE_LABEL:
1312 return 0;
1313 case BARRIER:
1314 /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
1315 don't scan past JUMP_INSNs, so any barrier we find here must
1316 have been after a CALL_INSN and hence mean the call doesn't
1317 return. */
1318 return 2;
1319 case JUMP_INSN:
1320 if (GET_CODE (PATTERN (insn)) == RETURN)
1321 return 1;
1322 else if (simplejump_p (insn)
1323 && jump_count++ < 10)
1324 next = JUMP_LABEL (insn);
1325 else
1326 return 0;
1330 /* If we got here it means we hit the end of the function. So this
1331 is an unlikely destination. */
1333 return 1;
1336 /* Return truth value of the statement that this branch
1337 is mostly taken. If we think that the branch is extremely likely
1338 to be taken, we return 2. If the branch is slightly more likely to be
1339 taken, return 1. If the branch is slightly less likely to be taken,
1340 return 0 and if the branch is highly unlikely to be taken, return -1.
1342 CONDITION, if non-zero, is the condition that JUMP_INSN is testing. */
1344 static int
1345 mostly_true_jump (jump_insn, condition)
1346 rtx jump_insn, condition;
1348 rtx target_label = JUMP_LABEL (jump_insn);
1349 rtx insn;
1350 int rare_dest = rare_destination (target_label);
1351 int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));
1353 /* CYGNUS LOCAL -- branch prediction */
1354 int expected = condjump_expect_p (jump_insn);
1356 if (expected > 0)
1357 return 2;
1358 else if (expected < 0)
1359 return -1;
1360 /* END CYGNUS LOCAL -- branch prediction */
1362 /* If branch probabilities are available, then use that number since it
1363 always gives a correct answer. */
1364 if (flag_branch_probabilities)
1366 rtx note = find_reg_note (jump_insn, REG_BR_PROB, 0);
1367 if (note)
1369 int prob = XINT (note, 0);
1371 if (prob >= REG_BR_PROB_BASE * 9 / 10)
1372 return 2;
1373 else if (prob >= REG_BR_PROB_BASE / 2)
1374 return 1;
1375 else if (prob >= REG_BR_PROB_BASE / 10)
1376 return 0;
1377 else
1378 return -1;
1382 /* If this is a branch outside a loop, it is highly unlikely. */
1383 if (GET_CODE (PATTERN (jump_insn)) == SET
1384 && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
1385 && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
1386 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
1387 || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
1388 && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
1389 return -1;
1391 if (target_label)
1393 /* If this is the test of a loop, it is very likely true. We scan
1394 backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
1395 before the next real insn, we assume the branch is to the top of
1396 the loop. */
1397 for (insn = PREV_INSN (target_label);
1398 insn && GET_CODE (insn) == NOTE;
1399 insn = PREV_INSN (insn))
1400 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1401 return 2;
1403 /* If this is a jump to the test of a loop, it is likely true. We scan
1404 forwards from the target label. If we find a NOTE_INSN_LOOP_VTOP
1405 before the next real insn, we assume the branch is to the loop branch
1406 test. */
1407 for (insn = NEXT_INSN (target_label);
1408 insn && GET_CODE (insn) == NOTE;
1409 insn = NEXT_INSN (insn))
1410 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
1411 return 1;
1414 /* Look at the relative rarities of the fallthrough and destination. If
1415 they differ, we can predict the branch that way. */
1417 switch (rare_fallthrough - rare_dest)
1419 case -2:
1420 return -1;
1421 case -1:
1422 return 0;
1423 case 0:
1424 break;
1425 case 1:
1426 return 1;
1427 case 2:
1428 return 2;
1431 /* If we couldn't figure out what this jump was, assume it won't be
1432 taken. This should be rare. */
1433 if (condition == 0)
1434 return 0;
1436 /* EQ tests are usually false and NE tests are usually true. Also,
1437 most quantities are positive, so we can make the appropriate guesses
1438 about signed comparisons against zero. */
1439 switch (GET_CODE (condition))
1441 case CONST_INT:
1442 /* Unconditional branch. */
1443 return 1;
1444 case EQ:
1445 return 0;
1446 case NE:
1447 return 1;
1448 case LE:
1449 case LT:
1450 if (XEXP (condition, 1) == const0_rtx)
1451 return 0;
1452 break;
1453 case GE:
1454 case GT:
1455 if (XEXP (condition, 1) == const0_rtx)
1456 return 1;
1457 break;
1460 /* Predict that backward branches are usually taken and forward branches usually are not. If
1461 we don't know whether this is forward or backward, assume the branch
1462 will be taken, since most are. */
1463 return (target_label == 0 || INSN_UID (jump_insn) > max_uid
1464 || INSN_UID (target_label) > max_uid
1465 || (uid_to_ruid[INSN_UID (jump_insn)]
1466 > uid_to_ruid[INSN_UID (target_label)]));
1469 /* Return the condition under which INSN will branch to TARGET. If TARGET
1470 is zero, return the condition under which INSN will return. If INSN is
1471 an unconditional branch, return const_true_rtx. If INSN isn't a simple
1472 type of jump, or it doesn't go to TARGET, return 0. */
1474 static rtx
1475 get_branch_condition (insn, target)
1476 rtx insn;
1477 rtx target;
1479 rtx pat = PATTERN (insn);
1480 rtx src;
1482 if (condjump_in_parallel_p (insn))
1483 pat = XVECEXP (pat, 0, 0);
1485 if (GET_CODE (pat) == RETURN)
1486 return target == 0 ? const_true_rtx : 0;
1488 else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
1489 return 0;
1491 src = SET_SRC (pat);
1492 if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
1493 return const_true_rtx;
1495 else if (GET_CODE (src) == IF_THEN_ELSE
1496 && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
1497 || (GET_CODE (XEXP (src, 1)) == LABEL_REF
1498 && XEXP (XEXP (src, 1), 0) == target))
1499 && XEXP (src, 2) == pc_rtx)
1500 return XEXP (src, 0);
1502 else if (GET_CODE (src) == IF_THEN_ELSE
1503 && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
1504 || (GET_CODE (XEXP (src, 2)) == LABEL_REF
1505 && XEXP (XEXP (src, 2), 0) == target))
1506 && XEXP (src, 1) == pc_rtx)
1507 return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
1508 GET_MODE (XEXP (src, 0)),
1509 XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));
1511 return 0;
1514 /* Return non-zero if CONDITION is more strict than the condition of
1515 INSN, i.e., if INSN will always branch if CONDITION is true. */
1517 static int
1518 condition_dominates_p (condition, insn)
1519 rtx condition;
1520 rtx insn;
1522 rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
1523 enum rtx_code code = GET_CODE (condition);
1524 enum rtx_code other_code;
1526 if (rtx_equal_p (condition, other_condition)
1527 || other_condition == const_true_rtx)
1528 return 1;
1530 else if (condition == const_true_rtx || other_condition == 0)
1531 return 0;
1533 other_code = GET_CODE (other_condition);
1534 if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
1535 || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
1536 || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
1537 return 0;
1539 return comparison_dominates_p (code, other_code);
1542 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1543 any insns already in the delay slot of JUMP. */
1545 static int
1546 redirect_with_delay_slots_safe_p (jump, newlabel, seq)
1547 rtx jump, newlabel, seq;
1549 int flags, slots, i;
1550 rtx pat = PATTERN (seq);
1552 /* Make sure all the delay slots of this jump would still
1553 be valid after threading the jump. If they are still
1554 valid, then return non-zero. */
1556 flags = get_jump_flags (jump, newlabel);
1557 for (i = 1; i < XVECLEN (pat, 0); i++)
1558 if (! (
1559 #ifdef ANNUL_IFFALSE_SLOTS
1560 (INSN_ANNULLED_BRANCH_P (jump)
1561 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1562 ? eligible_for_annul_false (jump, i - 1,
1563 XVECEXP (pat, 0, i), flags) :
1564 #endif
1565 #ifdef ANNUL_IFTRUE_SLOTS
1566 (INSN_ANNULLED_BRANCH_P (jump)
1567 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
1568 ? eligible_for_annul_true (jump, i - 1,
1569 XVECEXP (pat, 0, i), flags) :
1570 #endif
1571 eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
1572 break;
1574 return (i == XVECLEN (pat, 0));
1577 /* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
1578 any insns we wish to place in the delay slot of JUMP. */
1580 static int
1581 redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
1582 rtx jump, newlabel, delay_list;
1584 int flags, i;
1585 rtx li;
1587 /* Make sure all the insns in DELAY_LIST would still be
1588 valid after threading the jump. If they are still
1589 valid, then return non-zero. */
1591 flags = get_jump_flags (jump, newlabel);
1592 for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
1593 if (! (
1594 #ifdef ANNUL_IFFALSE_SLOTS
1595 (INSN_ANNULLED_BRANCH_P (jump)
1596 && INSN_FROM_TARGET_P (XEXP (li, 0)))
1597 ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
1598 #endif
1599 #ifdef ANNUL_IFTRUE_SLOTS
1600 (INSN_ANNULLED_BRANCH_P (jump)
1601 && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
1602 ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
1603 #endif
1604 eligible_for_delay (jump, i, XEXP (li, 0), flags)))
1605 break;
1607 return (li == NULL);
1611 /* INSN branches to an insn whose pattern SEQ is a SEQUENCE. Given that
1612 the condition tested by INSN is CONDITION and the resources shown in
1613 OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
1614 from SEQ's delay list, in addition to whatever insns it may execute
1615 (in DELAY_LIST). SETS and NEEDED denote resources already set and
1616 needed while searching for delay slot insns. Return the concatenated
1617 delay list if possible, otherwise, return 0.
1619 SLOTS_TO_FILL is the total number of slots required by INSN, and
1620 PSLOTS_FILLED points to the number filled so far (also the number of
1621 insns in DELAY_LIST). It is updated with the number that have been
1622 filled from the SEQUENCE, if any.
1624 PANNUL_P points to a non-zero value if we already know that we need
1625 to annul INSN. If this routine determines that annulling is needed,
1626 it may set that value non-zero.
1628 PNEW_THREAD points to a location that is to receive the place at which
1629 execution should continue. */
1631 static rtx
1632 steal_delay_list_from_target (insn, condition, seq, delay_list,
1633 sets, needed, other_needed,
1634 slots_to_fill, pslots_filled, pannul_p,
1635 pnew_thread)
1636 rtx insn, condition;
1637 rtx seq;
1638 rtx delay_list;
1639 struct resources *sets, *needed, *other_needed;
1640 int slots_to_fill;
1641 int *pslots_filled;
1642 int *pannul_p;
1643 rtx *pnew_thread;
1645 rtx temp;
1646 int slots_remaining = slots_to_fill - *pslots_filled;
1647 int total_slots_filled = *pslots_filled;
1648 rtx new_delay_list = 0;
1649 int must_annul = *pannul_p;
1650 int i;
1652 /* We can't do anything if there are more delay slots in SEQ than we
1653 can handle, or if we don't know that it will be a taken branch.
1654 We know that it will be a taken branch if it is either an unconditional
1655 branch or a conditional branch with a stricter branch condition.
1657 Also, exit if the branch has more than one set, since then it is computing
1658 other results that can't be ignored, e.g. the HPPA mov&branch instruction.
1659 ??? It may be possible to move other sets into INSN in addition to
1660 moving the instructions in the delay slots. */
1662 if (XVECLEN (seq, 0) - 1 > slots_remaining
1663 || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
1664 || ! single_set (XVECEXP (seq, 0, 0)))
1665 return delay_list;
1667 for (i = 1; i < XVECLEN (seq, 0); i++)
1669 rtx trial = XVECEXP (seq, 0, i);
1670 int flags;
1672 if (insn_references_resource_p (trial, sets, 0)
1673 || insn_sets_resource_p (trial, needed, 0)
1674 || insn_sets_resource_p (trial, sets, 0)
1675 #ifdef HAVE_cc0
1676 /* If TRIAL sets CC0, we can't copy it, so we can't steal this
1677 delay list. */
1678 || find_reg_note (trial, REG_CC_USER, NULL_RTX)
1679 #endif
1680 /* If TRIAL is from the fallthrough code of an annulled branch insn
1681 in SEQ, we cannot use it. */
1682 || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
1683 && ! INSN_FROM_TARGET_P (trial)))
1684 return delay_list;
1686 /* If this insn was already done (usually in a previous delay slot),
1687 pretend we put it in our delay slot. */
1688 if (redundant_insn (trial, insn, new_delay_list))
1689 continue;
1691 /* We will end up re-vectoring this branch, so compute flags
1692 based on jumping to the new label. */
1693 flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));
1695 if (! must_annul
1696 && ((condition == const_true_rtx
1697 || (! insn_sets_resource_p (trial, other_needed, 0)
1698 && ! may_trap_p (PATTERN (trial)))))
1699 ? eligible_for_delay (insn, total_slots_filled, trial, flags)
1700 : (must_annul = 1,
1701 eligible_for_annul_false (insn, total_slots_filled, trial, flags)))
1703 temp = copy_rtx (trial);
1704 INSN_FROM_TARGET_P (temp) = 1;
1705 new_delay_list = add_to_delay_list (temp, new_delay_list);
1706 total_slots_filled++;
1708 if (--slots_remaining == 0)
1709 break;
1711 else
1712 return delay_list;
1715 /* Show the place to which we will be branching. */
1716 *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));
1718 /* Add any new insns to the delay list and update the count of the
1719 number of slots filled. */
1720 *pslots_filled = total_slots_filled;
1721 *pannul_p = must_annul;
1723 if (delay_list == 0)
1724 return new_delay_list;
1726 for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
1727 delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);
1729 return delay_list;
1732 /* Similar to steal_delay_list_from_target except that SEQ is on the
1733 fallthrough path of INSN. Here we only do something if the delay insn
1734 of SEQ is an unconditional branch. In that case we steal its delay slot
1735 for INSN since unconditional branches are much easier to fill. */
1737 static rtx
1738 steal_delay_list_from_fallthrough (insn, condition, seq,
1739 delay_list, sets, needed, other_needed,
1740 slots_to_fill, pslots_filled, pannul_p)
1741 rtx insn, condition;
1742 rtx seq;
1743 rtx delay_list;
1744 struct resources *sets, *needed, *other_needed;
1745 int slots_to_fill;
1746 int *pslots_filled;
1747 int *pannul_p;
1749 int i;
1750 int flags;
1752 flags = get_jump_flags (insn, JUMP_LABEL (insn));
1754 /* We can't do anything if SEQ's delay insn isn't an
1755 unconditional branch. */
1757 if (! simplejump_p (XVECEXP (seq, 0, 0))
1758 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
1759 return delay_list;
1761 for (i = 1; i < XVECLEN (seq, 0); i++)
1763 rtx trial = XVECEXP (seq, 0, i);
1765 /* If TRIAL sets CC0, stealing it will move it too far from the use
1766 of CC0. */
1767 if (insn_references_resource_p (trial, sets, 0)
1768 || insn_sets_resource_p (trial, needed, 0)
1769 || insn_sets_resource_p (trial, sets, 0)
1770 #ifdef HAVE_cc0
1771 || sets_cc0_p (PATTERN (trial))
1772 #endif
1775 break;
1777 /* If this insn was already done, we don't need it. */
1778 if (redundant_insn (trial, insn, delay_list))
1780 delete_from_delay_slot (trial);
1781 continue;
1784 if (! *pannul_p
1785 && ((condition == const_true_rtx
1786 || (! insn_sets_resource_p (trial, other_needed, 0)
1787 && ! may_trap_p (PATTERN (trial)))))
1788 ? eligible_for_delay (insn, *pslots_filled, trial, flags)
1789 : (*pannul_p = 1,
1790 eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
1792 delete_from_delay_slot (trial);
1793 delay_list = add_to_delay_list (trial, delay_list);
1795 if (++(*pslots_filled) == slots_to_fill)
1796 break;
1798 else
1799 break;
1802 return delay_list;
1805 /* Try merging insns starting at THREAD which match exactly the insns in
1806 INSN's delay list.
1808 If all insns were matched and the insn was previously annulling, the
1809 annul bit will be cleared.
1811 For each insn that is merged, if the branch is or will be non-annulling,
1812 we delete the merged insn. */
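/* A hedged example of the merge (hypothetical insns): suppose INSN is an
   annulling branch whose delay slot holds `add %o1,%o1,1' copied from the
   branch target, and the fall-through code at THREAD begins with the same
   `add %o1,%o1,1'.  Once the annul bit is cleared the slot insn executes
   on both paths, so the duplicate at THREAD is deleted; as stated above,
   this is done only when every slot of INSN has been matched.  Purely an
   illustration of the intent.  */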
1814 static void
1815 try_merge_delay_insns (insn, thread)
1816 rtx insn, thread;
1818 rtx trial, next_trial;
1819 rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
1820 int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
1821 int slot_number = 1;
1822 int num_slots = XVECLEN (PATTERN (insn), 0);
1823 rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1824 struct resources set, needed;
1825 rtx merged_insns = 0;
1826 int i;
1827 int flags;
1829 flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));
1831 CLEAR_RESOURCE (&needed);
1832 CLEAR_RESOURCE (&set);
1834 /* If this is not an annulling branch, take into account anything needed in
1835 NEXT_TO_MATCH. This prevents two increments from being incorrectly
1836 folded into one. If we are annulling, this would be the correct
1837 thing to do. (The alternative, looking at things set in NEXT_TO_MATCH,
1838 will essentially disable this optimization. This method is somewhat of
1839 a kludge, but I don't see a better way.) */
1840 if (! annul_p)
1841 mark_referenced_resources (next_to_match, &needed, 1);
1843 for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
1845 rtx pat = PATTERN (trial);
1846 rtx oldtrial = trial;
1848 next_trial = next_nonnote_insn (trial);
1850 /* TRIAL must be a CALL_INSN or INSN. Skip USE and CLOBBER. */
1851 if (GET_CODE (trial) == INSN
1852 && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
1853 continue;
1855 if (GET_CODE (next_to_match) == GET_CODE (trial)
1856 #ifdef HAVE_cc0
1857 /* We can't share an insn that sets cc0. */
1858 && ! sets_cc0_p (pat)
1859 #endif
1860 && ! insn_references_resource_p (trial, &set, 1)
1861 && ! insn_sets_resource_p (trial, &set, 1)
1862 && ! insn_sets_resource_p (trial, &needed, 1)
1863 && (trial = try_split (pat, trial, 0)) != 0
1864 /* Update next_trial, in case try_split succeeded. */
1865 && (next_trial = next_nonnote_insn (trial))
1866 /* Likewise THREAD. */
1867 && (thread = oldtrial == thread ? trial : thread)
1868 && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
1869 /* Have to test this condition if annul condition is different
1870 from (and less restrictive than) non-annulling one. */
1871 && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
1874 if (! annul_p)
1876 update_block (trial, thread);
1877 if (trial == thread)
1878 thread = next_active_insn (thread);
1880 delete_insn (trial);
1881 INSN_FROM_TARGET_P (next_to_match) = 0;
1883 else
1884 merged_insns = gen_rtx (INSN_LIST, VOIDmode, trial, merged_insns);
1886 if (++slot_number == num_slots)
1887 break;
1889 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1890 if (! annul_p)
1891 mark_referenced_resources (next_to_match, &needed, 1);
1894 mark_set_resources (trial, &set, 0, 1);
1895 mark_referenced_resources (trial, &needed, 1);
1898 /* See if we stopped on a filled insn. If we did, try to see if its
1899 delay slots match. */
1900 if (slot_number != num_slots
1901 && trial && GET_CODE (trial) == INSN
1902 && GET_CODE (PATTERN (trial)) == SEQUENCE
1903 && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
1905 rtx pat = PATTERN (trial);
1906 rtx filled_insn = XVECEXP (pat, 0, 0);
1908 /* Account for resources set/needed by the filled insn. */
1909 mark_set_resources (filled_insn, &set, 0, 1);
1910 mark_referenced_resources (filled_insn, &needed, 1);
1912 for (i = 1; i < XVECLEN (pat, 0); i++)
1914 rtx dtrial = XVECEXP (pat, 0, i);
1916 if (! insn_references_resource_p (dtrial, &set, 1)
1917 && ! insn_sets_resource_p (dtrial, &set, 1)
1918 && ! insn_sets_resource_p (dtrial, &needed, 1)
1919 #ifdef HAVE_cc0
1920 && ! sets_cc0_p (PATTERN (dtrial))
1921 #endif
1922 && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
1923 && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
1925 if (! annul_p)
1927 update_block (dtrial, thread);
1928 delete_from_delay_slot (dtrial);
1929 INSN_FROM_TARGET_P (next_to_match) = 0;
1931 else
1932 merged_insns = gen_rtx (INSN_LIST, SImode, dtrial,
1933 merged_insns);
1935 if (++slot_number == num_slots)
1936 break;
1938 next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
1943 /* If all insns in the delay slot have been matched and we were previously
1944 annulling the branch, we need not do so any more. In that case delete all the
1945 merged insns. Also clear the INSN_FROM_TARGET_P bit of each insn in
1946 the delay list so that we know that it isn't only being used at the
1947 target. */
1948 if (slot_number == num_slots && annul_p)
1950 for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
1952 if (GET_MODE (merged_insns) == SImode)
1954 update_block (XEXP (merged_insns, 0), thread);
1955 delete_from_delay_slot (XEXP (merged_insns, 0));
1957 else
1959 update_block (XEXP (merged_insns, 0), thread);
1960 delete_insn (XEXP (merged_insns, 0));
1964 INSN_ANNULLED_BRANCH_P (delay_insn) = 0;
1966 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1967 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
1971 /* See if INSN is redundant with an insn in front of TARGET. Often this
1972 is called when INSN is a candidate for a delay slot of TARGET.
1973 DELAY_LIST are insns that will be placed in delay slots of TARGET in front
1974 of INSN. Often INSN will be redundant with an insn in a delay slot of
1975 some previous insn. This happens when we have a series of branches to the
1976 same label; in that case the first insn at the target might want to go
1977 into each of the delay slots.
1979 If we are not careful, this routine can take up a significant fraction
1980 of the total compilation time (4%), but only wins rarely. Hence we
1981 speed this routine up by making two passes. The first pass goes back
1982 until it hits a label and sees if it can find an insn with an identical
1983 pattern. Only in this (relatively rare) event does it check for
1984 data conflicts.
1986 We do not split insns we encounter. This could cause us not to find a
1987 redundant insn, but the cost of splitting seems greater than the possible
1988 gain in rare cases. */
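/* Illustrative scenario (hypothetical code): two branches in a row both
   jump to L1, and L1 begins with `ld [%o2],%o1'.  After that load has
   been copied into the first branch's delay slot, the same load is a
   candidate for the second branch's slot as well; scanning backwards from
   the second branch finds the identical pattern already sitting in the
   first branch's slot, so the second slot can merely pretend it holds the
   load.  Only a sketch of the case described above; the data-conflict
   checks below still have to pass.  */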
1990 static rtx
1991 redundant_insn (insn, target, delay_list)
1992 rtx insn;
1993 rtx target;
1994 rtx delay_list;
1996 rtx target_main = target;
1997 rtx ipat = PATTERN (insn);
1998 rtx trial, pat;
1999 struct resources needed, set;
2000 int i;
2002 /* Scan backwards looking for a match. */
2003 for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
2005 if (GET_CODE (trial) == CODE_LABEL)
2006 return 0;
2008 if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
2009 continue;
2011 pat = PATTERN (trial);
2012 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2013 continue;
2015 if (GET_CODE (pat) == SEQUENCE)
2017 /* Stop for a CALL and its delay slots because it is difficult to
2018 track its resource needs correctly. */
2019 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2020 return 0;
2022 /* Stop for an INSN or JUMP_INSN with delayed effects and its delay
2023 slots because it is difficult to track its resource needs
2024 correctly. */
2026 #ifdef INSN_SETS_ARE_DELAYED
2027 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2028 return 0;
2029 #endif
2031 #ifdef INSN_REFERENCES_ARE_DELAYED
2032 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2033 return 0;
2034 #endif
2036 /* See if any of the insns in the delay slot match, updating
2037 resource requirements as we go. */
2038 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2039 if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
2040 && rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat))
2041 break;
2043 /* If found a match, exit this loop early. */
2044 if (i > 0)
2045 break;
2048 else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat))
2049 break;
2052 /* If we didn't find an insn that matches, return 0. */
2053 if (trial == 0)
2054 return 0;
2056 /* See what resources this insn sets and needs. If they overlap, or
2057 if this insn references CC0, it can't be redundant. */
2059 CLEAR_RESOURCE (&needed);
2060 CLEAR_RESOURCE (&set);
2061 mark_set_resources (insn, &set, 0, 1);
2062 mark_referenced_resources (insn, &needed, 1);
2064 /* If TARGET is a SEQUENCE, get the main insn. */
2065 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2066 target_main = XVECEXP (PATTERN (target), 0, 0);
2068 if (resource_conflicts_p (&needed, &set)
2069 #ifdef HAVE_cc0
2070 || reg_mentioned_p (cc0_rtx, ipat)
2071 #endif
2072 /* The insn requiring the delay may not set anything needed or set by
2073 INSN. */
2074 || insn_sets_resource_p (target_main, &needed, 1)
2075 || insn_sets_resource_p (target_main, &set, 1))
2076 return 0;
2078 /* Insns we pass may not set either NEEDED or SET, so merge them for
2079 simpler tests. */
2080 needed.memory |= set.memory;
2081 needed.unch_memory |= set.unch_memory;
2082 IOR_HARD_REG_SET (needed.regs, set.regs);
2084 /* This insn isn't redundant if it conflicts with an insn that either is
2085 or will be in a delay slot of TARGET. */
2087 while (delay_list)
2089 if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
2090 return 0;
2091 delay_list = XEXP (delay_list, 1);
2094 if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
2095 for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
2096 if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
2097 return 0;
2099 /* Scan backwards until we reach a label or an insn that uses something
2100 INSN sets or sets something INSN uses or sets. */
2102 for (trial = PREV_INSN (target);
2103 trial && GET_CODE (trial) != CODE_LABEL;
2104 trial = PREV_INSN (trial))
2106 if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
2107 && GET_CODE (trial) != JUMP_INSN)
2108 continue;
2110 pat = PATTERN (trial);
2111 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
2112 continue;
2114 if (GET_CODE (pat) == SEQUENCE)
2116 /* If this is a CALL_INSN and its delay slots, it is hard to track
2117 the resource needs properly, so give up. */
2118 if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
2119 return 0;
2121 /* If this is an INSN or JUMP_INSN with delayed effects, it
2122 is hard to track the resource needs properly, so give up. */
2124 #ifdef INSN_SETS_ARE_DELAYED
2125 if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2126 return 0;
2127 #endif
2129 #ifdef INSN_REFERENCES_ARE_DELAYED
2130 if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
2131 return 0;
2132 #endif
2134 /* See if any of the insns in the delay slot match, updating
2135 resource requirements as we go. */
2136 for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
2138 rtx candidate = XVECEXP (pat, 0, i);
2140 /* If an insn will be annulled if the branch is false, it isn't
2141 considered as a possible duplicate insn. */
2142 if (rtx_equal_p (PATTERN (candidate), ipat)
2143 && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2144 && INSN_FROM_TARGET_P (candidate)))
2146 /* Show that this insn will be used in the sequel. */
2147 INSN_FROM_TARGET_P (candidate) = 0;
2148 return candidate;
2151 /* Unless this is an annulled insn from the target of a branch,
2152 we must stop if it sets anything needed or set by INSN. */
2153 if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
2154 || ! INSN_FROM_TARGET_P (candidate))
2155 && insn_sets_resource_p (candidate, &needed, 1))
2156 return 0;
2160 /* If the insn requiring the delay slot conflicts with INSN, we
2161 must stop. */
2162 if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
2163 return 0;
2165 else
2167 /* See if TRIAL is the same as INSN. */
2168 pat = PATTERN (trial);
2169 if (rtx_equal_p (pat, ipat))
2170 return trial;
2172 /* Can't go any further if TRIAL conflicts with INSN. */
2173 if (insn_sets_resource_p (trial, &needed, 1))
2174 return 0;
2178 return 0;
2181 /* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
2182 it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
2183 is non-zero, we are allowed to fall into this thread; otherwise, we are
2184 not.
2186 If LABEL is used more than once or we pass a label other than LABEL before
2187 finding an active insn, we do not own this thread. */
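/* E.g. (hypothetical): if the branch being scanned jumps to L4, L4 has
   LABEL_NUSES of 1, and a BARRIER precedes it when fall-through is not
   allowed, then the code at L4 is owned and its insns may be moved rather
   than copied.  If some other jump also reaches L4, we do not own the
   thread and can only copy insns from its head.  A sketch of the test
   made below.  */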
2189 static int
2190 own_thread_p (thread, label, allow_fallthrough)
2191 rtx thread;
2192 rtx label;
2193 int allow_fallthrough;
2195 rtx active_insn;
2196 rtx insn;
2198 /* We don't own the function end. */
2199 if (thread == 0)
2200 return 0;
2202 /* Get the first active insn, or THREAD, if it is an active insn. */
2203 active_insn = next_active_insn (PREV_INSN (thread));
2205 for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
2206 if (GET_CODE (insn) == CODE_LABEL
2207 && (insn != label || LABEL_NUSES (insn) != 1))
2208 return 0;
2210 if (allow_fallthrough)
2211 return 1;
2213 /* Ensure that we reach a BARRIER before any insn or label. */
2214 for (insn = prev_nonnote_insn (thread);
2215 insn == 0 || GET_CODE (insn) != BARRIER;
2216 insn = prev_nonnote_insn (insn))
2217 if (insn == 0
2218 || GET_CODE (insn) == CODE_LABEL
2219 || (GET_CODE (insn) == INSN
2220 && GET_CODE (PATTERN (insn)) != USE
2221 && GET_CODE (PATTERN (insn)) != CLOBBER))
2222 return 0;
2224 return 1;
2227 /* Find the number of the basic block that starts closest to INSN. Return -1
2228 if we couldn't find such a basic block. */
2230 static int
2231 find_basic_block (insn)
2232 rtx insn;
2234 int i;
2236 /* Scan backwards to the previous BARRIER. Then see if we can find a
2237 label that starts a basic block. Return the basic block number. */
2239 for (insn = prev_nonnote_insn (insn);
2240 insn && GET_CODE (insn) != BARRIER;
2241 insn = prev_nonnote_insn (insn))
2244 /* The start of the function is basic block zero. */
2245 if (insn == 0)
2246 return 0;
2248 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
2249 anything other than a CODE_LABEL or note, we can't find this code. */
2250 for (insn = next_nonnote_insn (insn);
2251 insn && GET_CODE (insn) == CODE_LABEL;
2252 insn = next_nonnote_insn (insn))
2254 for (i = 0; i < n_basic_blocks; i++)
2255 if (insn == basic_block_head[i])
2256 return i;
2259 return -1;
2262 /* Called when INSN is being moved from a location near the target of a jump.
2263 We leave a marker of the form (use (INSN)) immediately in front
2264 of WHERE for mark_target_live_regs. These markers will be deleted when
2265 reorg finishes.
2267 We used to try to update the live status of registers if WHERE is at
2268 the start of a basic block, but that can't work since we may remove a
2269 BARRIER in relax_delay_slots. */
2271 static void
2272 update_block (insn, where)
2273 rtx insn;
2274 rtx where;
2276 int b;
2278 /* Ignore if this was in a delay slot and it came from the target of
2279 a branch. */
2280 if (INSN_FROM_TARGET_P (insn))
2281 return;
2283 emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);
2285 /* INSN might be making a value live in a block where it didn't use to
2286 be. So recompute liveness information for this block. */
2288 b = find_basic_block (insn);
2289 if (b != -1)
2290 bb_ticks[b]++;
2293 /* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
2294 the basic block containing the jump. */
2296 static int
2297 reorg_redirect_jump (jump, nlabel)
2298 rtx jump;
2299 rtx nlabel;
2301 int b = find_basic_block (jump);
2303 if (b != -1)
2304 bb_ticks[b]++;
2306 return redirect_jump (jump, nlabel);
2309 /* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
2310 We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
2311 that reference values used in INSN. If we find one, then we move the
2312 REG_DEAD note to INSN.
2314 This is needed to handle the case where a later insn (after INSN) has a
2315 REG_DEAD note for a register used by INSN, and this later insn subsequently
2316 gets moved before a CODE_LABEL because it is a redundant insn. In this
2317 case, mark_target_live_regs may be confused into thinking the register
2318 is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
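/* For example (hypothetical register): INSN uses %o4 and some insn P
   between INSN and DELAYED_INSN carries a (REG_DEAD %o4) note.  Once INSN
   is moved forward into the delay slot, its use of %o4 would come after
   the recorded death, so the note is transferred from P to INSN to keep
   the death information on the last real use.  A sketch of the situation
   handled below.  */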
2320 static void
2321 update_reg_dead_notes (insn, delayed_insn)
2322 rtx insn, delayed_insn;
2324 rtx p, link, next;
2326 for (p = next_nonnote_insn (insn); p != delayed_insn;
2327 p = next_nonnote_insn (p))
2328 for (link = REG_NOTES (p); link; link = next)
2330 next = XEXP (link, 1);
2332 if (REG_NOTE_KIND (link) != REG_DEAD
2333 || GET_CODE (XEXP (link, 0)) != REG)
2334 continue;
2336 if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
2338 /* Move the REG_DEAD note from P to INSN. */
2339 remove_note (p, link);
2340 XEXP (link, 1) = REG_NOTES (insn);
2341 REG_NOTES (insn) = link;
2346 /* Called when an insn redundant with start_insn is deleted. If there
2347 is a REG_DEAD note for the target of start_insn between start_insn
2348 and stop_insn, then the REG_DEAD note needs to be deleted since the
2349 value no longer dies there.
2351 If the REG_DEAD note isn't deleted, then mark_target_live_regs may be
2352 confused into thinking the register is dead. */
2354 static void
2355 fix_reg_dead_note (start_insn, stop_insn)
2356 rtx start_insn, stop_insn;
2358 rtx p, link, next;
2360 for (p = next_nonnote_insn (start_insn); p != stop_insn;
2361 p = next_nonnote_insn (p))
2362 for (link = REG_NOTES (p); link; link = next)
2364 next = XEXP (link, 1);
2366 if (REG_NOTE_KIND (link) != REG_DEAD
2367 || GET_CODE (XEXP (link, 0)) != REG)
2368 continue;
2370 if (reg_set_p (XEXP (link, 0), PATTERN (start_insn)))
2372 remove_note (p, link);
2373 return;
2378 /* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
2380 This handles the case of udivmodXi4 instructions which optimize their
2381 output depending on whether any REG_UNUSED notes are present.
2382 We must make sure that INSN calculates as many results as REDUNDANT_INSN
2383 does. */
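/* Concrete (hypothetical) case: INSN and REDUNDANT_INSN are both
   udivmodsi4 instructions writing quotient %o1 and remainder %o2.  If
   INSN carries (REG_UNUSED %o2) it may have been output in a form that
   never computes the remainder; since REDUNDANT_INSN, which is being
   skipped as redundant, did need %o2, that note is removed so INSN
   computes both results.  Register names and the exact pattern are
   illustrative only.  */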
2385 static void
2386 update_reg_unused_notes (insn, redundant_insn)
2387 rtx insn, redundant_insn;
2389 rtx p, link, next;
2391 for (link = REG_NOTES (insn); link; link = next)
2393 next = XEXP (link, 1);
2395 if (REG_NOTE_KIND (link) != REG_UNUSED
2396 || GET_CODE (XEXP (link, 0)) != REG)
2397 continue;
2399 if (! find_regno_note (redundant_insn, REG_UNUSED,
2400 REGNO (XEXP (link, 0))))
2401 remove_note (insn, link);
2405 /* Marks registers possibly live at the current place being scanned by
2406 mark_target_live_regs. Used only by the next two functions. */
2408 static HARD_REG_SET current_live_regs;
2410 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
2411 Also only used by the next two functions. */
2413 static HARD_REG_SET pending_dead_regs;
2415 /* Utility function called from mark_target_live_regs via note_stores.
2416 It deadens any CLOBBERed registers and livens any SET registers. */
2418 static void
2419 update_live_status (dest, x)
2420 rtx dest;
2421 rtx x;
2423 int first_regno, last_regno;
2424 int i;
2426 if (GET_CODE (dest) != REG
2427 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
2428 return;
2430 if (GET_CODE (dest) == SUBREG)
2431 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2432 else
2433 first_regno = REGNO (dest);
2435 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
2437 if (GET_CODE (x) == CLOBBER)
2438 for (i = first_regno; i < last_regno; i++)
2439 CLEAR_HARD_REG_BIT (current_live_regs, i);
2440 else
2441 for (i = first_regno; i < last_regno; i++)
2443 SET_HARD_REG_BIT (current_live_regs, i);
2444 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
2448 /* Similar to next_insn, but ignores insns in the delay slots of
2449 an annulled branch. */
2451 static rtx
2452 next_insn_no_annul (insn)
2453 rtx insn;
2455 if (insn)
2457 /* If INSN is an annulled branch, skip any insns from the target
2458 of the branch. */
2459 if (INSN_ANNULLED_BRANCH_P (insn)
2460 && NEXT_INSN (PREV_INSN (insn)) != insn)
2461 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
2462 insn = NEXT_INSN (insn);
2464 insn = NEXT_INSN (insn);
2465 if (insn && GET_CODE (insn) == INSN
2466 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2467 insn = XVECEXP (PATTERN (insn), 0, 0);
2470 return insn;
2473 /* A subroutine of mark_target_live_regs. Search forward from TARGET
2474 looking for registers that are set before they are used. These are dead.
2475 Stop after passing a few conditional jumps, and/or a small
2476 number of unconditional branches. */
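/* E.g. (hypothetical): scanning forward from TARGET,

	mov	0,%o5		; %o5 is set before any use -> dead at TARGET
	add	%o3,%o4,%o2	; %o3 and %o4 are used first -> still live

   Conditional jumps are followed down both arms and the results combined,
   as the comments in the code below explain.  Illustrative only.  */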
2478 static rtx
2479 find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
2480 rtx target;
2481 struct resources *res;
2482 rtx *jump_target;
2483 int jump_count;
2484 struct resources set, needed;
2486 HARD_REG_SET scratch;
2487 rtx insn, next;
2488 rtx jump_insn = 0;
2489 int i;
2491 for (insn = target; insn; insn = next)
2493 rtx this_jump_insn = insn;
2495 next = NEXT_INSN (insn);
2496 switch (GET_CODE (insn))
2498 case CODE_LABEL:
2499 /* After a label, any pending dead registers that weren't yet
2500 used can be made dead. */
2501 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
2502 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
2503 CLEAR_HARD_REG_SET (pending_dead_regs);
2505 if (CODE_LABEL_NUMBER (insn) < max_label_num_after_reload)
2507 /* All spill registers are dead at a label, so kill all of the
2508 ones that aren't needed also. */
2509 COPY_HARD_REG_SET (scratch, used_spill_regs);
2510 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2511 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2513 continue;
2515 case BARRIER:
2516 case NOTE:
2517 continue;
2519 case INSN:
2520 if (GET_CODE (PATTERN (insn)) == USE)
2522 /* If INSN is a USE made by update_block, we care about the
2523 underlying insn. Any registers set by the underlying insn
2524 are live since the insn is being done somewhere else. */
2525 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2526 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
2528 /* All other USE insns are to be ignored. */
2529 continue;
2531 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
2532 continue;
2533 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
2535 /* An unconditional jump can be used to fill the delay slot
2536 of a call, so search for a JUMP_INSN in any position. */
2537 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
2539 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
2540 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2541 break;
2546 if (GET_CODE (this_jump_insn) == JUMP_INSN)
2548 if (jump_count++ < 10)
2550 if (simplejump_p (this_jump_insn)
2551 || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
2553 next = JUMP_LABEL (this_jump_insn);
2554 if (jump_insn == 0)
2556 jump_insn = insn;
2557 if (jump_target)
2558 *jump_target = JUMP_LABEL (this_jump_insn);
2561 else if (condjump_p (this_jump_insn)
2562 || condjump_in_parallel_p (this_jump_insn))
2564 struct resources target_set, target_res;
2565 struct resources fallthrough_res;
2567 /* We can handle conditional branches here by following
2568 both paths, and then IOR the results of the two paths
2569 together, which will give us registers that are dead
2570 on both paths. Since this is expensive, we give it
2571 a much higher cost than unconditional branches. The
2572 cost was chosen so that we will follow at most 1
2573 conditional branch. */
2575 jump_count += 4;
2576 if (jump_count >= 10)
2577 break;
2579 mark_referenced_resources (insn, &needed, 1);
2581 /* For an annulled branch, mark_set_resources ignores slots
2582 filled by instructions from the target. This is correct
2583 if the branch is not taken. Since we are following both
2584 paths from the branch, we must also compute correct info
2585 if the branch is taken. We do this by inverting all of
2586 the INSN_FROM_TARGET_P bits, calling mark_set_resources,
2587 and then inverting the INSN_FROM_TARGET_P bits again. */
2589 if (GET_CODE (PATTERN (insn)) == SEQUENCE
2590 && INSN_ANNULLED_BRANCH_P (this_jump_insn))
2592 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
2593 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
2594 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
2596 target_set = set;
2597 mark_set_resources (insn, &target_set, 0, 1);
2599 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
2600 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
2601 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
2603 mark_set_resources (insn, &set, 0, 1);
2605 else
2607 mark_set_resources (insn, &set, 0, 1);
2608 target_set = set;
2611 target_res = *res;
2612 COPY_HARD_REG_SET (scratch, target_set.regs);
2613 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2614 AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
2616 fallthrough_res = *res;
2617 COPY_HARD_REG_SET (scratch, set.regs);
2618 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2619 AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
2621 find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
2622 &target_res, 0, jump_count,
2623 target_set, needed);
2624 find_dead_or_set_registers (next,
2625 &fallthrough_res, 0, jump_count,
2626 set, needed);
2627 IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
2628 AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
2629 break;
2631 else
2632 break;
2634 else
2636 /* Don't try this optimization if we expired our jump count
2637 above, since that would mean there may be an infinite loop
2638 in the function being compiled. */
2639 jump_insn = 0;
2640 break;
2644 mark_referenced_resources (insn, &needed, 1);
2645 mark_set_resources (insn, &set, 0, 1);
2647 COPY_HARD_REG_SET (scratch, set.regs);
2648 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
2649 AND_COMPL_HARD_REG_SET (res->regs, scratch);
2652 return jump_insn;
2655 /* Set the resources that are live at TARGET.
2657 If TARGET is zero, we refer to the end of the current function and can
2658 return our precomputed value.
2660 Otherwise, we try to find out what is live by consulting the basic block
2661 information. This is tricky, because we must consider the actions of
2662 reload and jump optimization, which occur after the basic block information
2663 has been computed.
2665 Accordingly, we proceed as follows:
2667 We find the previous BARRIER and look at all immediately following labels
2668 (with no intervening active insns) to see if any of them start a basic
2669 block. If we hit the start of the function first, we use block 0.
2671 Once we have found a basic block and a corresponding first insn, we can
2672 accurately compute the live status from basic_block_live_regs and
2673 reg_renumber. (By starting at a label following a BARRIER, we are immune
2674 to actions taken by reload and jump.) Then we scan all insns between
2675 that point and our target. For each CLOBBER (or for call-clobbered regs
2676 when we pass a CALL_INSN), mark the appropriate registers as dead. For
2677 a SET, mark them as live.
2679 We have to be careful when using REG_DEAD notes because they are not
2680 updated by such things as find_equiv_reg. So keep track of registers
2681 marked as dead that haven't been assigned to, and mark them dead at the
2682 next CODE_LABEL since reload and jump won't propagate values across labels.
2684 If we cannot find the start of a basic block (should be a very rare
2685 case, if it can happen at all), mark everything as potentially live.
2687 Next, scan forward from TARGET looking for things set or clobbered
2688 before they are used. These are not live.
2690 Because we can be called many times on the same target, save our results
2691 in a hash table indexed by INSN_UID. */
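/* Rough picture of the basic-block lookup described above (hypothetical
   layout):

	jmp	L9
	; BARRIER
   L5:				; first label after the barrier; if L5 is
	ld	[%o2],%o1	; basic_block_head[b], block b supplies the
	...			; initial live-register set
	<TARGET>

   Everything between L5 and TARGET is then scanned, deadening registers
   that are clobbered and livening those that are set, per the steps
   listed above.  Only a sketch of the procedure.  */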
2693 static void
2694 mark_target_live_regs (target, res)
2695 rtx target;
2696 struct resources *res;
2698 int b = -1;
2699 int i;
2700 struct target_info *tinfo;
2701 rtx insn, next;
2702 rtx jump_insn = 0;
2703 rtx jump_target;
2704 HARD_REG_SET scratch;
2705 struct resources set, needed;
2706 int jump_count = 0;
2708 /* Handle end of function. */
2709 if (target == 0)
2711 *res = end_of_function_needs;
2712 return;
2715 /* We have to assume memory is needed, but the CC isn't. */
2716 res->memory = 1;
2717 res->volatil = res->unch_memory = 0;
2718 res->cc = 0;
2720 /* See if we have computed this value already. */
2721 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2722 tinfo; tinfo = tinfo->next)
2723 if (tinfo->uid == INSN_UID (target))
2724 break;
2726 /* Start by getting the basic block number. If we have saved information,
2727 we can get it from there unless the insn at the start of the basic block
2728 has been deleted. */
2729 if (tinfo && tinfo->block != -1
2730 && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
2731 b = tinfo->block;
2733 if (b == -1)
2734 b = find_basic_block (target);
2736 if (tinfo)
2738 /* If the information is up-to-date, use it. Otherwise, we will
2739 update it below. */
2740 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
2742 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
2743 return;
2746 else
2748 /* Allocate a place to put our results and chain it into the
2749 hash table. */
2750 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
2751 tinfo->uid = INSN_UID (target);
2752 tinfo->block = b;
2753 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
2754 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
2757 CLEAR_HARD_REG_SET (pending_dead_regs);
2759 /* If we found a basic block, get the live registers from it and update
2760 them with anything set or killed between its start and the insn before
2761 TARGET. Otherwise, we must assume everything is live. */
2762 if (b != -1)
2764 regset regs_live = basic_block_live_at_start[b];
2765 int offset, j;
2766 REGSET_ELT_TYPE bit;
2767 int regno;
2768 rtx start_insn, stop_insn;
2770 /* Compute hard regs live at start of block -- this is the real hard regs
2771 marked live, plus live pseudo regs that have been renumbered to
2772 hard regs. */
2774 REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
2776 EXECUTE_IF_SET_IN_REG_SET (regs_live, 0, i,
2778 if ((regno = reg_renumber[i]) >= 0)
2779 for (j = regno;
2780 j < regno + HARD_REGNO_NREGS (regno,
2781 PSEUDO_REGNO_MODE (i));
2782 j++)
2783 SET_HARD_REG_BIT (current_live_regs, j);
2786 /* Get starting and ending insn, handling the case where each might
2787 be a SEQUENCE. */
2788 start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
2789 stop_insn = target;
2791 if (GET_CODE (start_insn) == INSN
2792 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
2793 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
2795 if (GET_CODE (stop_insn) == INSN
2796 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
2797 stop_insn = next_insn (PREV_INSN (stop_insn));
2799 for (insn = start_insn; insn != stop_insn;
2800 insn = next_insn_no_annul (insn))
2802 rtx link;
2803 rtx real_insn = insn;
2805 /* If this insn is from the target of a branch, it isn't going to
2806 be used in the sequel. If it is used in both cases, this
2807 test will not be true. */
2808 if (INSN_FROM_TARGET_P (insn))
2809 continue;
2811 /* If this insn is a USE made by update_block, we care about the
2812 underlying insn. */
2813 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
2814 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
2815 real_insn = XEXP (PATTERN (insn), 0);
2817 if (GET_CODE (real_insn) == CALL_INSN)
2819 /* CALL clobbers all call-used regs that aren't fixed except
2820 sp, ap, and fp. Do this before setting the result of the
2821 call live. */
2822 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2823 if (call_used_regs[i]
2824 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
2825 && i != ARG_POINTER_REGNUM
2826 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2827 && i != HARD_FRAME_POINTER_REGNUM
2828 #endif
2829 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2830 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
2831 #endif
2832 #ifdef PIC_OFFSET_TABLE_REGNUM
2833 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2834 #endif
2836 CLEAR_HARD_REG_BIT (current_live_regs, i);
2838 /* A CALL_INSN sets any global register live, since it may
2839 have been modified by the call. */
2840 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2841 if (global_regs[i])
2842 SET_HARD_REG_BIT (current_live_regs, i);
2845 /* Mark anything killed in an insn to be deadened at the next
2846 label. Ignore USE insns; the only REG_DEAD notes will be for
2847 parameters. But they might be early. A CALL_INSN will usually
2848 clobber registers used for parameters. It isn't worth bothering
2849 with the unlikely case when it won't. */
2850 if ((GET_CODE (real_insn) == INSN
2851 && GET_CODE (PATTERN (real_insn)) != USE
2852 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
2853 || GET_CODE (real_insn) == JUMP_INSN
2854 || GET_CODE (real_insn) == CALL_INSN)
2856 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2857 if (REG_NOTE_KIND (link) == REG_DEAD
2858 && GET_CODE (XEXP (link, 0)) == REG
2859 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2861 int first_regno = REGNO (XEXP (link, 0));
2862 int last_regno
2863 = (first_regno
2864 + HARD_REGNO_NREGS (first_regno,
2865 GET_MODE (XEXP (link, 0))));
2867 for (i = first_regno; i < last_regno; i++)
2868 SET_HARD_REG_BIT (pending_dead_regs, i);
2871 note_stores (PATTERN (real_insn), update_live_status);
2873 /* If any registers were unused after this insn, kill them.
2874 These notes will always be accurate. */
2875 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
2876 if (REG_NOTE_KIND (link) == REG_UNUSED
2877 && GET_CODE (XEXP (link, 0)) == REG
2878 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
2880 int first_regno = REGNO (XEXP (link, 0));
2881 int last_regno
2882 = (first_regno
2883 + HARD_REGNO_NREGS (first_regno,
2884 GET_MODE (XEXP (link, 0))));
2886 for (i = first_regno; i < last_regno; i++)
2887 CLEAR_HARD_REG_BIT (current_live_regs, i);
2891 else if (GET_CODE (real_insn) == CODE_LABEL)
2893 /* A label clobbers the pending dead registers since neither
2894 reload nor jump will propagate a value across a label. */
2895 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
2896 CLEAR_HARD_REG_SET (pending_dead_regs);
2899 /* The beginning of the epilogue corresponds to the end of the
2900 RTL chain when there are no epilogue insns. Certain resources
2901 are implicitly required at that point. */
2902 else if (GET_CODE (real_insn) == NOTE
2903 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
2904 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
2907 COPY_HARD_REG_SET (res->regs, current_live_regs);
2908 tinfo->block = b;
2909 tinfo->bb_tick = bb_ticks[b];
2911 else
2912 /* We didn't find the start of a basic block. Assume everything
2913 in use. This should happen only extremely rarely. */
2914 SET_HARD_REG_SET (res->regs);
2916 CLEAR_RESOURCE (&set);
2917 CLEAR_RESOURCE (&needed);
2919 jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
2920 set, needed);
2922 /* If we hit an unconditional branch, we have another way of finding out
2923 what is live: we can see what is live at the branch target and include
2924 anything used but not set before the branch. The only things that are
2925 live are those that are live using the above test and the test below. */
2927 if (jump_insn)
2929 struct resources new_resources;
2930 rtx stop_insn = next_active_insn (jump_insn);
2932 mark_target_live_regs (next_active_insn (jump_target), &new_resources);
2933 CLEAR_RESOURCE (&set);
2934 CLEAR_RESOURCE (&needed);
2936 /* Include JUMP_INSN in the needed registers. */
2937 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
2939 mark_referenced_resources (insn, &needed, 1);
2941 COPY_HARD_REG_SET (scratch, needed.regs);
2942 AND_COMPL_HARD_REG_SET (scratch, set.regs);
2943 IOR_HARD_REG_SET (new_resources.regs, scratch);
2945 mark_set_resources (insn, &set, 0, 1);
2948 AND_HARD_REG_SET (res->regs, new_resources.regs);
2951 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
2954 /* Scan a function looking for insns that need a delay slot and find insns to
2955 put into the delay slot.
2957 NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
2958 as calls). We do these first since we don't want jump insns (that are
2959 easier to fill) to get the only insns that could be used for non-jump insns.
2960 When it is zero, only try to fill JUMP_INSNs.
2962 When slots are filled in this manner, the insns (including the
2963 delay_insn) are put together in a SEQUENCE rtx. In this fashion,
2964 it is possible to tell whether a delay slot has really been filled
2965 or not. `final' knows how to deal with this, by communicating
2966 through FINAL_SEQUENCE. */
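/* For reference, a filled insn produced here is a single insn whose
   pattern is a SEQUENCE; schematically (operands hypothetical):

	(insn (sequence [(jump_insn ...)	; the insn needing the slot
			 (insn (set ...))]))	; the insn filling slot 1

   Element 0 of the vector is always the insn requiring the delay slots
   and the remaining elements are the slots themselves, which is how the
   rest of this file indexes them (XVECEXP (pat, 0, 0), etc.).  A sketch
   of the representation, not exact RTL.  */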
2968 static void
2969 fill_simple_delay_slots (first, non_jumps_p)
2970 rtx first;
2971 int non_jumps_p;
2973 register rtx insn, pat, trial, next_trial;
2974 register int i, j;
2975 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
2976 struct resources needed, set;
2977 int slots_to_fill, slots_filled;
2978 rtx delay_list;
2980 for (i = 0; i < num_unfilled_slots; i++)
2982 int flags;
2983 /* Get the next insn to fill. If it has already had any slots assigned,
2984 we can't do anything with it. Maybe we'll improve this later. */
2986 insn = unfilled_slots_base[i];
2987 if (insn == 0
2988 || INSN_DELETED_P (insn)
2989 || (GET_CODE (insn) == INSN
2990 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2991 || (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
2992 || (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
2993 continue;
2995 if (GET_CODE (insn) == JUMP_INSN)
2996 flags = get_jump_flags (insn, JUMP_LABEL (insn));
2997 else
2998 flags = get_jump_flags (insn, NULL_RTX);
2999 slots_to_fill = num_delay_slots (insn);
3000 if (slots_to_fill == 0)
3001 abort ();
3003 /* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
3004 says how many. After initialization, first try optimizing
3006 call _foo call _foo
3007 nop add %o7,.-L1,%o7
3008 b,a L1
3011 If this case applies, the delay slot of the call is filled with
3012 the unconditional jump. This is done first to avoid having the
3013 delay slot of the call filled in the backward scan. Also, since
3014 the unconditional jump is likely to also have a delay slot, that
3015 insn must exist when it is subsequently scanned.
3017 This is tried on each insn with delay slots as some machines
3018 have insns which perform calls, but are not represented as
3019 CALL_INSNs. */
3021 slots_filled = 0;
3022 delay_list = 0;
3024 if ((trial = next_active_insn (insn))
3025 && GET_CODE (trial) == JUMP_INSN
3026 && simplejump_p (trial)
3027 && eligible_for_delay (insn, slots_filled, trial, flags)
3028 && no_labels_between_p (insn, trial))
3030 rtx *tmp;
3031 slots_filled++;
3032 delay_list = add_to_delay_list (trial, delay_list);
3034 /* TRIAL may have had its delay slot filled, then unfilled. When
3035 the delay slot is unfilled, TRIAL is placed back on the unfilled
3036 slots obstack. Unfortunately, it is placed on the end of the
3037 obstack, not in its original location. Therefore, we must search
3038 from entry i + 1 to the end of the unfilled slots obstack to
3039 try and find TRIAL. */
3040 tmp = &unfilled_slots_base[i + 1];
3041 while (*tmp != trial && tmp != unfilled_slots_next)
3042 tmp++;
3044 /* Remove the unconditional jump from consideration for delay slot
3045 filling and unthread it. */
3046 if (*tmp == trial)
3047 *tmp = 0;
3049 rtx next = NEXT_INSN (trial);
3050 rtx prev = PREV_INSN (trial);
3051 if (prev)
3052 NEXT_INSN (prev) = next;
3053 if (next)
3054 PREV_INSN (next) = prev;
3058 /* Now, scan backwards from the insn to search for a potential
3059 delay-slot candidate. Stop searching when a label or jump is hit.
3061 For each candidate, if it is to go into the delay slot (moved
3062 forward in execution sequence), it must not need or set any resources
3063 that were set by later insns and must not set any resources that
3064 are needed for those insns.
3066 The delay slot insn itself sets resources unless it is a call
3067 (in which case the called routine, not the insn itself, is doing
3068 the setting). */
3070 if (slots_filled < slots_to_fill)
3072 CLEAR_RESOURCE (&needed);
3073 CLEAR_RESOURCE (&set);
3074 mark_set_resources (insn, &set, 0, 0);
3075 mark_referenced_resources (insn, &needed, 0);
3077 for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
3078 trial = next_trial)
3080 next_trial = prev_nonnote_insn (trial);
3082 /* This must be an INSN or CALL_INSN. */
3083 pat = PATTERN (trial);
3085 /* USE and CLOBBER at this level was just for flow; ignore it. */
3086 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3087 continue;
3089 /* Check for resource conflict first, to avoid unnecessary
3090 splitting. */
3091 if (! insn_references_resource_p (trial, &set, 1)
3092 && ! insn_sets_resource_p (trial, &set, 1)
3093 && ! insn_sets_resource_p (trial, &needed, 1)
3094 #ifdef HAVE_cc0
3095 /* Can't separate set of cc0 from its use. */
3096 && ! (reg_mentioned_p (cc0_rtx, pat)
3097 && ! sets_cc0_p (pat))
3098 #endif
3101 trial = try_split (pat, trial, 1);
3102 next_trial = prev_nonnote_insn (trial);
3103 if (eligible_for_delay (insn, slots_filled, trial, flags))
3105 /* In this case, we are searching backward, so if we
3106 find insns to put on the delay list, we want
3107 to put them at the head, rather than the
3108 tail, of the list. */
3110 update_reg_dead_notes (trial, insn);
3111 delay_list = gen_rtx (INSN_LIST, VOIDmode,
3112 trial, delay_list);
3113 update_block (trial, trial);
3114 delete_insn (trial);
3115 if (slots_to_fill == ++slots_filled)
3116 break;
3117 continue;
3121 mark_set_resources (trial, &set, 0, 1);
3122 mark_referenced_resources (trial, &needed, 1);
3126 /* If all needed slots haven't been filled, we come here. */
3128 /* Try to optimize case of jumping around a single insn. */
3129 #if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
3130 if (slots_filled != slots_to_fill
3131 && delay_list == 0
3132 && GET_CODE (insn) == JUMP_INSN
3133 && (condjump_p (insn) || condjump_in_parallel_p (insn)))
3135 delay_list = optimize_skip (insn);
3136 if (delay_list)
3137 slots_filled += 1;
3139 #endif
3141 /* Try to get insns from beyond the insn needing the delay slot.
3142 These insns can neither set nor reference resources set in insns being
3143 skipped, cannot set resources in the insn being skipped, and, if this
3144 is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
3145 call might not return).
3147 There used to be code which continued past the target label if
3148 we saw all uses of the target label. This code did not work,
3149 because it failed to account for some instructions which were
3150 both annulled and marked as from the target. This can happen as a
3151 result of optimize_skip. Since this code was redundant with
3152 fill_eager_delay_slots anyway, it was just deleted. */
3154 if (slots_filled != slots_to_fill
3155 && (GET_CODE (insn) != JUMP_INSN
3156 || ((condjump_p (insn) || condjump_in_parallel_p (insn))
3157 && ! simplejump_p (insn)
3158 && JUMP_LABEL (insn) != 0)))
3160 rtx target = 0;
3161 int maybe_never = 0;
3162 struct resources needed_at_jump;
3164 CLEAR_RESOURCE (&needed);
3165 CLEAR_RESOURCE (&set);
3167 if (GET_CODE (insn) == CALL_INSN)
3169 mark_set_resources (insn, &set, 0, 1);
3170 mark_referenced_resources (insn, &needed, 1);
3171 maybe_never = 1;
3173 else
3175 mark_set_resources (insn, &set, 0, 1);
3176 mark_referenced_resources (insn, &needed, 1);
3177 if (GET_CODE (insn) == JUMP_INSN)
3178 target = JUMP_LABEL (insn);
3181 for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
3183 rtx pat, trial_delay;
3185 next_trial = next_nonnote_insn (trial);
3187 if (GET_CODE (trial) == CODE_LABEL
3188 || GET_CODE (trial) == BARRIER)
3189 break;
3191 /* We must have an INSN, JUMP_INSN, or CALL_INSN. */
3192 pat = PATTERN (trial);
3194 /* Stand-alone USE and CLOBBER are just for flow. */
3195 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3196 continue;
3198 /* If this already has filled delay slots, get the insn needing
3199 the delay slots. */
3200 if (GET_CODE (pat) == SEQUENCE)
3201 trial_delay = XVECEXP (pat, 0, 0);
3202 else
3203 trial_delay = trial;
3205 /* If this is a jump insn to our target, indicate that we have
3206 seen another jump to it. If we aren't handling a conditional
3207 jump, stop our search. Otherwise, compute the needs at its
3208 target and add them to NEEDED. */
3209 if (GET_CODE (trial_delay) == JUMP_INSN)
3211 if (target == 0)
3212 break;
3213 else if (JUMP_LABEL (trial_delay) != target)
3215 mark_target_live_regs
3216 (next_active_insn (JUMP_LABEL (trial_delay)),
3217 &needed_at_jump);
3218 needed.memory |= needed_at_jump.memory;
3219 needed.unch_memory |= needed_at_jump.unch_memory;
3220 IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
3224 /* See if we have a resource problem before we try to
3225 split. */
3226 if (target == 0
3227 && GET_CODE (pat) != SEQUENCE
3228 && ! insn_references_resource_p (trial, &set, 1)
3229 && ! insn_sets_resource_p (trial, &set, 1)
3230 && ! insn_sets_resource_p (trial, &needed, 1)
3231 #ifdef HAVE_cc0
3232 && ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
3233 #endif
3234 && ! (maybe_never && may_trap_p (pat))
3235 && (trial = try_split (pat, trial, 0))
3236 && eligible_for_delay (insn, slots_filled, trial, flags))
3238 next_trial = next_nonnote_insn (trial);
3239 delay_list = add_to_delay_list (trial, delay_list);
3241 #ifdef HAVE_cc0
3242 if (reg_mentioned_p (cc0_rtx, pat))
3243 link_cc0_insns (trial);
3244 #endif
3246 delete_insn (trial);
3247 if (slots_to_fill == ++slots_filled)
3248 break;
3249 continue;
3252 mark_set_resources (trial, &set, 0, 1);
3253 mark_referenced_resources (trial, &needed, 1);
3255 /* Ensure we don't put insns between the setting of cc and the
3256 comparison by moving a setting of cc into an earlier delay
3257 slot since these insns could clobber the condition code. */
3258 set.cc = 1;
3260 /* If this is a call or jump, we might not get here. */
3261 if (GET_CODE (trial_delay) == CALL_INSN
3262 || GET_CODE (trial_delay) == JUMP_INSN)
3263 maybe_never = 1;
3266 /* If there are slots left to fill and our search was stopped by an
3267 unconditional branch, try the insn at the branch target. We can
3268 redirect the branch if it works.
3270 Don't do this if the insn at the branch target is a branch. */
3271 if (slots_to_fill != slots_filled
3272 && trial
3273 && GET_CODE (trial) == JUMP_INSN
3274 && simplejump_p (trial)
3275 && (target == 0 || JUMP_LABEL (trial) == target)
3276 && (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
3277 && ! (GET_CODE (next_trial) == INSN
3278 && GET_CODE (PATTERN (next_trial)) == SEQUENCE)
3279 && GET_CODE (next_trial) != JUMP_INSN
3280 && ! insn_references_resource_p (next_trial, &set, 1)
3281 && ! insn_sets_resource_p (next_trial, &set, 1)
3282 && ! insn_sets_resource_p (next_trial, &needed, 1)
3283 #ifdef HAVE_cc0
3284 && ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
3285 #endif
3286 && ! (maybe_never && may_trap_p (PATTERN (next_trial)))
3287 && (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
3288 && eligible_for_delay (insn, slots_filled, next_trial, flags))
3290 rtx new_label = next_active_insn (next_trial);
3292 if (new_label != 0)
3293 new_label = get_label_before (new_label);
3294 else
3295 new_label = find_end_label ();
3297 delay_list
3298 = add_to_delay_list (copy_rtx (next_trial), delay_list);
3299 slots_filled++;
3300 reorg_redirect_jump (trial, new_label);
3302 /* If we merged because we both jumped to the same place,
3303 redirect the original insn also. */
3304 if (target)
3305 reorg_redirect_jump (insn, new_label);
3309 /* If this is an unconditional jump, then try to get insns from the
3310 target of the jump. */
3311 if (GET_CODE (insn) == JUMP_INSN
3312 && simplejump_p (insn)
3313 && slots_filled != slots_to_fill)
3314 delay_list
3315 = fill_slots_from_thread (insn, const_true_rtx,
3316 next_active_insn (JUMP_LABEL (insn)),
3317 NULL, 1, 1,
3318 own_thread_p (JUMP_LABEL (insn),
3319 JUMP_LABEL (insn), 0),
3320 0, slots_to_fill, &slots_filled);
3322 if (delay_list)
3323 unfilled_slots_base[i]
3324 = emit_delay_sequence (insn, delay_list,
3325 slots_filled, slots_to_fill);
3327 if (slots_to_fill == slots_filled)
3328 unfilled_slots_base[i] = 0;
3330 note_delay_statistics (slots_filled, 0);
3333 #ifdef DELAY_SLOTS_FOR_EPILOGUE
3334 /* See if the epilogue needs any delay slots. Try to fill them if so.
3335 The only thing we can do is scan backwards from the end of the
3336 function. If we did this in a previous pass, it is incorrect to do it
3337 again. */
3338 if (current_function_epilogue_delay_list)
3339 return;
3341 slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
3342 if (slots_to_fill == 0)
3343 return;
3345 slots_filled = 0;
3346 CLEAR_RESOURCE (&set);
3348 /* The frame pointer and stack pointer are needed at the beginning of
3349 the epilogue, so instructions setting them cannot be put in the
3350 epilogue delay slot. However, everything else needed at function
3351 end is safe, so we don't want to use end_of_function_needs here. */
3352 CLEAR_RESOURCE (&needed);
3353 if (frame_pointer_needed)
3355 SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
3356 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3357 SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
3358 #endif
3359 #ifdef EXIT_IGNORE_STACK
3360 if (! EXIT_IGNORE_STACK)
3361 #endif
3362 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3364 else
3365 SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
3367 #ifdef EPILOGUE_USES
3368 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3370 if (EPILOGUE_USES (i))
3371 SET_HARD_REG_BIT (needed.regs, i);
3373 #endif
3375 for (trial = get_last_insn (); ! stop_search_p (trial, 1);
3376 trial = PREV_INSN (trial))
3378 if (GET_CODE (trial) == NOTE)
3379 continue;
3380 pat = PATTERN (trial);
3381 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3382 continue;
3384 if (! insn_references_resource_p (trial, &set, 1)
3385 && ! insn_sets_resource_p (trial, &needed, 1)
3386 && ! insn_sets_resource_p (trial, &set, 1)
3387 #ifdef HAVE_cc0
3388 /* Don't want to mess with cc0 here. */
3389 && ! reg_mentioned_p (cc0_rtx, pat)
3390 #endif
3393 trial = try_split (pat, trial, 1);
3394 if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
3396 /* Here as well we are searching backward, so put the
3397 insns we find on the head of the list. */
3399 current_function_epilogue_delay_list
3400 = gen_rtx (INSN_LIST, VOIDmode, trial,
3401 current_function_epilogue_delay_list);
3402 mark_referenced_resources (trial, &end_of_function_needs, 1);
3403 update_block (trial, trial);
3404 delete_insn (trial);
3406 /* Clear deleted bit so final.c will output the insn. */
3407 INSN_DELETED_P (trial) = 0;
3409 if (slots_to_fill == ++slots_filled)
3410 break;
3411 continue;
3415 mark_set_resources (trial, &set, 0, 1);
3416 mark_referenced_resources (trial, &needed, 1);
3419 note_delay_statistics (slots_filled, 0);
3420 #endif
3423 /* Try to find insns to place in delay slots.
3425 INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
3426 or is an unconditional branch if CONDITION is const_true_rtx.
3427 *PSLOTS_FILLED is updated with the number of slots that we have filled.
3429 THREAD is a flow-of-control, either the insns to be executed if the
3430 branch is true or if the branch is false, THREAD_IF_TRUE says which.
3432 OPPOSITE_THREAD is the thread in the opposite direction. It is used
3433 to see if any potential delay slot insns set things needed there.
3435 LIKELY is non-zero if it is extremely likely that the branch will be
3436 taken and THREAD_IF_TRUE is set. This is used for the branch at the
3437 end of a loop back up to the top.
3439 OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
3440 thread. I.e., it is the fallthrough code of our jump or the target of the
3441 jump when we are the only jump going there.
3443 If OWN_THREAD is false, it must be the "true" thread of a jump. In that
3444 case, we can only take insns from the head of the thread for our delay
3445 slot. We then adjust the jump to point after the insns we have taken. */
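/* Illustration (hypothetical): for a conditional branch

	bne	L3		; INSN
	insA			; fall-through code
	...
   L3:	insB			; target code

   filling from the fall-through means THREAD starts at insA with
   THREAD_IF_TRUE zero and OPPOSITE_THREAD at insB; filling from the
   target is the reverse.  OPPOSITE_THREAD is consulted only to check
   whether a candidate sets something the other path still needs, as the
   parameter descriptions above say.  A sketch, not a precise contract.  */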
3447 static rtx
3448 fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
3449 thread_if_true, own_thread, own_opposite_thread,
3450 slots_to_fill, pslots_filled)
3451 rtx insn;
3452 rtx condition;
3453 rtx thread, opposite_thread;
3454 int likely;
3455 int thread_if_true;
3456 int own_thread, own_opposite_thread;
3457 int slots_to_fill, *pslots_filled;
3459 rtx new_thread;
3460 rtx delay_list = 0;
3461 struct resources opposite_needed, set, needed;
3462 rtx trial;
3463 int lose = 0;
3464 int must_annul = 0;
3465 int flags;
3467 /* Validate our arguments. */
3468 if ((condition == const_true_rtx && ! thread_if_true)
3469 || (! own_thread && ! thread_if_true))
3470 abort ();
3472 flags = get_jump_flags (insn, JUMP_LABEL (insn));
3474 /* If our thread is the end of subroutine, we can't get any delay
3475 insns from that. */
3476 if (thread == 0)
3477 return 0;
3479 /* If this is an unconditional branch, nothing is needed at the
3480 opposite thread. Otherwise, compute what is needed there. */
3481 if (condition == const_true_rtx)
3482 CLEAR_RESOURCE (&opposite_needed);
3483 else
3484 mark_target_live_regs (opposite_thread, &opposite_needed);
3486 /* If the insn at THREAD can be split, do it here to avoid having to
3487 update THREAD and NEW_THREAD if it is done in the loop below. Also
3488 initialize NEW_THREAD. */
3490 new_thread = thread = try_split (PATTERN (thread), thread, 0);
3492 /* Scan insns at THREAD. We are looking for an insn that can be removed
3493 from THREAD (it neither sets nor references resources that were set
3494 ahead of it and it doesn't set anything needed by the insns ahead of
3495 it) and that either can be placed in an annulling insn or isn't
3496 needed at OPPOSITE_THREAD. */
3498 CLEAR_RESOURCE (&needed);
3499 CLEAR_RESOURCE (&set);
3501 /* If we do not own this thread, we must stop as soon as we find
3502 something that we can't put in a delay slot, since all we can do
3503 is branch into THREAD at a later point. Therefore, labels stop
3504 the search if this is not the `true' thread. */
3506 for (trial = thread;
3507 ! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
3508 trial = next_nonnote_insn (trial))
3510 rtx pat, old_trial;
3512 /* If we have passed a label, we no longer own this thread. */
3513 if (GET_CODE (trial) == CODE_LABEL)
3515 own_thread = 0;
3516 continue;
3519 pat = PATTERN (trial);
3520 if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
3521 continue;
3523 /* If TRIAL conflicts with the insns ahead of it, we lose. Also,
3524 don't separate or copy insns that set and use CC0. */
3525 if (! insn_references_resource_p (trial, &set, 1)
3526 && ! insn_sets_resource_p (trial, &set, 1)
3527 && ! insn_sets_resource_p (trial, &needed, 1)
3528 #ifdef HAVE_cc0
3529 && ! (reg_mentioned_p (cc0_rtx, pat)
3530 && (! own_thread || ! sets_cc0_p (pat)))
3531 #endif
3534 rtx prior_insn;
3536 /* If TRIAL is redundant with some insn before INSN, we don't
3537 actually need to add it to the delay list; we can merely pretend
3538 we did. */
3539 if (prior_insn = redundant_insn (trial, insn, delay_list))
3541 fix_reg_dead_note (prior_insn, insn);
3542 if (own_thread)
3544 update_block (trial, thread);
3545 if (trial == thread)
3547 thread = next_active_insn (thread);
3548 if (new_thread == trial)
3549 new_thread = thread;
3552 delete_insn (trial);
3554 else
3556 update_reg_unused_notes (prior_insn, trial);
3557 new_thread = next_active_insn (trial);
3560 continue;
3563 /* There are two ways we can win: If TRIAL doesn't set anything
3564 needed at the opposite thread and can't trap, or if it can
3565 go into an annulled delay slot. */
3566 if (condition == const_true_rtx
3567 || (! insn_sets_resource_p (trial, &opposite_needed, 1)
3568 && ! may_trap_p (pat)))
3570 old_trial = trial;
3571 trial = try_split (pat, trial, 0);
3572 if (new_thread == old_trial)
3573 new_thread = trial;
3574 if (thread == old_trial)
3575 thread = trial;
3576 pat = PATTERN (trial);
3577 if (eligible_for_delay (insn, *pslots_filled, trial, flags))
3578 goto winner;
3580 else if (0
3581 #ifdef ANNUL_IFTRUE_SLOTS
3582 || ! thread_if_true
3583 #endif
3584 #ifdef ANNUL_IFFALSE_SLOTS
3585 || thread_if_true
3586 #endif
3589 old_trial = trial;
3590 trial = try_split (pat, trial, 0);
3591 if (new_thread == old_trial)
3592 new_thread = trial;
3593 if (thread == old_trial)
3594 thread = trial;
3595 pat = PATTERN (trial);
3596 if ((thread_if_true
3597 ? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
3598 : eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
3600 rtx temp;
3602 must_annul = 1;
3603 winner:
3605 #ifdef HAVE_cc0
3606 if (reg_mentioned_p (cc0_rtx, pat))
3607 link_cc0_insns (trial);
3608 #endif
3610 /* If we own this thread, delete the insn. If this is the
3611 destination of a branch, show that a basic block status
3612 may have been updated. In any case, mark the new
3613 starting point of this thread. */
3614 if (own_thread)
3616 update_block (trial, thread);
3617 if (trial == thread)
3619 thread = next_active_insn (thread);
3620 if (new_thread == trial)
3621 new_thread = thread;
3623 delete_insn (trial);
3625 else
3626 new_thread = next_active_insn (trial);
3628 temp = own_thread ? trial : copy_rtx (trial);
3629 if (thread_if_true)
3630 INSN_FROM_TARGET_P (temp) = 1;
3632 delay_list = add_to_delay_list (temp, delay_list);
3634 if (slots_to_fill == ++(*pslots_filled))
3636 /* Even though we have filled all the slots, we
3637 may be branching to a location that has a
3638 redundant insn. Skip any if so. */
3639 while (new_thread && ! own_thread
3640 && ! insn_sets_resource_p (new_thread, &set, 1)
3641 && ! insn_sets_resource_p (new_thread, &needed, 1)
3642 && ! insn_references_resource_p (new_thread,
3643 &set, 1)
3644 && redundant_insn (new_thread, insn, delay_list))
3645 new_thread = next_active_insn (new_thread);
3646 break;
3649 continue;
3654 /* This insn can't go into a delay slot. */
3655 lose = 1;
3656 mark_set_resources (trial, &set, 0, 1);
3657 mark_referenced_resources (trial, &needed, 1);
3659 /* Ensure we don't move a later setting of cc into an earlier delay slot;
3660 that would put insns between the setting of cc and the comparison, and
3661 those insns could clobber the condition code. */
3662 set.cc = 1;
3664 /* If this insn is a register-register copy and the next insn has
3665 a use of our destination, change it to use our source. That way,
3666 it will become a candidate for our delay slot the next time
3667 through this loop. This case occurs commonly in loops that
3668 scan a list.
3670 We could check for more complex cases than those tested below,
3671 but it doesn't seem worth it. It might also be a good idea to try
3672 to swap the two insns. That might do better.
3674 We can't do this if the next insn modifies our destination, because
3675 that would make the replacement into the insn invalid. We also can't
3676 do this if it modifies our source, because it might be an earlyclobber
3677 operand. This latter test also prevents updating the contents of
3678 a PRE_INC. */
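/* A source-level sketch of the common case mentioned above (illustrative
   C only; the variable names are hypothetical):

	while (p)
	  {
	    q = p;		   the register-register copy
	    p = q->next;	   next insn uses the copy's destination
	  }

   Rewriting the second insn as p = p->next makes it independent of the
   copy, so that insn may be picked up as a slot candidate on the next
   iteration of the scan above.  */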
3680 if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
3681 && GET_CODE (SET_SRC (pat)) == REG
3682 && GET_CODE (SET_DEST (pat)) == REG)
3684 rtx next = next_nonnote_insn (trial);
3686 if (next && GET_CODE (next) == INSN
3687 && GET_CODE (PATTERN (next)) != USE
3688 && ! reg_set_p (SET_DEST (pat), next)
3689 && ! reg_set_p (SET_SRC (pat), next)
3690 && reg_referenced_p (SET_DEST (pat), PATTERN (next)))
3691 validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
3695 /* If we stopped on a branch insn that has delay slots, see if we can
3696 steal some of the insns in those slots. */
3697 if (trial && GET_CODE (trial) == INSN
3698 && GET_CODE (PATTERN (trial)) == SEQUENCE
3699 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
3701 /* If this is the `true' thread, we will want to follow the jump,
3702 so we can only do this if we have taken everything up to here. */
3703 if (thread_if_true && trial == new_thread)
3704 delay_list
3705 = steal_delay_list_from_target (insn, condition, PATTERN (trial),
3706 delay_list, &set, &needed,
3707 &opposite_needed, slots_to_fill,
3708 pslots_filled, &must_annul,
3709 &new_thread);
3710 else if (! thread_if_true)
3711 delay_list
3712 = steal_delay_list_from_fallthrough (insn, condition,
3713 PATTERN (trial),
3714 delay_list, &set, &needed,
3715 &opposite_needed, slots_to_fill,
3716 pslots_filled, &must_annul);
3719 /* If we haven't found anything for this delay slot and it is very
3720 likely that the branch will be taken, see if the insn at our target
3721 increments or decrements a register with an increment that does not
3722 depend on the destination register. If so, try to place the opposite
3723 arithmetic insn after the jump insn and put the arithmetic insn in the
3724 delay slot. If we can't do this, return. */
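/* A hedged illustration (hypothetical MIPS-like assembly; the label
   bookkeeping is done by the code that follows):

   top:	addu	$4, $4, 4	; insn at the branch target
	...
	bne	$4, $5, top	; very likely taken, slot still empty

   may become

   top:	addu	$4, $4, 4
   top1:	...
	bne	$4, $5, top1	; redirected past the addu
	addu	$4, $4, 4	; delay slot, copied from the target
	subu	$4, $4, 4	; emitted after the jump to undo the slot
				; insn on the fall-through path  */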
3725 if (delay_list == 0 && likely && new_thread
3726 && GET_CODE (new_thread) == INSN
3727 && GET_CODE (PATTERN (new_thread)) != ASM_INPUT
3728 && asm_noperands (PATTERN (new_thread)) < 0)
3730 rtx pat = PATTERN (new_thread);
3731 rtx dest;
3732 rtx src;
3734 trial = new_thread;
3735 pat = PATTERN (trial);
3737 if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
3738 || ! eligible_for_delay (insn, 0, trial, flags))
3739 return 0;
3741 dest = SET_DEST (pat), src = SET_SRC (pat);
3742 if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
3743 && rtx_equal_p (XEXP (src, 0), dest)
3744 && ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
3746 rtx other = XEXP (src, 1);
3747 rtx new_arith;
3748 rtx ninsn;
3750 /* If this is a constant adjustment, use the same code with
3751 the negated constant. Otherwise, reverse the sense of the
3752 arithmetic. */
3753 if (GET_CODE (other) == CONST_INT)
3754 new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
3755 negate_rtx (GET_MODE (src), other));
3756 else
3757 new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
3758 GET_MODE (src), dest, other);
3760 ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
3761 insn);
3763 if (recog_memoized (ninsn) < 0
3764 || (insn_extract (ninsn),
3765 ! constrain_operands (INSN_CODE (ninsn), 1)))
3767 delete_insn (ninsn);
3768 return 0;
3771 if (own_thread)
3773 update_block (trial, thread);
3774 if (trial == thread)
3776 thread = next_active_insn (thread);
3777 if (new_thread == trial)
3778 new_thread = thread;
3780 delete_insn (trial);
3782 else
3783 new_thread = next_active_insn (trial);
3785 ninsn = own_thread ? trial : copy_rtx (trial);
3786 if (thread_if_true)
3787 INSN_FROM_TARGET_P (ninsn) = 1;
3789 delay_list = add_to_delay_list (ninsn, NULL_RTX);
3790 (*pslots_filled)++;
3794 if (delay_list && must_annul)
3795 INSN_ANNULLED_BRANCH_P (insn) = 1;
3797 /* If we are to branch into the middle of this thread, find an appropriate
3798 label or make a new one if none, and redirect INSN to it. If we hit the
3799 end of the function, use the end-of-function label. */
3800 if (new_thread != thread)
3802 rtx label;
3804 if (! thread_if_true)
3805 abort ();
3807 if (new_thread && GET_CODE (new_thread) == JUMP_INSN
3808 && (simplejump_p (new_thread)
3809 || GET_CODE (PATTERN (new_thread)) == RETURN)
3810 && redirect_with_delay_list_safe_p (insn,
3811 JUMP_LABEL (new_thread),
3812 delay_list))
3813 new_thread = follow_jumps (JUMP_LABEL (new_thread));
3815 if (new_thread == 0)
3816 label = find_end_label ();
3817 else if (GET_CODE (new_thread) == CODE_LABEL)
3818 label = new_thread;
3819 else
3820 label = get_label_before (new_thread);
3822 reorg_redirect_jump (insn, label);
3825 return delay_list;
3828 /* Make another attempt to find insns to place in delay slots.
3830 We previously looked for insns located in front of the delay insn
3831 and, for non-jump delay insns, located behind the delay insn.
3833 Here we only try to schedule jump insns, moving insns from either
3834 the target or the following insns into the delay slot. If annulling is
3835 supported, we are likely to do this. Otherwise, we can do this only
3836 if safe. */
3838 static void
3839 fill_eager_delay_slots (first)
3840 rtx first;
3842 register rtx insn;
3843 register int i;
3844 int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
3846 for (i = 0; i < num_unfilled_slots; i++)
3848 rtx condition;
3849 rtx target_label, insn_at_target, fallthrough_insn;
3850 rtx delay_list = 0;
3851 int own_target;
3852 int own_fallthrough;
3853 int prediction, slots_to_fill, slots_filled;
3855 insn = unfilled_slots_base[i];
3856 if (insn == 0
3857 || INSN_DELETED_P (insn)
3858 || GET_CODE (insn) != JUMP_INSN
3859 || ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
3860 continue;
3862 slots_to_fill = num_delay_slots (insn);
3863 if (slots_to_fill == 0)
3864 abort ();
3866 slots_filled = 0;
3867 target_label = JUMP_LABEL (insn);
3868 condition = get_branch_condition (insn, target_label);
3870 if (condition == 0)
3871 continue;
3873 /* Get the next active fallthrough and target insns and see if we own
3874 them. Then see whether the branch is likely true. We don't need
3875 to do a lot of this for unconditional branches. */
3877 insn_at_target = next_active_insn (target_label);
3878 own_target = own_thread_p (target_label, target_label, 0);
3880 if (condition == const_true_rtx)
3882 own_fallthrough = 0;
3883 fallthrough_insn = 0;
3884 prediction = 2;
3886 else
3888 fallthrough_insn = next_active_insn (insn);
3889 own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
3890 prediction = mostly_true_jump (insn, condition);
3893 /* If this insn is expected to branch, first try to get insns from our
3894 target, then our fallthrough insns. If it is not expected to branch,
3895 try the other order. */
3897 if (prediction > 0)
3899 delay_list
3900 = fill_slots_from_thread (insn, condition, insn_at_target,
3901 fallthrough_insn, prediction == 2, 1,
3902 own_target, own_fallthrough,
3903 slots_to_fill, &slots_filled);
3905 if (delay_list == 0 && own_fallthrough)
3907 /* Even though we didn't find anything for delay slots,
3908 we might have found a redundant insn which we deleted
3909 from the thread that was filled. So we have to recompute
3910 the next insn at the target. */
3911 target_label = JUMP_LABEL (insn);
3912 insn_at_target = next_active_insn (target_label);
3914 delay_list
3915 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3916 insn_at_target, 0, 0,
3917 own_fallthrough, own_target,
3918 slots_to_fill, &slots_filled);
3921 else
3923 if (own_fallthrough)
3924 delay_list
3925 = fill_slots_from_thread (insn, condition, fallthrough_insn,
3926 insn_at_target, 0, 0,
3927 own_fallthrough, own_target,
3928 slots_to_fill, &slots_filled);
3930 if (delay_list == 0)
3931 delay_list
3932 = fill_slots_from_thread (insn, condition, insn_at_target,
3933 next_active_insn (insn), 0, 1,
3934 own_target, own_fallthrough,
3935 slots_to_fill, &slots_filled);
3938 if (delay_list)
3939 unfilled_slots_base[i]
3940 = emit_delay_sequence (insn, delay_list,
3941 slots_filled, slots_to_fill);
3943 if (slots_to_fill == slots_filled)
3944 unfilled_slots_base[i] = 0;
3946 note_delay_statistics (slots_filled, 1);
3950 /* Once we have tried two ways to fill a delay slot, make a pass over the
3951 code to try to improve the results and to do such things as more jump
3952 threading. */
3954 static void
3955 relax_delay_slots (first)
3956 rtx first;
3958 register rtx insn, next, pat;
3959 register rtx trial, delay_insn, target_label;
3961 /* Look at every JUMP_INSN and see if we can improve it. */
3962 for (insn = first; insn; insn = next)
3964 rtx other;
3966 next = next_active_insn (insn);
3968 /* If this is a jump insn, see if it now jumps to a jump, jumps to
3969 the next insn, or jumps to a label that is not the last of a
3970 group of consecutive labels. */
3971 if (GET_CODE (insn) == JUMP_INSN
3972 && (condjump_p (insn) || condjump_in_parallel_p (insn))
3973 && (target_label = JUMP_LABEL (insn)) != 0)
3975 target_label = follow_jumps (target_label);
3976 target_label = prev_label (next_active_insn (target_label));
3978 if (target_label == 0)
3979 target_label = find_end_label ();
3981 if (next_active_insn (target_label) == next
3982 && ! condjump_in_parallel_p (insn))
3984 delete_jump (insn);
3985 continue;
3988 if (target_label != JUMP_LABEL (insn))
3989 reorg_redirect_jump (insn, target_label);
3991 /* See if this jump branches around an unconditional jump.
3992 If so, invert this jump and point it to the target of the
3993 second jump. */
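/* A hedged illustration (hypothetical assembly syntax):

	beq	$1, $0, L1
	jmp	L2
   L1:	...

   becomes

	bne	$1, $0, L2
   L1:	...

   after which the now-unreachable jmp is deleted.  */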
3994 if (next && GET_CODE (next) == JUMP_INSN
3995 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3996 && next_active_insn (target_label) == next_active_insn (next)
3997 && no_labels_between_p (insn, next))
3999 rtx label = JUMP_LABEL (next);
4001 /* Be careful how we do this to avoid deleting code or
4002 labels that are momentarily dead. See similar optimization
4003 in jump.c.
4005 We also need to ensure we properly handle the case when
4006 invert_jump fails. */
4008 ++LABEL_NUSES (target_label);
4009 if (label)
4010 ++LABEL_NUSES (label);
4012 if (invert_jump (insn, label))
4014 delete_insn (next);
4015 next = insn;
4018 if (label)
4019 --LABEL_NUSES (label);
4021 if (--LABEL_NUSES (target_label) == 0)
4022 delete_insn (target_label);
4024 continue;
4028 /* If this is an unconditional jump and the previous insn is a
4029 conditional jump, try reversing the condition of the previous
4030 insn and swapping our targets. The next pass might be able to
4031 fill the slots.
4033 Don't do this if we expect the conditional branch to be true, because
4034 we would then be making the more common case longer. */
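/* A hedged illustration (hypothetical assembly syntax), with OTHER the
   conditional jump and INSN the unconditional one:

	beq	$1, $0, L1	; OTHER, rarely taken
	jmp	L2		; INSN

   becomes

	bne	$1, $0, L2	; now taken on the common path
	jmp	L1

   which gives the next pass another chance to fill the branch's slots.  */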
4036 if (GET_CODE (insn) == JUMP_INSN
4037 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
4038 && (other = prev_active_insn (insn)) != 0
4039 && (condjump_p (other) || condjump_in_parallel_p (other))
4040 && no_labels_between_p (other, insn)
4041 && 0 > mostly_true_jump (other,
4042 get_branch_condition (other,
4043 JUMP_LABEL (other))))
4045 rtx other_target = JUMP_LABEL (other);
4046 target_label = JUMP_LABEL (insn);
4048 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
4049 as we move the label. */
4050 if (other_target)
4051 ++LABEL_NUSES (other_target);
4053 if (invert_jump (other, target_label))
4054 reorg_redirect_jump (insn, other_target);
4056 if (other_target)
4057 --LABEL_NUSES (other_target);
4060 /* Now look only at cases where we have filled a delay slot. */
4061 if (GET_CODE (insn) != INSN
4062 || GET_CODE (PATTERN (insn)) != SEQUENCE)
4063 continue;
4065 pat = PATTERN (insn);
4066 delay_insn = XVECEXP (pat, 0, 0);
4068 /* See if the first insn in the delay slot is redundant with some
4069 previous insn. Remove it from the delay slot if so; then set up
4070 to reprocess this insn. */
4071 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
4073 delete_from_delay_slot (XVECEXP (pat, 0, 1));
4074 next = prev_active_insn (next);
4075 continue;
4078 /* Now look only at the cases where we have a filled JUMP_INSN. */
4079 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
4080 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
4081 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
4082 continue;
4084 target_label = JUMP_LABEL (delay_insn);
4086 if (target_label)
4088 /* If this jump goes to another unconditional jump, thread it, but
4089 don't convert a jump into a RETURN here. */
4090 trial = follow_jumps (target_label);
4091 /* We use next_real_insn instead of next_active_insn, so that
4092 the special USE insns emitted by reorg won't be ignored.
4093 If they are ignored, then they will get deleted if target_label
4094 is now unreachable, and that would cause mark_target_live_regs
4095 to fail. */
4096 trial = prev_label (next_real_insn (trial));
4097 if (trial == 0 && target_label != 0)
4098 trial = find_end_label ();
4100 if (trial != target_label
4101 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
4103 reorg_redirect_jump (delay_insn, trial);
4104 target_label = trial;
4107 /* If the first insn at TARGET_LABEL is redundant with a previous
4108 insn, redirect the jump to the following insn and process again. */
4109 trial = next_active_insn (target_label);
4110 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
4111 && redundant_insn (trial, insn, 0))
4113 rtx tmp;
4115 /* Figure out where to emit the special USE insn so we don't
4116 later incorrectly compute register live/death info. */
4117 tmp = next_active_insn (trial);
4118 if (tmp == 0)
4119 tmp = find_end_label ();
4121 /* Insert the special USE insn and update dataflow info. */
4122 update_block (trial, tmp);
4124 /* Now emit a label before the special USE insn, and
4125 redirect our jump to the new label. */
4126 target_label = get_label_before (PREV_INSN (tmp));
4127 reorg_redirect_jump (delay_insn, target_label);
4128 next = insn;
4129 continue;
4132 /* Similarly, if it is an unconditional jump with one insn in its
4133 delay list and that insn is redundant, thread the jump. */
4134 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
4135 && XVECLEN (PATTERN (trial), 0) == 2
4136 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
4137 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
4138 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
4139 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
4141 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
4142 if (target_label == 0)
4143 target_label = find_end_label ();
4145 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
4146 insn))
4148 reorg_redirect_jump (delay_insn, target_label);
4149 next = insn;
4150 continue;
4155 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
4156 && prev_active_insn (target_label) == insn
4157 && ! condjump_in_parallel_p (delay_insn)
4158 #ifdef HAVE_cc0
4159 /* If the last insn in the delay slot sets CC0 for some insn,
4160 various code assumes that it is in a delay slot. We could
4161 put it back where it belonged and delete the register notes,
4162 but it doesn't seem worthwhile in this uncommon case. */
4163 && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
4164 REG_CC_USER, NULL_RTX)
4165 #endif
4168 int i;
4170 /* All this insn does is execute its delay list and jump to the
4171 following insn. So delete the jump and just execute the delay
4172 list insns.
4174 We do this by deleting the INSN containing the SEQUENCE, then
4175 re-emitting the insns separately, and then deleting the jump.
4176 This allows the count of the jump target to be properly
4177 decremented. */
4179 /* Clear the from target bit, since these insns are no longer
4180 in delay slots. */
4181 for (i = 0; i < XVECLEN (pat, 0); i++)
4182 INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;
4184 trial = PREV_INSN (insn);
4185 delete_insn (insn);
4186 emit_insn_after (pat, trial);
4187 delete_scheduled_jump (delay_insn);
4188 continue;
4191 /* See if this is an unconditional jump around a single insn which is
4192 identical to the one in its delay slot. In this case, we can just
4193 delete the branch and the insn in its delay slot. */
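/* A hedged illustration (hypothetical assembly syntax) of the pattern
   handled here:

	jmp	L1
	add	$2, $2, 4	; delay slot
	add	$2, $2, 4	; NEXT, identical to the slot insn
   L1:	...

   Taking the branch executes the slot insn and lands at L1, which is
   exactly what falling through would do, so the jump and its slot can
   simply be deleted.  */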
4194 if (next && GET_CODE (next) == INSN
4195 && prev_label (next_active_insn (next)) == target_label
4196 && simplejump_p (insn)
4197 && XVECLEN (pat, 0) == 2
4198 && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
4200 delete_insn (insn);
4201 continue;
4204 /* See if this jump (with its delay slots) branches around another
4205 jump (without delay slots). If so, invert this jump and point
4206 it to the target of the second jump. We cannot do this for
4207 annulled jumps, though. Again, don't convert a jump to a RETURN
4208 here. */
4209 if (! INSN_ANNULLED_BRANCH_P (delay_insn)
4210 && next && GET_CODE (next) == JUMP_INSN
4211 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
4212 && next_active_insn (target_label) == next_active_insn (next)
4213 && no_labels_between_p (insn, next))
4215 rtx label = JUMP_LABEL (next);
4216 rtx old_label = JUMP_LABEL (delay_insn);
4218 if (label == 0)
4219 label = find_end_label ();
4221 if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
4223 /* Be careful how we do this to avoid deleting code or labels
4224 that are momentarily dead. See similar optimization in
4225 jump.c */
4226 if (old_label)
4227 ++LABEL_NUSES (old_label);
4229 if (invert_jump (delay_insn, label))
4231 int i;
4233 /* Must update the INSN_FROM_TARGET_P bits now that
4234 the branch is reversed, so that mark_target_live_regs
4235 will handle the delay slot insn correctly. */
4236 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
4238 rtx slot = XVECEXP (PATTERN (insn), 0, i);
4239 INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
4242 delete_insn (next);
4243 next = insn;
4246 if (old_label && --LABEL_NUSES (old_label) == 0)
4247 delete_insn (old_label);
4248 continue;
4252 /* If we own the thread opposite the way this insn branches, see if we
4253 can merge its delay slots with following insns. */
4254 if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
4255 && own_thread_p (NEXT_INSN (insn), 0, 1))
4256 try_merge_delay_insns (insn, next);
4257 else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
4258 && own_thread_p (target_label, target_label, 0))
4259 try_merge_delay_insns (insn, next_active_insn (target_label));
4261 /* If we get here, we haven't deleted INSN. But we may have deleted
4262 NEXT, so recompute it. */
4263 next = next_active_insn (insn);
4267 #ifdef HAVE_return
4269 /* Look for filled jumps to the end of function label. We can try to convert
4270 them into RETURN insns if the insns in the delay slot are valid for the
4271 RETURN as well. */
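/* A hedged illustration (hypothetical assembly syntax): a filled jump to
   the end-of-function label such as

	jmp	end_of_function_label
	lw	$2, 0($sp)	; delay slot
	...
   end_of_function_label:
	return

   may be rewritten as a RETURN that keeps the slot,

	return
	lw	$2, 0($sp)	; delay slot

   provided the slot insns are eligible for the RETURN's delay slots.  */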
4273 static void
4274 make_return_insns (first)
4275 rtx first;
4277 rtx insn, jump_insn, pat;
4278 rtx real_return_label = end_of_function_label;
4279 int slots, i;
4281 /* See if there is a RETURN insn in the function other than the one we
4282 made for END_OF_FUNCTION_LABEL. If so, set up anything we can't change
4283 into a RETURN to jump to it. */
4284 for (insn = first; insn; insn = NEXT_INSN (insn))
4285 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
4287 real_return_label = get_label_before (insn);
4288 break;
4291 /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
4292 was equal to END_OF_FUNCTION_LABEL. */
4293 LABEL_NUSES (real_return_label)++;
4295 /* Clear the list of insns to fill so we can use it. */
4296 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
4298 for (insn = first; insn; insn = NEXT_INSN (insn))
4300 int flags;
4302 /* Only look at filled JUMP_INSNs that go to the end of function
4303 label. */
4304 if (GET_CODE (insn) != INSN
4305 || GET_CODE (PATTERN (insn)) != SEQUENCE
4306 || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
4307 || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
4308 continue;
4310 pat = PATTERN (insn);
4311 jump_insn = XVECEXP (pat, 0, 0);
4313 /* If we can't make the jump into a RETURN, try to redirect it to the best
4314 RETURN and go on to the next insn. */
4315 if (! reorg_redirect_jump (jump_insn, NULL_RTX))
4317 /* Make sure redirecting the jump will not invalidate the delay
4318 slot insns. */
4319 if (redirect_with_delay_slots_safe_p (jump_insn,
4320 real_return_label,
4321 insn))
4322 reorg_redirect_jump (jump_insn, real_return_label);
4323 continue;
4326 /* See if this RETURN can accept the insns currently in its delay slot.
4327 It can if it has at least as many slots and the contents of each
4328 slot is valid. */
4330 flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
4331 slots = num_delay_slots (jump_insn);
4332 if (slots >= XVECLEN (pat, 0) - 1)
4334 for (i = 1; i < XVECLEN (pat, 0); i++)
4335 if (! (
4336 #ifdef ANNUL_IFFALSE_SLOTS
4337 (INSN_ANNULLED_BRANCH_P (jump_insn)
4338 && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
4339 ? eligible_for_annul_false (jump_insn, i - 1,
4340 XVECEXP (pat, 0, i), flags) :
4341 #endif
4342 #ifdef ANNUL_IFTRUE_SLOTS
4343 (INSN_ANNULLED_BRANCH_P (jump_insn)
4344 && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
4345 ? eligible_for_annul_true (jump_insn, i - 1,
4346 XVECEXP (pat, 0, i), flags) :
4347 #endif
4348 eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
4349 break;
4351 else
4352 i = 0;
4354 if (i == XVECLEN (pat, 0))
4355 continue;
4357 /* We have to do something with this insn. If it is an unconditional
4358 RETURN, delete the SEQUENCE and output the individual insns,
4359 followed by the RETURN. Then set things up so we try to find
4360 insns for its delay slots, if it needs some. */
4361 if (GET_CODE (PATTERN (jump_insn)) == RETURN)
4363 rtx prev = PREV_INSN (insn);
4365 delete_insn (insn);
4366 for (i = 1; i < XVECLEN (pat, 0); i++)
4367 prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);
4369 insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
4370 emit_barrier_after (insn);
4372 if (slots)
4373 obstack_ptr_grow (&unfilled_slots_obstack, insn);
4375 else
4376 /* It is probably more efficient to keep this with its current
4377 delay slot as a branch to a RETURN. */
4378 reorg_redirect_jump (jump_insn, real_return_label);
4381 /* Now delete REAL_RETURN_LABEL if we never used it. Then try to fill any
4382 new delay slots we have created. */
4383 if (--LABEL_NUSES (real_return_label) == 0)
4384 delete_insn (real_return_label);
4386 fill_simple_delay_slots (first, 1);
4387 fill_simple_delay_slots (first, 0);
4389 #endif
4391 /* Try to find insns to place in delay slots. */
4393 void
4394 dbr_schedule (first, file)
4395 rtx first;
4396 FILE *file;
4398 rtx insn, next, epilogue_insn = 0;
4399 int i;
4400 #if 0
4401 int old_flag_no_peephole = flag_no_peephole;
4403 /* Execute `final' once in prescan mode to delete any insns that won't be
4404 used. Don't let final try to do any peephole optimization--it will
4405 ruin dataflow information for this pass. */
4407 flag_no_peephole = 1;
4408 final (first, 0, NO_DEBUG, 1, 1);
4409 flag_no_peephole = old_flag_no_peephole;
4410 #endif
4412 /* If the current function has no insns other than the prologue and
4413 epilogue, then do not try to fill any delay slots. */
4414 if (n_basic_blocks == 0)
4415 return;
4417 /* Find the highest INSN_UID and allocate and initialize our map from
4418 INSN_UID's to position in code. */
4419 for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
4421 if (INSN_UID (insn) > max_uid)
4422 max_uid = INSN_UID (insn);
4423 if (GET_CODE (insn) == NOTE
4424 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
4425 epilogue_insn = insn;
4428 uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int *));
4429 for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
4430 uid_to_ruid[INSN_UID (insn)] = i;
4432 /* Initialize the list of insns that need filling. */
4433 if (unfilled_firstobj == 0)
4435 gcc_obstack_init (&unfilled_slots_obstack);
4436 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
4439 for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
4441 rtx target;
4443 INSN_ANNULLED_BRANCH_P (insn) = 0;
4444 INSN_FROM_TARGET_P (insn) = 0;
4446 /* Skip vector tables. We can't get attributes for them. */
4447 if (GET_CODE (insn) == JUMP_INSN
4448 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
4449 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
4450 continue;
4452 if (num_delay_slots (insn) > 0)
4453 obstack_ptr_grow (&unfilled_slots_obstack, insn);
4455 /* Ensure all jumps go to the last of a set of consecutive labels. */
4456 if (GET_CODE (insn) == JUMP_INSN
4457 && (condjump_p (insn) || condjump_in_parallel_p (insn))
4458 && JUMP_LABEL (insn) != 0
4459 && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
4460 != JUMP_LABEL (insn)))
4461 redirect_jump (insn, target);
4464 /* Indicate what resources are required to be valid at the end of the current
4465 function. The condition code never is and memory always is. If the
4466 frame pointer is needed, it is and so is the stack pointer unless
4467 EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
4468 stack pointer is. Registers used to return the function value are
4469 needed. Registers holding global variables are needed. */
4471 end_of_function_needs.cc = 0;
4472 end_of_function_needs.memory = 1;
4473 end_of_function_needs.unch_memory = 0;
4474 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
4476 if (frame_pointer_needed)
4478 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
4479 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4480 SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
4481 #endif
4482 #ifdef EXIT_IGNORE_STACK
4483 if (! EXIT_IGNORE_STACK)
4484 #endif
4485 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
4487 else
4488 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
4490 if (current_function_return_rtx != 0
4491 && GET_CODE (current_function_return_rtx) == REG)
4492 mark_referenced_resources (current_function_return_rtx,
4493 &end_of_function_needs, 1);
4495 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4496 if (global_regs[i]
4497 #ifdef EPILOGUE_USES
4498 || EPILOGUE_USES (i)
4499 #endif
4501 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
4503 /* The registers required to be live at the end of the function are
4504 represented in the flow information as being dead just prior to
4505 reaching the end of the function. For example, the return of a value
4506 might be represented by a USE of the return register immediately
4507 followed by an unconditional jump to the return label where the
4508 return label is the end of the RTL chain. The end of the RTL chain
4509 is then taken to mean that the return register is live.
4511 This sequence is no longer maintained when epilogue instructions are
4512 added to the RTL chain. To reconstruct the original meaning, the
4513 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
4514 point where these registers become live (start_of_epilogue_needs).
4515 If epilogue instructions are present, the registers set by those
4516 instructions won't have been processed by flow. Thus, those
4517 registers are additionally required at the end of the RTL chain
4518 (end_of_function_needs). */
4520 start_of_epilogue_needs = end_of_function_needs;
4522 while (epilogue_insn = next_nonnote_insn (epilogue_insn))
4523 mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
4525 /* Show we haven't computed an end-of-function label yet. */
4526 end_of_function_label = 0;
4528 /* Allocate and initialize the tables used by mark_target_live_regs. */
4529 target_hash_table
4530 = (struct target_info **) alloca ((TARGET_HASH_PRIME
4531 * sizeof (struct target_info *)));
4532 bzero ((char *) target_hash_table,
4533 TARGET_HASH_PRIME * sizeof (struct target_info *));
4535 bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
4536 bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));
4538 /* Initialize the statistics for this function. */
4539 bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
4540 bzero ((char *) num_filled_delays, sizeof num_filled_delays);
4542 /* Now do the delay slot filling. Try everything twice in case earlier
4543 changes make more slots fillable. */
4545 for (reorg_pass_number = 0;
4546 reorg_pass_number < MAX_REORG_PASSES;
4547 reorg_pass_number++)
4549 fill_simple_delay_slots (first, 1);
4550 fill_simple_delay_slots (first, 0);
4551 fill_eager_delay_slots (first);
4552 relax_delay_slots (first);
4555 /* Delete any USE insns made by update_block; subsequent passes don't need
4556 them or know how to deal with them. */
4557 for (insn = first; insn; insn = next)
4559 next = NEXT_INSN (insn);
4561 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
4562 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
4563 next = delete_insn (insn);
4566 /* If we made an end of function label, indicate that it is now
4567 safe to delete it by undoing our prior adjustment to LABEL_NUSES.
4568 If it is now unused, delete it. */
4569 if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
4570 delete_insn (end_of_function_label);
4572 #ifdef HAVE_return
4573 if (HAVE_return && end_of_function_label != 0)
4574 make_return_insns (first);
4575 #endif
4577 obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
4579 /* It is not clear why the line below is needed, but it does seem to be. */
4580 unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
4582 /* Reposition the prologue and epilogue notes in case we moved the
4583 prologue/epilogue insns. */
4584 reposition_prologue_and_epilogue_notes (first);
4586 if (file)
4588 register int i, j, need_comma;
4590 for (reorg_pass_number = 0;
4591 reorg_pass_number < MAX_REORG_PASSES;
4592 reorg_pass_number++)
4594 fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
4595 for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
4597 need_comma = 0;
4598 fprintf (file, ";; Reorg function #%d\n", i);
4600 fprintf (file, ";; %d insns needing delay slots\n;; ",
4601 num_insns_needing_delays[i][reorg_pass_number]);
4603 for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
4604 if (num_filled_delays[i][j][reorg_pass_number])
4606 if (need_comma)
4607 fprintf (file, ", ");
4608 need_comma = 1;
4609 fprintf (file, "%d got %d delays",
4610 num_filled_delays[i][j][reorg_pass_number], j);
4612 fprintf (file, "\n");
4617 #endif /* DELAY_SLOTS */