gcc/resource.c
1 /* Definitions for computing resource usage of specific insns.
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "toplev.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "hard-reg-set.h"
30 #include "function.h"
31 #include "regs.h"
32 #include "flags.h"
33 #include "output.h"
34 #include "resource.h"
35 #include "except.h"
36 #include "insn-attr.h"
37 #include "params.h"
39 /* This structure is used to record liveness information at the targets or
40 fallthrough insns of branches. We will most likely need the information
41 at targets again, so save them in a hash table rather than recomputing them
42 each time. */
44 struct target_info
46 int uid; /* INSN_UID of target. */
47 struct target_info *next; /* Next info for same hash bucket. */
48 HARD_REG_SET live_regs; /* Registers live at target. */
49 int block; /* Basic block number containing target. */
50 int bb_tick; /* Generation count of basic block info. */
53 #define TARGET_HASH_PRIME 257
55 /* Indicates what resources are required at the beginning of the epilogue. */
56 static struct resources start_of_epilogue_needs;
58 /* Indicates what resources are required at function end. */
59 static struct resources end_of_function_needs;
61 /* Define the hash table itself. */
62 static struct target_info **target_hash_table = NULL;
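/* Lookups are by INSN_UID modulo TARGET_HASH_PRIME, with collisions
   chained through the `next' field.  A minimal sketch of the lookup done
   in mark_target_live_regs and clear_hashed_info_for_insn below:

     struct target_info *tinfo
       = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
     while (tinfo && tinfo->uid != INSN_UID (target))
       tinfo = tinfo->next;  */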
64 /* For each basic block, we maintain a generation number of its basic
65 block info, which is updated each time we move an insn from the
66 target of a jump. This is the generation number indexed by block
67 number. */
69 static int *bb_ticks;
71 /* Marks registers possibly live at the current place being scanned by
72 mark_target_live_regs. Also used by update_live_status. */
74 static HARD_REG_SET current_live_regs;
76 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
77 Also only used by the next two functions. */
79 static HARD_REG_SET pending_dead_regs;
81 static void update_live_status (rtx, rtx, void *);
82 static int find_basic_block (rtx, int);
83 static rtx next_insn_no_annul (rtx);
84 static rtx find_dead_or_set_registers (rtx, struct resources*,
85 rtx*, int, struct resources,
86 struct resources);
88 /* Utility function called from mark_target_live_regs via note_stores.
89 It deadens any CLOBBERed registers and livens any SET registers. */
91 static void
92 update_live_status (rtx dest, rtx x, void *data ATTRIBUTE_UNUSED)
94 int first_regno, last_regno;
95 int i;
97 if (!REG_P (dest)
98 && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
99 return;
101 if (GET_CODE (dest) == SUBREG)
102 first_regno = subreg_regno (dest);
103 else
104 first_regno = REGNO (dest);
106 last_regno = first_regno + hard_regno_nregs[first_regno][GET_MODE (dest)];
108 if (GET_CODE (x) == CLOBBER)
109 for (i = first_regno; i < last_regno; i++)
110 CLEAR_HARD_REG_BIT (current_live_regs, i);
111 else
112 for (i = first_regno; i < last_regno; i++)
114 SET_HARD_REG_BIT (current_live_regs, i);
115 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
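/* update_live_status is not called directly; mark_target_live_regs passes
   it to note_stores, which invokes it once per SET or CLOBBER found in an
   insn's pattern, roughly:

     note_stores (PATTERN (real_insn), update_live_status, NULL);  */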
119 /* Find the number of the basic block with correct live register
120 information that starts closest to INSN. Return -1 if we couldn't
121 find such a basic block or the beginning is more than
122 SEARCH_LIMIT instructions before INSN. Use SEARCH_LIMIT = -1 for
123 an unlimited search.
125 The delay slot filling code destroys the control-flow graph so,
126 instead of finding the basic block containing INSN, we search
127 backwards toward a BARRIER where the live register information is
128 correct. */
130 static int
131 find_basic_block (rtx insn, int search_limit)
133 basic_block bb;
135 /* Scan backwards to the previous BARRIER. Then see if we can find a
136 label that starts a basic block. Return the basic block number. */
137 for (insn = prev_nonnote_insn (insn);
138 insn && !BARRIER_P (insn) && search_limit != 0;
139 insn = prev_nonnote_insn (insn), --search_limit)
142 /* The closest BARRIER is too far away. */
143 if (search_limit == 0)
144 return -1;
146 /* The start of the function. */
147 else if (insn == 0)
148 return ENTRY_BLOCK_PTR->next_bb->index;
150 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
151 anything other than a CODE_LABEL or note, we can't find this code. */
152 for (insn = next_nonnote_insn (insn);
153 insn && LABEL_P (insn);
154 insn = next_nonnote_insn (insn))
156 FOR_EACH_BB (bb)
157 if (insn == BB_HEAD (bb))
158 return bb->index;
161 return -1;
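/* Callers in this file bound the backward scan with the
   MAX_DELAY_SLOT_LIVE_SEARCH parameter, e.g.:

     int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);
     if (b != -1)
       bb_ticks[b]++;

   (see incr_ticks_for_insn at the end of this file).  */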
164 /* Similar to next_insn, but ignores insns in the delay slots of
165 an annulled branch. */
167 static rtx
168 next_insn_no_annul (rtx insn)
170 if (insn)
172 /* If INSN is an annulled branch, skip any insns from the target
173 of the branch. */
174 if (INSN_P (insn)
175 && INSN_ANNULLED_BRANCH_P (insn)
176 && NEXT_INSN (PREV_INSN (insn)) != insn)
178 rtx next = NEXT_INSN (insn);
179 enum rtx_code code = GET_CODE (next);
181 while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
182 && INSN_FROM_TARGET_P (next))
184 insn = next;
185 next = NEXT_INSN (insn);
186 code = GET_CODE (next);
190 insn = NEXT_INSN (insn);
191 if (insn && NONJUMP_INSN_P (insn)
192 && GET_CODE (PATTERN (insn)) == SEQUENCE)
193 insn = XVECEXP (PATTERN (insn), 0, 0);
196 return insn;
199 /* Given X, some rtl, and RES, a pointer to a `struct resource', mark
200 which resources are referenced by the insn. If INCLUDE_DELAYED_EFFECTS
201 is TRUE, resources used by the called routine will be included for
202 CALL_INSNs. */
204 void
205 mark_referenced_resources (rtx x, struct resources *res,
206 int include_delayed_effects)
208 enum rtx_code code = GET_CODE (x);
209 int i, j;
210 unsigned int r;
211 const char *format_ptr;
213 /* Handle leaf items for which we set resource flags. Also, special-case
214 CALL, SET and CLOBBER operators. */
215 switch (code)
217 case CONST:
218 case CONST_INT:
219 case CONST_DOUBLE:
220 case CONST_VECTOR:
221 case PC:
222 case SYMBOL_REF:
223 case LABEL_REF:
224 return;
226 case SUBREG:
227 if (!REG_P (SUBREG_REG (x)))
228 mark_referenced_resources (SUBREG_REG (x), res, 0);
229 else
231 unsigned int regno = subreg_regno (x);
232 unsigned int last_regno
233 = regno + hard_regno_nregs[regno][GET_MODE (x)];
235 gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
236 for (r = regno; r < last_regno; r++)
237 SET_HARD_REG_BIT (res->regs, r);
239 return;
241 case REG:
243 unsigned int regno = REGNO (x);
244 unsigned int last_regno
245 = regno + hard_regno_nregs[regno][GET_MODE (x)];
247 gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
248 for (r = regno; r < last_regno; r++)
249 SET_HARD_REG_BIT (res->regs, r);
251 return;
253 case MEM:
254 /* If this memory shouldn't change, it really isn't referencing
255 memory. */
256 if (MEM_READONLY_P (x))
257 res->unch_memory = 1;
258 else
259 res->memory = 1;
260 res->volatil |= MEM_VOLATILE_P (x);
262 /* Mark registers used to access memory. */
263 mark_referenced_resources (XEXP (x, 0), res, 0);
264 return;
266 case CC0:
267 res->cc = 1;
268 return;
270 case UNSPEC_VOLATILE:
271 case ASM_INPUT:
272 /* Traditional asms are always volatile. */
273 res->volatil = 1;
274 return;
276 case TRAP_IF:
277 res->volatil = 1;
278 break;
280 case ASM_OPERANDS:
281 res->volatil |= MEM_VOLATILE_P (x);
283 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
284 We cannot simply fall through here, since we would then be confused
285 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate a
286 traditional asm, unlike its normal usage. */
288 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
289 mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
290 return;
292 case CALL:
293 /* The first operand will be a (MEM (xxx)) but doesn't really reference
294 memory. The second operand may be referenced, though. */
295 mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
296 mark_referenced_resources (XEXP (x, 1), res, 0);
297 return;
299 case SET:
300 /* Usually, the first operand of SET is set, not referenced. But
301 registers used to access memory are referenced. SET_DEST is
302 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
304 mark_referenced_resources (SET_SRC (x), res, 0);
306 x = SET_DEST (x);
307 if (GET_CODE (x) == SIGN_EXTRACT
308 || GET_CODE (x) == ZERO_EXTRACT
309 || GET_CODE (x) == STRICT_LOW_PART)
310 mark_referenced_resources (x, res, 0);
311 else if (GET_CODE (x) == SUBREG)
312 x = SUBREG_REG (x);
313 if (MEM_P (x))
314 mark_referenced_resources (XEXP (x, 0), res, 0);
315 return;
317 case CLOBBER:
318 return;
320 case CALL_INSN:
321 if (include_delayed_effects)
323 /* A CALL references memory, the frame pointer if it exists, the
324 stack pointer, any global registers and any registers given in
325 USE insns immediately in front of the CALL.
327 However, we may have moved some of the parameter loading insns
328 into the delay slot of this CALL. If so, the USEs for them
329 don't count and should be skipped. */
330 rtx insn = PREV_INSN (x);
331 rtx sequence = 0;
332 int seq_size = 0;
333 int i;
335 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
336 if (NEXT_INSN (insn) != x)
338 sequence = PATTERN (NEXT_INSN (insn));
339 seq_size = XVECLEN (sequence, 0);
340 gcc_assert (GET_CODE (sequence) == SEQUENCE);
343 res->memory = 1;
344 SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
345 if (frame_pointer_needed)
347 SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
348 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
349 SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
350 #endif
353 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
354 if (global_regs[i])
355 SET_HARD_REG_BIT (res->regs, i);
357 /* Check for a REG_SETJMP. If it exists, then we must
358 assume that this call can need any register.
360 This is done to be more conservative about how we handle setjmp.
361 We assume that they both use and set all registers. Using all
362 registers ensures that a register will not be considered dead
363 just because it crosses a setjmp call. A register should be
364 considered dead only if the setjmp call returns nonzero. */
365 if (find_reg_note (x, REG_SETJMP, NULL))
366 SET_HARD_REG_SET (res->regs);
369 rtx link;
371 for (link = CALL_INSN_FUNCTION_USAGE (x);
372 link;
373 link = XEXP (link, 1))
374 if (GET_CODE (XEXP (link, 0)) == USE)
376 for (i = 1; i < seq_size; i++)
378 rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
379 if (GET_CODE (slot_pat) == SET
380 && rtx_equal_p (SET_DEST (slot_pat),
381 XEXP (XEXP (link, 0), 0)))
382 break;
384 if (i >= seq_size)
385 mark_referenced_resources (XEXP (XEXP (link, 0), 0),
386 res, 0);
391 /* ... fall through to other INSN processing ... */
393 case INSN:
394 case JUMP_INSN:
396 #ifdef INSN_REFERENCES_ARE_DELAYED
397 if (! include_delayed_effects
398 && INSN_REFERENCES_ARE_DELAYED (x))
399 return;
400 #endif
402 /* No special processing, just speed up. */
403 mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
404 return;
406 default:
407 break;
410 /* Process each sub-expression and flag what it needs. */
411 format_ptr = GET_RTX_FORMAT (code);
412 for (i = 0; i < GET_RTX_LENGTH (code); i++)
413 switch (*format_ptr++)
415 case 'e':
416 mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
417 break;
419 case 'E':
420 for (j = 0; j < XVECLEN (x, i); j++)
421 mark_referenced_resources (XVECEXP (x, i, j), res,
422 include_delayed_effects);
423 break;
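/* The usual calling pattern, as in find_dead_or_set_registers and
   mark_target_live_regs below, is to clear a resources struct and then
   accumulate what an insn reads into it:

     struct resources needed;
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, 1);  */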
427 /* A subroutine of mark_target_live_regs. Search forward from TARGET
428 looking for registers that are set before they are used. These are dead.
429 Stop after passing a few conditional jumps, and/or a small
430 number of unconditional branches. */
432 static rtx
433 find_dead_or_set_registers (rtx target, struct resources *res,
434 rtx *jump_target, int jump_count,
435 struct resources set, struct resources needed)
437 HARD_REG_SET scratch;
438 rtx insn, next;
439 rtx jump_insn = 0;
440 int i;
442 for (insn = target; insn; insn = next)
444 rtx this_jump_insn = insn;
446 next = NEXT_INSN (insn);
448 /* If this instruction can throw an exception, then we don't
449 know where we might end up next. That means that we have to
450 assume that whatever we have already marked as live really is
451 live. */
452 if (can_throw_internal (insn))
453 break;
455 switch (GET_CODE (insn))
457 case CODE_LABEL:
458 /* After a label, any pending dead registers that weren't yet
459 used can be made dead. */
460 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
461 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
462 CLEAR_HARD_REG_SET (pending_dead_regs);
464 continue;
466 case BARRIER:
467 case NOTE:
468 continue;
470 case INSN:
471 if (GET_CODE (PATTERN (insn)) == USE)
473 /* If INSN is a USE made by update_block, we care about the
474 underlying insn. Any registers set by the underlying insn
475 are live since the insn is being done somewhere else. */
476 if (INSN_P (XEXP (PATTERN (insn), 0)))
477 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
478 MARK_SRC_DEST_CALL);
480 /* All other USE insns are to be ignored. */
481 continue;
483 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
484 continue;
485 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
487 /* An unconditional jump can be used to fill the delay slot
488 of a call, so search for a JUMP_INSN in any position. */
489 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
491 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
492 if (JUMP_P (this_jump_insn))
493 break;
497 default:
498 break;
501 if (JUMP_P (this_jump_insn))
503 if (jump_count++ < 10)
505 if (any_uncondjump_p (this_jump_insn)
506 || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
508 next = JUMP_LABEL (this_jump_insn);
509 if (jump_insn == 0)
511 jump_insn = insn;
512 if (jump_target)
513 *jump_target = JUMP_LABEL (this_jump_insn);
516 else if (any_condjump_p (this_jump_insn))
518 struct resources target_set, target_res;
519 struct resources fallthrough_res;
521 /* We can handle conditional branches here by following
522 both paths, and then IOR the results of the two paths
523 together, which will give us registers that are dead
524 on both paths. Since this is expensive, we give it
525 a much higher cost than unconditional branches. The
526 cost was chosen so that we will follow at most 1
527 conditional branch. */
529 jump_count += 4;
530 if (jump_count >= 10)
531 break;
533 mark_referenced_resources (insn, &needed, 1);
535 /* For an annulled branch, mark_set_resources ignores slots
536 filled by instructions from the target. This is correct
537 if the branch is not taken. Since we are following both
538 paths from the branch, we must also compute correct info
539 if the branch is taken. We do this by inverting all of
540 the INSN_FROM_TARGET_P bits, calling mark_set_resources,
541 and then inverting the INSN_FROM_TARGET_P bits again. */
543 if (GET_CODE (PATTERN (insn)) == SEQUENCE
544 && INSN_ANNULLED_BRANCH_P (this_jump_insn))
546 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
547 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
548 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
550 target_set = set;
551 mark_set_resources (insn, &target_set, 0,
552 MARK_SRC_DEST_CALL);
554 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
555 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
556 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
558 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
560 else
562 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
563 target_set = set;
566 target_res = *res;
567 COPY_HARD_REG_SET (scratch, target_set.regs);
568 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
569 AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
571 fallthrough_res = *res;
572 COPY_HARD_REG_SET (scratch, set.regs);
573 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
574 AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
576 find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
577 &target_res, 0, jump_count,
578 target_set, needed);
579 find_dead_or_set_registers (next,
580 &fallthrough_res, 0, jump_count,
581 set, needed);
582 IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
583 AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
584 break;
586 else
587 break;
589 else
591 /* Don't try this optimization if we expired our jump count
592 above, since that would mean there may be an infinite loop
593 in the function being compiled. */
594 jump_insn = 0;
595 break;
599 mark_referenced_resources (insn, &needed, 1);
600 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
602 COPY_HARD_REG_SET (scratch, set.regs);
603 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
604 AND_COMPL_HARD_REG_SET (res->regs, scratch);
607 return jump_insn;
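/* mark_target_live_regs (below) drives this routine as

     jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
                                             set, needed);

   and, when a non-null JUMP_INSN comes back, re-scans from the jump target
   to pick up anything used before being set on that path.  */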
610 /* Given X, a part of an insn, and a pointer to a `struct resource',
611 RES, indicate which resources are modified by the insn. If
612 MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
613 set by the called routine.
615 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
616 objects are being referenced instead of set.
618 We never mark the insn as modifying the condition code unless it explicitly
619 SETs CC0 even though this is not totally correct. The reason for this is
620 that we require a SET of CC0 to immediately precede the reference to CC0.
621 So if some other insn sets CC0 as a side-effect, we know it cannot affect
622 our computation and thus may be placed in a delay slot. */
624 void
625 mark_set_resources (rtx x, struct resources *res, int in_dest,
626 enum mark_resource_type mark_type)
628 enum rtx_code code;
629 int i, j;
630 unsigned int r;
631 const char *format_ptr;
633 restart:
635 code = GET_CODE (x);
637 switch (code)
639 case NOTE:
640 case BARRIER:
641 case CODE_LABEL:
642 case USE:
643 case CONST_INT:
644 case CONST_DOUBLE:
645 case CONST_VECTOR:
646 case LABEL_REF:
647 case SYMBOL_REF:
648 case CONST:
649 case PC:
650 /* These don't set any resources. */
651 return;
653 case CC0:
654 if (in_dest)
655 res->cc = 1;
656 return;
658 case CALL_INSN:
659 /* Called routine modifies the condition code, memory, any registers
660 that aren't saved across calls, global registers and anything
661 explicitly CLOBBERed immediately after the CALL_INSN. */
663 if (mark_type == MARK_SRC_DEST_CALL)
665 rtx link;
667 res->cc = res->memory = 1;
668 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
669 if (call_used_regs[r] || global_regs[r])
670 SET_HARD_REG_BIT (res->regs, r);
672 for (link = CALL_INSN_FUNCTION_USAGE (x);
673 link; link = XEXP (link, 1))
674 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
675 mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
676 MARK_SRC_DEST);
678 /* Check for a REG_SETJMP. If it exists, then we must
679 assume that this call can clobber any register. */
680 if (find_reg_note (x, REG_SETJMP, NULL))
681 SET_HARD_REG_SET (res->regs);
684 /* ... and also what its RTL says it modifies, if anything. */
686 case JUMP_INSN:
687 case INSN:
689 /* An insn consisting of just a CLOBBER (or USE) is just for flow
690 and doesn't actually do anything, so we ignore it. */
692 #ifdef INSN_SETS_ARE_DELAYED
693 if (mark_type != MARK_SRC_DEST_CALL
694 && INSN_SETS_ARE_DELAYED (x))
695 return;
696 #endif
698 x = PATTERN (x);
699 if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
700 goto restart;
701 return;
703 case SET:
704 /* If the source of a SET is a CALL, this is actually done by
705 the called routine. So only include it if we are to include the
706 effects of the called routine. */
708 mark_set_resources (SET_DEST (x), res,
709 (mark_type == MARK_SRC_DEST_CALL
710 || GET_CODE (SET_SRC (x)) != CALL),
711 mark_type);
713 mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
714 return;
716 case CLOBBER:
717 mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
718 return;
720 case SEQUENCE:
721 for (i = 0; i < XVECLEN (x, 0); i++)
722 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
723 && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
724 mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
725 return;
727 case POST_INC:
728 case PRE_INC:
729 case POST_DEC:
730 case PRE_DEC:
731 mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
732 return;
734 case PRE_MODIFY:
735 case POST_MODIFY:
736 mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
737 mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
738 mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
739 return;
741 case SIGN_EXTRACT:
742 case ZERO_EXTRACT:
743 mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
744 mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
745 mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
746 return;
748 case MEM:
749 if (in_dest)
751 res->memory = 1;
752 res->unch_memory |= MEM_READONLY_P (x);
753 res->volatil |= MEM_VOLATILE_P (x);
756 mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
757 return;
759 case SUBREG:
760 if (in_dest)
762 if (!REG_P (SUBREG_REG (x)))
763 mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
764 else
766 unsigned int regno = subreg_regno (x);
767 unsigned int last_regno
768 = regno + hard_regno_nregs[regno][GET_MODE (x)];
770 gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
771 for (r = regno; r < last_regno; r++)
772 SET_HARD_REG_BIT (res->regs, r);
775 return;
777 case REG:
778 if (in_dest)
780 unsigned int regno = REGNO (x);
781 unsigned int last_regno
782 = regno + hard_regno_nregs[regno][GET_MODE (x)];
784 gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
785 for (r = regno; r < last_regno; r++)
786 SET_HARD_REG_BIT (res->regs, r);
788 return;
790 case UNSPEC_VOLATILE:
791 case ASM_INPUT:
792 /* Traditional asms are always volatile. */
793 res->volatil = 1;
794 return;
796 case TRAP_IF:
797 res->volatil = 1;
798 break;
800 case ASM_OPERANDS:
801 res->volatil |= MEM_VOLATILE_P (x);
803 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
804 We cannot simply fall through here, since we would then be confused
805 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate a
806 traditional asm, unlike its normal usage. */
808 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
809 mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
810 MARK_SRC_DEST);
811 return;
813 default:
814 break;
817 /* Process each sub-expression and flag what it needs. */
818 format_ptr = GET_RTX_FORMAT (code);
819 for (i = 0; i < GET_RTX_LENGTH (code); i++)
820 switch (*format_ptr++)
822 case 'e':
823 mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
824 break;
826 case 'E':
827 for (j = 0; j < XVECLEN (x, i); j++)
828 mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
829 break;
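/* mark_set_resources is normally paired with mark_referenced_resources
   when scanning insns, as in find_dead_or_set_registers above:

     mark_referenced_resources (insn, &needed, 1);
     mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

   Registers that end up in SET but not in NEEDED were written before being
   read, and so can be removed from the live set.  */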
833 /* Return TRUE if INSN is a return, possibly with a filled delay slot. */
835 static bool
836 return_insn_p (rtx insn)
838 if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
839 return true;
841 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
842 return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));
844 return false;
847 /* Set the resources that are live at TARGET.
849 If TARGET is zero, we refer to the end of the current function and can
850 return our precomputed value.
852 Otherwise, we try to find out what is live by consulting the basic block
853 information. This is tricky, because we must consider the actions of
854 reload and jump optimization, which occur after the basic block information
855 has been computed.
857 Accordingly, we proceed as follows:
859 We find the previous BARRIER and look at all immediately following labels
860 (with no intervening active insns) to see if any of them start a basic
861 block. If we hit the start of the function first, we use block 0.
863 Once we have found a basic block and a corresponding first insn, we can
864 accurately compute the live status from basic_block_live_regs and
865 reg_renumber. (By starting at a label following a BARRIER, we are immune
866 to actions taken by reload and jump.) Then we scan all insns between
867 that point and our target. For each CLOBBER (or for call-clobbered regs
868 when we pass a CALL_INSN), mark the appropriate registers as dead. For
869 a SET, mark them as live.
871 We have to be careful when using REG_DEAD notes because they are not
872 updated by such things as find_equiv_reg. So keep track of registers
873 marked as dead that haven't been assigned to, and mark them dead at the
874 next CODE_LABEL since reload and jump won't propagate values across labels.
876 If we cannot find the start of a basic block (should be a very rare
877 case, if it can happen at all), mark everything as potentially live.
879 Next, scan forward from TARGET looking for things set or clobbered
880 before they are used. These are not live.
882 Because we can be called many times on the same target, save our results
883 in a hash table indexed by INSN_UID. This is only done if the function
884 init_resource_info () was invoked before we are called. */
886 void
887 mark_target_live_regs (rtx insns, rtx target, struct resources *res)
889 int b = -1;
890 unsigned int i;
891 struct target_info *tinfo = NULL;
892 rtx insn;
893 rtx jump_insn = 0;
894 rtx jump_target;
895 HARD_REG_SET scratch;
896 struct resources set, needed;
898 /* Handle end of function. */
899 if (target == 0)
901 *res = end_of_function_needs;
902 return;
905 /* Handle return insn. */
906 else if (return_insn_p (target))
908 *res = end_of_function_needs;
909 mark_referenced_resources (target, res, 0);
910 return;
913 /* We have to assume memory is needed, but the CC isn't. */
914 res->memory = 1;
915 res->volatil = res->unch_memory = 0;
916 res->cc = 0;
918 /* See if we have computed this value already. */
919 if (target_hash_table != NULL)
921 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
922 tinfo; tinfo = tinfo->next)
923 if (tinfo->uid == INSN_UID (target))
924 break;
926 /* Start by getting the basic block number. If we have saved
927 information, we can get it from there unless the insn at the
928 start of the basic block has been deleted. */
929 if (tinfo && tinfo->block != -1
930 && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
931 b = tinfo->block;
934 if (b == -1)
935 b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);
937 if (target_hash_table != NULL)
939 if (tinfo)
941 /* If the information is up-to-date, use it. Otherwise, we will
942 update it below. */
943 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
945 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
946 return;
949 else
951 /* Allocate a place to put our results and chain it into the
952 hash table. */
953 tinfo = xmalloc (sizeof (struct target_info));
954 tinfo->uid = INSN_UID (target);
955 tinfo->block = b;
956 tinfo->next
957 = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
958 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
962 CLEAR_HARD_REG_SET (pending_dead_regs);
964 /* If we found a basic block, get the live registers from it and update
965 them with anything set or killed between its start and the insn before
966 TARGET. Otherwise, we must assume everything is live. */
967 if (b != -1)
969 regset regs_live = BASIC_BLOCK (b)->global_live_at_start;
970 unsigned int j;
971 unsigned int regno;
972 rtx start_insn, stop_insn;
973 reg_set_iterator rsi;
975 /* Compute hard regs live at start of block -- this is the real hard regs
976 marked live, plus live pseudo regs that have been renumbered to
977 hard regs. */
979 REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
981 EXECUTE_IF_SET_IN_REG_SET (regs_live, FIRST_PSEUDO_REGISTER, i, rsi)
983 if (reg_renumber[i] >= 0)
985 regno = reg_renumber[i];
986 for (j = regno;
987 j < regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
988 j++)
989 SET_HARD_REG_BIT (current_live_regs, j);
993 /* Get starting and ending insn, handling the case where each might
994 be a SEQUENCE. */
995 start_insn = (b == 0 ? insns : BB_HEAD (BASIC_BLOCK (b)));
996 stop_insn = target;
998 if (NONJUMP_INSN_P (start_insn)
999 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
1000 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
1002 if (NONJUMP_INSN_P (stop_insn)
1003 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
1004 stop_insn = next_insn (PREV_INSN (stop_insn));
1006 for (insn = start_insn; insn != stop_insn;
1007 insn = next_insn_no_annul (insn))
1009 rtx link;
1010 rtx real_insn = insn;
1011 enum rtx_code code = GET_CODE (insn);
1013 /* If this insn is from the target of a branch, it isn't going to
1014 be used in the sequel. If it is used in both cases, this
1015 test will not be true. */
1016 if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
1017 && INSN_FROM_TARGET_P (insn))
1018 continue;
1020 /* If this insn is a USE made by update_block, we care about the
1021 underlying insn. */
1022 if (code == INSN && GET_CODE (PATTERN (insn)) == USE
1023 && INSN_P (XEXP (PATTERN (insn), 0)))
1024 real_insn = XEXP (PATTERN (insn), 0);
1026 if (CALL_P (real_insn))
1028 /* CALL clobbers all call-used regs that aren't fixed except
1029 sp, ap, and fp. Do this before setting the result of the
1030 call live. */
1031 AND_COMPL_HARD_REG_SET (current_live_regs,
1032 regs_invalidated_by_call);
1034 /* A CALL_INSN sets any global register live, since it may
1035 have been modified by the call. */
1036 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1037 if (global_regs[i])
1038 SET_HARD_REG_BIT (current_live_regs, i);
1041 /* Mark anything killed in an insn to be deadened at the next
1042 label. Ignore USE insns; the only REG_DEAD notes will be for
1043 parameters. But they might be early. A CALL_INSN will usually
1044 clobber registers used for parameters. It isn't worth bothering
1045 with the unlikely case when it won't. */
1046 if ((NONJUMP_INSN_P (real_insn)
1047 && GET_CODE (PATTERN (real_insn)) != USE
1048 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
1049 || JUMP_P (real_insn)
1050 || CALL_P (real_insn))
1052 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1053 if (REG_NOTE_KIND (link) == REG_DEAD
1054 && REG_P (XEXP (link, 0))
1055 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1057 unsigned int first_regno = REGNO (XEXP (link, 0));
1058 unsigned int last_regno
1059 = (first_regno
1060 + hard_regno_nregs[first_regno]
1061 [GET_MODE (XEXP (link, 0))]);
1063 for (i = first_regno; i < last_regno; i++)
1064 SET_HARD_REG_BIT (pending_dead_regs, i);
1067 note_stores (PATTERN (real_insn), update_live_status, NULL);
1069 /* If any registers were unused after this insn, kill them.
1070 These notes will always be accurate. */
1071 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1072 if (REG_NOTE_KIND (link) == REG_UNUSED
1073 && REG_P (XEXP (link, 0))
1074 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1076 unsigned int first_regno = REGNO (XEXP (link, 0));
1077 unsigned int last_regno
1078 = (first_regno
1079 + hard_regno_nregs[first_regno]
1080 [GET_MODE (XEXP (link, 0))]);
1082 for (i = first_regno; i < last_regno; i++)
1083 CLEAR_HARD_REG_BIT (current_live_regs, i);
1087 else if (LABEL_P (real_insn))
1089 /* A label clobbers the pending dead registers since neither
1090 reload nor jump will propagate a value across a label. */
1091 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
1092 CLEAR_HARD_REG_SET (pending_dead_regs);
1095 /* The beginning of the epilogue corresponds to the end of the
1096 RTL chain when there are no epilogue insns. Certain resources
1097 are implicitly required at that point. */
1098 else if (NOTE_P (real_insn)
1099 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
1100 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
1103 COPY_HARD_REG_SET (res->regs, current_live_regs);
1104 if (tinfo != NULL)
1106 tinfo->block = b;
1107 tinfo->bb_tick = bb_ticks[b];
1110 else
1111 /* We didn't find the start of a basic block. Assume everything
1112 in use. This should happen only extremely rarely. */
1113 SET_HARD_REG_SET (res->regs);
1115 CLEAR_RESOURCE (&set);
1116 CLEAR_RESOURCE (&needed);
1118 jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
1119 set, needed);
1121 /* If we hit an unconditional branch, we have another way of finding out
1122 what is live: we can see what is live at the branch target and include
1123 anything used but not set before the branch. We add the live
1124 resources found using the test below to those found until now. */
1126 if (jump_insn)
1128 struct resources new_resources;
1129 rtx stop_insn = next_active_insn (jump_insn);
1131 mark_target_live_regs (insns, next_active_insn (jump_target),
1132 &new_resources);
1133 CLEAR_RESOURCE (&set);
1134 CLEAR_RESOURCE (&needed);
1136 /* Include JUMP_INSN in the needed registers. */
1137 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
1139 mark_referenced_resources (insn, &needed, 1);
1141 COPY_HARD_REG_SET (scratch, needed.regs);
1142 AND_COMPL_HARD_REG_SET (scratch, set.regs);
1143 IOR_HARD_REG_SET (new_resources.regs, scratch);
1145 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1148 IOR_HARD_REG_SET (res->regs, new_resources.regs);
1151 if (tinfo != NULL)
1153 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
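/* The expected protocol, as a sketch (the driver is typically the delay
   slot scheduler in reorg.c; get_insns () is assumed here as the usual way
   to obtain the INSNS argument):

     init_resource_info (epilogue_insn);
     ...
     struct resources live;
     mark_target_live_regs (get_insns (), target, &live);
     ...
     free_resource_info ();  */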
1157 /* Initialize the resources required by mark_target_live_regs ().
1158 This should be invoked before the first call to mark_target_live_regs. */
1160 void
1161 init_resource_info (rtx epilogue_insn)
1163 int i;
1165 /* Indicate what resources are required to be valid at the end of the current
1166 function. The condition code never is and memory always is. If the
1167 frame pointer is needed, it is and so is the stack pointer unless
1168 EXIT_IGNORE_STACK is nonzero. If the frame pointer is not needed, the
1169 stack pointer is. Registers used to return the function value are
1170 needed. Registers holding global variables are needed. */
1172 end_of_function_needs.cc = 0;
1173 end_of_function_needs.memory = 1;
1174 end_of_function_needs.unch_memory = 0;
1175 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
1177 if (frame_pointer_needed)
1179 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
1180 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1181 SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
1182 #endif
1183 if (! EXIT_IGNORE_STACK
1184 || current_function_sp_is_unchanging)
1185 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
1187 else
1188 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
1190 if (current_function_return_rtx != 0)
1191 mark_referenced_resources (current_function_return_rtx,
1192 &end_of_function_needs, 1);
1194 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1195 if (global_regs[i]
1196 #ifdef EPILOGUE_USES
1197 || EPILOGUE_USES (i)
1198 #endif
1200 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
1202 /* The registers required to be live at the end of the function are
1203 represented in the flow information as being dead just prior to
1204 reaching the end of the function. For example, the return of a value
1205 might be represented by a USE of the return register immediately
1206 followed by an unconditional jump to the return label where the
1207 return label is the end of the RTL chain. The end of the RTL chain
1208 is then taken to mean that the return register is live.
1210 This sequence is no longer maintained when epilogue instructions are
1211 added to the RTL chain. To reconstruct the original meaning, the
1212 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
1213 point where these registers become live (start_of_epilogue_needs).
1214 If epilogue instructions are present, the registers set by those
1215 instructions won't have been processed by flow. Thus, those
1216 registers are additionally required at the end of the RTL chain
1217 (end_of_function_needs). */
1219 start_of_epilogue_needs = end_of_function_needs;
1221 while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
1223 mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
1224 MARK_SRC_DEST_CALL);
1225 if (return_insn_p (epilogue_insn))
1226 break;
1229 /* Allocate and initialize the tables used by mark_target_live_regs. */
1230 target_hash_table = xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
1231 bb_ticks = xcalloc (last_basic_block, sizeof (int));
1234 /* Free up the resources allocated to mark_target_live_regs (). This
1235 should be invoked after the last call to mark_target_live_regs (). */
1237 void
1238 free_resource_info (void)
1240 if (target_hash_table != NULL)
1242 int i;
1244 for (i = 0; i < TARGET_HASH_PRIME; ++i)
1246 struct target_info *ti = target_hash_table[i];
1248 while (ti)
1250 struct target_info *next = ti->next;
1251 free (ti);
1252 ti = next;
1256 free (target_hash_table);
1257 target_hash_table = NULL;
1260 if (bb_ticks != NULL)
1262 free (bb_ticks);
1263 bb_ticks = NULL;
1267 /* Clear any hashed information that we have stored for INSN. */
1269 void
1270 clear_hashed_info_for_insn (rtx insn)
1272 struct target_info *tinfo;
1274 if (target_hash_table != NULL)
1276 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
1277 tinfo; tinfo = tinfo->next)
1278 if (tinfo->uid == INSN_UID (insn))
1279 break;
1281 if (tinfo)
1282 tinfo->block = -1;
1286 /* Increment the tick count for the basic block that contains INSN. */
1288 void
1289 incr_ticks_for_insn (rtx insn)
1291 int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);
1293 if (b != -1)
1294 bb_ticks[b]++;
1297 /* Add TRIAL to the set of resources used at the end of the current
1298 function. */
1299 void
1300 mark_end_of_function_resources (rtx trial, int include_delayed_effects)
1302 mark_referenced_resources (trial, &end_of_function_needs,
1303 include_delayed_effects);