gcc/resource.c
1 /* Definitions for computing resource usage of specific insns.
2 Copyright (C) 1999, 2000 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "toplev.h"
24 #include "rtl.h"
25 #include "tm_p.h"
26 #include "hard-reg-set.h"
27 #include "basic-block.h"
28 #include "function.h"
29 #include "regs.h"
30 #include "flags.h"
31 #include "output.h"
32 #include "resource.h"
33 #include "insn-attr.h"
35 /* This structure is used to record liveness information at the targets or
36 fallthrough insns of branches. We will most likely need the information
37 at targets again, so save it in a hash table rather than recomputing it
38 each time. */
40 struct target_info
42 int uid; /* INSN_UID of target. */
43 struct target_info *next; /* Next info for same hash bucket. */
44 HARD_REG_SET live_regs; /* Registers live at target. */
45 int block; /* Basic block number containing target. */
46 int bb_tick; /* Generation count of basic block info. */
49 #define TARGET_HASH_PRIME 257
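/* A note on the table layout (see mark_target_live_regs below): entries are
   chained off target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME],
   linked through the `next' field and identified by `uid'.  Finding a cached
   entry is just the walk

     for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
          tinfo; tinfo = tinfo->next)
       if (tinfo->uid == INSN_UID (target))
         break;

   as performed in mark_target_live_regs and clear_hashed_info_for_insn.  */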
51 /* Indicates what resources are required at the beginning of the epilogue. */
52 static struct resources start_of_epilogue_needs;
54 /* Indicates what resources are required at function end. */
55 static struct resources end_of_function_needs;
57 /* Define the hash table itself. */
58 static struct target_info **target_hash_table = NULL;
60 /* For each basic block, we maintain a generation number of its basic
61 block info, which is updated each time we move an insn from the
62 target of a jump. This is the generation number indexed by block
63 number. */
65 static int *bb_ticks;
67 /* Marks registers possibly live at the current place being scanned by
68    mark_target_live_regs.  Used only by the next two functions.  */
70 static HARD_REG_SET current_live_regs;
72 /* Marks registers for which we have seen a REG_DEAD note but no assignment.
73 Also only used by the next two functions. */
75 static HARD_REG_SET pending_dead_regs;
77 static void update_live_status PARAMS ((rtx, rtx, void *));
78 static int find_basic_block PARAMS ((rtx));
79 static rtx next_insn_no_annul PARAMS ((rtx));
80 static rtx find_dead_or_set_registers PARAMS ((rtx, struct resources*,
81 rtx*, int, struct resources,
82 struct resources));
84 /* Utility function called from mark_target_live_regs via note_stores.
85 It deadens any CLOBBERed registers and livens any SET registers. */
87 static void
88 update_live_status (dest, x, data)
89 rtx dest;
90 rtx x;
91 void *data ATTRIBUTE_UNUSED;
93 int first_regno, last_regno;
94 int i;
96 if (GET_CODE (dest) != REG
97 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
98 return;
100 if (GET_CODE (dest) == SUBREG)
101 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
102 else
103 first_regno = REGNO (dest);
105 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
107 if (GET_CODE (x) == CLOBBER)
108 for (i = first_regno; i < last_regno; i++)
109 CLEAR_HARD_REG_BIT (current_live_regs, i);
110 else
111 for (i = first_regno; i < last_regno; i++)
113 SET_HARD_REG_BIT (current_live_regs, i);
114 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
117 /* Find the number of the basic block that starts closest to INSN. Return -1
118 if we couldn't find such a basic block. */
120 static int
121 find_basic_block (insn)
122 rtx insn;
124 int i;
126 /* Scan backwards to the previous BARRIER. Then see if we can find a
127 label that starts a basic block. Return the basic block number. */
129 for (insn = prev_nonnote_insn (insn);
130 insn && GET_CODE (insn) != BARRIER;
131 insn = prev_nonnote_insn (insn))
134 /* The start of the function is basic block zero. */
135 if (insn == 0)
136 return 0;
138 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
139 anything other than a CODE_LABEL or note, we can't find this code. */
140 for (insn = next_nonnote_insn (insn);
141 insn && GET_CODE (insn) == CODE_LABEL;
142 insn = next_nonnote_insn (insn))
144 for (i = 0; i < n_basic_blocks; i++)
145 if (insn == BLOCK_HEAD (i))
146 return i;
149 return -1;
152 /* Similar to next_insn, but ignores insns in the delay slots of
153 an annulled branch. */
155 static rtx
156 next_insn_no_annul (insn)
157 rtx insn;
159 if (insn)
161 /* If INSN is an annulled branch, skip any insns from the target
162 of the branch. */
163 if (INSN_ANNULLED_BRANCH_P (insn)
164 && NEXT_INSN (PREV_INSN (insn)) != insn)
165 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
166 insn = NEXT_INSN (insn);
168 insn = NEXT_INSN (insn);
169 if (insn && GET_CODE (insn) == INSN
170 && GET_CODE (PATTERN (insn)) == SEQUENCE)
171 insn = XVECEXP (PATTERN (insn), 0, 0);
174 return insn;
177 /* Given X, some rtl, and RES, a pointer to a `struct resource', mark
178    which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
179 is TRUE, resources used by the called routine will be included for
180 CALL_INSNs. */
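/* A typical call sequence, as used later in this file (see
   find_dead_or_set_registers and mark_target_live_regs): clear the resource
   set first, then accumulate into it:

     struct resources needed;
     CLEAR_RESOURCE (&needed);
     mark_referenced_resources (insn, &needed, 1);

   Afterwards needed.regs holds the hard registers the insn reads, and
   needed.memory, needed.unch_memory, needed.cc and needed.volatil flag the
   other resources it references.  */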
182 void
183 mark_referenced_resources (x, res, include_delayed_effects)
184 register rtx x;
185 register struct resources *res;
186 register int include_delayed_effects;
188 enum rtx_code code = GET_CODE (x);
189 int i, j;
190 unsigned int r;
191 register const char *format_ptr;
193 /* Handle leaf items for which we set resource flags. Also, special-case
194 CALL, SET and CLOBBER operators. */
195 switch (code)
197 case CONST:
198 case CONST_INT:
199 case CONST_DOUBLE:
200 case PC:
201 case SYMBOL_REF:
202 case LABEL_REF:
203 return;
205 case SUBREG:
206 if (GET_CODE (SUBREG_REG (x)) != REG)
207 mark_referenced_resources (SUBREG_REG (x), res, 0);
208 else
210 unsigned int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
211 unsigned int last_regno
212 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
214 for (r = regno; r < last_regno; r++)
215 SET_HARD_REG_BIT (res->regs, r);
217 return;
219 case REG:
220 for (r = 0; r < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); r++)
221 SET_HARD_REG_BIT (res->regs, REGNO (x) + r);
222 return;
224 case MEM:
225 /* If this memory shouldn't change, it really isn't referencing
226 memory. */
227 if (RTX_UNCHANGING_P (x))
228 res->unch_memory = 1;
229 else
230 res->memory = 1;
231 res->volatil |= MEM_VOLATILE_P (x);
233 /* Mark registers used to access memory. */
234 mark_referenced_resources (XEXP (x, 0), res, 0);
235 return;
237 case CC0:
238 res->cc = 1;
239 return;
241 case UNSPEC_VOLATILE:
242 case ASM_INPUT:
243 /* Traditional asm's are always volatile. */
244 res->volatil = 1;
245 return;
247 case TRAP_IF:
248 res->volatil = 1;
249 break;
251 case ASM_OPERANDS:
252 res->volatil |= MEM_VOLATILE_P (x);
254 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
255	 We cannot just fall through here since then we would be confused
256	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
257	 a traditional asm, unlike its normal usage.  */
259 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
260 mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
261 return;
263 case CALL:
264 /* The first operand will be a (MEM (xxx)) but doesn't really reference
265 memory. The second operand may be referenced, though. */
266 mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
267 mark_referenced_resources (XEXP (x, 1), res, 0);
268 return;
270 case SET:
271 /* Usually, the first operand of SET is set, not referenced. But
272 registers used to access memory are referenced. SET_DEST is
273 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT. */
275 mark_referenced_resources (SET_SRC (x), res, 0);
277 x = SET_DEST (x);
278 if (GET_CODE (x) == SIGN_EXTRACT
279 || GET_CODE (x) == ZERO_EXTRACT
280 || GET_CODE (x) == STRICT_LOW_PART)
281 mark_referenced_resources (x, res, 0);
282 else if (GET_CODE (x) == SUBREG)
283 x = SUBREG_REG (x);
284 if (GET_CODE (x) == MEM)
285 mark_referenced_resources (XEXP (x, 0), res, 0);
286 return;
288 case CLOBBER:
289 return;
291 case CALL_INSN:
292 if (include_delayed_effects)
294 /* A CALL references memory, the frame pointer if it exists, the
295 stack pointer, any global registers and any registers given in
296 USE insns immediately in front of the CALL.
298 However, we may have moved some of the parameter loading insns
299 into the delay slot of this CALL. If so, the USE's for them
300 don't count and should be skipped. */
301 rtx insn = PREV_INSN (x);
302 rtx sequence = 0;
303 int seq_size = 0;
304 rtx next = NEXT_INSN (x);
305 int i;
307 /* If we are part of a delay slot sequence, point at the SEQUENCE. */
308 if (NEXT_INSN (insn) != x)
310 next = NEXT_INSN (NEXT_INSN (insn));
311 sequence = PATTERN (NEXT_INSN (insn));
312 seq_size = XVECLEN (sequence, 0);
313 if (GET_CODE (sequence) != SEQUENCE)
314 abort ();
317 res->memory = 1;
318 SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
319 if (frame_pointer_needed)
321 SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
322 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
323 SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
324 #endif
327 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
328 if (global_regs[i])
329 SET_HARD_REG_BIT (res->regs, i);
331 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
332 assume that this call can need any register.
334 This is done to be more conservative about how we handle setjmp.
335 We assume that they both use and set all registers. Using all
336 registers ensures that a register will not be considered dead
337 just because it crosses a setjmp call. A register should be
338 considered dead only if the setjmp call returns non-zero. */
339 if (next && GET_CODE (next) == NOTE
340 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
341 SET_HARD_REG_SET (res->regs);
344 rtx link;
346 for (link = CALL_INSN_FUNCTION_USAGE (x);
347 link;
348 link = XEXP (link, 1))
349 if (GET_CODE (XEXP (link, 0)) == USE)
351 for (i = 1; i < seq_size; i++)
353 rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
354 if (GET_CODE (slot_pat) == SET
355 && rtx_equal_p (SET_DEST (slot_pat),
356 XEXP (XEXP (link, 0), 0)))
357 break;
359 if (i >= seq_size)
360 mark_referenced_resources (XEXP (XEXP (link, 0), 0),
361 res, 0);
366 /* ... fall through to other INSN processing ... */
368 case INSN:
369 case JUMP_INSN:
371 #ifdef INSN_REFERENCES_ARE_DELAYED
372 if (! include_delayed_effects
373 && INSN_REFERENCES_ARE_DELAYED (x))
374 return;
375 #endif
377 /* No special processing, just speed up. */
378 mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
379 return;
381 default:
382 break;
385 /* Process each sub-expression and flag what it needs. */
386 format_ptr = GET_RTX_FORMAT (code);
387 for (i = 0; i < GET_RTX_LENGTH (code); i++)
388 switch (*format_ptr++)
390 case 'e':
391 mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
392 break;
394 case 'E':
395 for (j = 0; j < XVECLEN (x, i); j++)
396 mark_referenced_resources (XVECEXP (x, i, j), res,
397 include_delayed_effects);
398 break;
402 /* A subroutine of mark_target_live_regs. Search forward from TARGET
403 looking for registers that are set before they are used. These are dead.
404 Stop after passing a few conditional jumps, and/or a small
405 number of unconditional branches. */
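/* A note on the search budget used below: each jump followed bumps
   JUMP_COUNT by one and a conditional branch adds four more, and the scan
   gives up once the count reaches ten, so in practice at most one
   conditional branch (following both of its arms) is examined.  */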
407 static rtx
408 find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
409 rtx target;
410 struct resources *res;
411 rtx *jump_target;
412 int jump_count;
413 struct resources set, needed;
415 HARD_REG_SET scratch;
416 rtx insn, next;
417 rtx jump_insn = 0;
418 int i;
420 for (insn = target; insn; insn = next)
422 rtx this_jump_insn = insn;
424 next = NEXT_INSN (insn);
425 switch (GET_CODE (insn))
427 case CODE_LABEL:
428 /* After a label, any pending dead registers that weren't yet
429 used can be made dead. */
430 AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
431 AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
432 CLEAR_HARD_REG_SET (pending_dead_regs);
434 continue;
436 case BARRIER:
437 case NOTE:
438 continue;
440 case INSN:
441 if (GET_CODE (PATTERN (insn)) == USE)
443 /* If INSN is a USE made by update_block, we care about the
444 underlying insn. Any registers set by the underlying insn
445 are live since the insn is being done somewhere else. */
446 if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
447 mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
448 MARK_SRC_DEST_CALL);
450 /* All other USE insns are to be ignored. */
451 continue;
453 else if (GET_CODE (PATTERN (insn)) == CLOBBER)
454 continue;
455 else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
457 /* An unconditional jump can be used to fill the delay slot
458 of a call, so search for a JUMP_INSN in any position. */
459 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
461 this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
462 if (GET_CODE (this_jump_insn) == JUMP_INSN)
463 break;
467 default:
468 break;
471 if (GET_CODE (this_jump_insn) == JUMP_INSN)
473 if (jump_count++ < 10)
475 if (simplejump_p (this_jump_insn)
476 || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
478 next = JUMP_LABEL (this_jump_insn);
479 if (jump_insn == 0)
481 jump_insn = insn;
482 if (jump_target)
483 *jump_target = JUMP_LABEL (this_jump_insn);
486 else if (condjump_p (this_jump_insn)
487 || condjump_in_parallel_p (this_jump_insn))
489 struct resources target_set, target_res;
490 struct resources fallthrough_res;
492 /* We can handle conditional branches here by following
493 both paths, and then IOR the results of the two paths
494 together, which will give us registers that are dead
495 on both paths. Since this is expensive, we give it
496 a much higher cost than unconditional branches. The
497 cost was chosen so that we will follow at most 1
498 conditional branch. */
500 jump_count += 4;
501 if (jump_count >= 10)
502 break;
504 mark_referenced_resources (insn, &needed, 1);
506 /* For an annulled branch, mark_set_resources ignores slots
507 filled by instructions from the target. This is correct
508 if the branch is not taken. Since we are following both
509 paths from the branch, we must also compute correct info
510 if the branch is taken. We do this by inverting all of
511 the INSN_FROM_TARGET_P bits, calling mark_set_resources,
512 and then inverting the INSN_FROM_TARGET_P bits again. */
514 if (GET_CODE (PATTERN (insn)) == SEQUENCE
515 && INSN_ANNULLED_BRANCH_P (this_jump_insn))
517 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
518 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
519 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
521 target_set = set;
522 mark_set_resources (insn, &target_set, 0,
523 MARK_SRC_DEST_CALL);
525 for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
526 INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
527 = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
529 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
531 else
533 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
534 target_set = set;
537 target_res = *res;
538 COPY_HARD_REG_SET (scratch, target_set.regs);
539 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
540 AND_COMPL_HARD_REG_SET (target_res.regs, scratch);
542 fallthrough_res = *res;
543 COPY_HARD_REG_SET (scratch, set.regs);
544 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
545 AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
547 find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
548 &target_res, 0, jump_count,
549 target_set, needed);
550 find_dead_or_set_registers (next,
551 &fallthrough_res, 0, jump_count,
552 set, needed);
553 IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
554 AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
555 break;
557 else
558 break;
560 else
562 /* Don't try this optimization if we expired our jump count
563 above, since that would mean there may be an infinite loop
564 in the function being compiled. */
565 jump_insn = 0;
566 break;
570 mark_referenced_resources (insn, &needed, 1);
571 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
573 COPY_HARD_REG_SET (scratch, set.regs);
574 AND_COMPL_HARD_REG_SET (scratch, needed.regs);
575 AND_COMPL_HARD_REG_SET (res->regs, scratch);
578 return jump_insn;
581 /* Given X, a part of an insn, and a pointer to a `struct resource',
582 RES, indicate which resources are modified by the insn. If
583 MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
584    set by the called routine.  If MARK_TYPE is MARK_DEST, only mark SET_DESTs.
586 If IN_DEST is nonzero, it means we are inside a SET. Otherwise,
587 objects are being referenced instead of set.
589 We never mark the insn as modifying the condition code unless it explicitly
590 SETs CC0 even though this is not totally correct. The reason for this is
591 that we require a SET of CC0 to immediately precede the reference to CC0.
592 So if some other insn sets CC0 as a side-effect, we know it cannot affect
593 our computation and thus may be placed in a delay slot. */
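/* Roughly, the three MARK_TYPE values used in this file are: MARK_SRC_DEST
   marks only what the rtl itself modifies (including autoincrements in
   SET_SRC), MARK_SRC_DEST_CALL additionally includes everything a called
   routine may set, and MARK_DEST restricts marking to the SET_DESTs
   themselves (see the SET, SIGN_EXTRACT, ZERO_EXTRACT and STRICT_LOW_PART
   cases below).  */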
595 void
596 mark_set_resources (x, res, in_dest, mark_type)
597 register rtx x;
598 register struct resources *res;
599 int in_dest;
600 enum mark_resource_type mark_type;
602 enum rtx_code code;
603 int i, j;
604 unsigned int r;
605 const char *format_ptr;
607 restart:
609 code = GET_CODE (x);
611 switch (code)
613 case NOTE:
614 case BARRIER:
615 case CODE_LABEL:
616 case USE:
617 case CONST_INT:
618 case CONST_DOUBLE:
619 case LABEL_REF:
620 case SYMBOL_REF:
621 case CONST:
622 case PC:
623 /* These don't set any resources. */
624 return;
626 case CC0:
627 if (in_dest)
628 res->cc = 1;
629 return;
631 case CALL_INSN:
632 /* Called routine modifies the condition code, memory, any registers
633 that aren't saved across calls, global registers and anything
634 explicitly CLOBBERed immediately after the CALL_INSN. */
636 if (mark_type == MARK_SRC_DEST_CALL)
638 rtx next = NEXT_INSN (x);
639 rtx prev = PREV_INSN (x);
640 rtx link;
642 res->cc = res->memory = 1;
643 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
644 if (call_used_regs[r] || global_regs[r])
645 SET_HARD_REG_BIT (res->regs, r);
647 /* If X is part of a delay slot sequence, then NEXT should be
648 the first insn after the sequence. */
649 if (NEXT_INSN (prev) != x)
650 next = NEXT_INSN (NEXT_INSN (prev));
652 for (link = CALL_INSN_FUNCTION_USAGE (x);
653 link; link = XEXP (link, 1))
654 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
655 mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
656 MARK_SRC_DEST);
658 /* Check for a NOTE_INSN_SETJMP. If it exists, then we must
659 assume that this call can clobber any register. */
660 if (next && GET_CODE (next) == NOTE
661 && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
662 SET_HARD_REG_SET (res->regs);
665 /* ... and also what its RTL says it modifies, if anything. */
667 case JUMP_INSN:
668 case INSN:
670 /* An insn consisting of just a CLOBBER (or USE) is just for flow
671 and doesn't actually do anything, so we ignore it. */
673 #ifdef INSN_SETS_ARE_DELAYED
674 if (mark_type != MARK_SRC_DEST_CALL
675 && INSN_SETS_ARE_DELAYED (x))
676 return;
677 #endif
679 x = PATTERN (x);
680 if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
681 goto restart;
682 return;
684 case SET:
685 /* If the source of a SET is a CALL, this is actually done by
686 the called routine. So only include it if we are to include the
687 effects of the calling routine. */
689 mark_set_resources (SET_DEST (x), res,
690 (mark_type == MARK_SRC_DEST_CALL
691 || GET_CODE (SET_SRC (x)) != CALL),
692 mark_type);
694 if (mark_type != MARK_DEST)
695 mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
696 return;
698 case CLOBBER:
699 mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
700 return;
702 case SEQUENCE:
703 for (i = 0; i < XVECLEN (x, 0); i++)
704 if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
705 && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
706 mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
707 return;
709 case POST_INC:
710 case PRE_INC:
711 case POST_DEC:
712 case PRE_DEC:
713 mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
714 return;
716 case SIGN_EXTRACT:
717 case ZERO_EXTRACT:
718 if (! (mark_type == MARK_DEST && in_dest))
720 mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
721 mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
722 mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
724 return;
726 case MEM:
727 if (in_dest)
729 res->memory = 1;
730 res->unch_memory |= RTX_UNCHANGING_P (x);
731 res->volatil |= MEM_VOLATILE_P (x);
734 mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
735 return;
737 case SUBREG:
738 if (in_dest)
740 if (GET_CODE (SUBREG_REG (x)) != REG)
741 mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
742 else
744 unsigned int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
745 unsigned int last_regno
746 = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
748 for (r = regno; r < last_regno; r++)
749 SET_HARD_REG_BIT (res->regs, r);
752 return;
754 case REG:
755 if (in_dest)
756 for (r = 0; r < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); r++)
757 SET_HARD_REG_BIT (res->regs, REGNO (x) + r);
758 return;
760 case STRICT_LOW_PART:
761 if (! (mark_type == MARK_DEST && in_dest))
763 mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
764 return;
767 case UNSPEC_VOLATILE:
768 case ASM_INPUT:
769 /* Traditional asm's are always volatile. */
770 res->volatil = 1;
771 return;
773 case TRAP_IF:
774 res->volatil = 1;
775 break;
777 case ASM_OPERANDS:
778 res->volatil |= MEM_VOLATILE_P (x);
780 /* For all ASM_OPERANDS, we must traverse the vector of input operands.
781	 We cannot just fall through here since then we would be confused
782	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
783	 a traditional asm, unlike its normal usage.  */
785 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
786 mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
787 MARK_SRC_DEST);
788 return;
790 default:
791 break;
794 /* Process each sub-expression and flag what it needs. */
795 format_ptr = GET_RTX_FORMAT (code);
796 for (i = 0; i < GET_RTX_LENGTH (code); i++)
797 switch (*format_ptr++)
799 case 'e':
800 mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
801 break;
803 case 'E':
804 for (j = 0; j < XVECLEN (x, i); j++)
805 mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
806 break;
810 /* Set the resources that are live at TARGET.
812 If TARGET is zero, we refer to the end of the current function and can
813 return our precomputed value.
815 Otherwise, we try to find out what is live by consulting the basic block
816 information. This is tricky, because we must consider the actions of
817 reload and jump optimization, which occur after the basic block information
818 has been computed.
820    Accordingly, we proceed as follows:
822 We find the previous BARRIER and look at all immediately following labels
823 (with no intervening active insns) to see if any of them start a basic
824 block. If we hit the start of the function first, we use block 0.
826    Once we have found a basic block and a corresponding first insn, we can
827 accurately compute the live status from basic_block_live_regs and
828 reg_renumber. (By starting at a label following a BARRIER, we are immune
829 to actions taken by reload and jump.) Then we scan all insns between
830 that point and our target. For each CLOBBER (or for call-clobbered regs
831    when we pass a CALL_INSN), mark the appropriate registers as dead.  For
832 a SET, mark them as live.
834 We have to be careful when using REG_DEAD notes because they are not
835 updated by such things as find_equiv_reg. So keep track of registers
836 marked as dead that haven't been assigned to, and mark them dead at the
837 next CODE_LABEL since reload and jump won't propagate values across labels.
839 If we cannot find the start of a basic block (should be a very rare
840 case, if it can happen at all), mark everything as potentially live.
842 Next, scan forward from TARGET looking for things set or clobbered
843 before they are used. These are not live.
845 Because we can be called many times on the same target, save our results
846 in a hash table indexed by INSN_UID. This is only done if the function
847 init_resource_info () was invoked before we are called. */
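/* Cached entries are validated against bb_ticks: incr_ticks_for_insn bumps
   the generation count of a block whenever an insn is moved from a jump
   target, so a stale tick forces the liveness information to be recomputed
   below rather than reused.  */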
849 void
850 mark_target_live_regs (insns, target, res)
851 rtx insns;
852 rtx target;
853 struct resources *res;
855 int b = -1;
856 int i;
857 struct target_info *tinfo = NULL;
858 rtx insn;
859 rtx jump_insn = 0;
860 rtx jump_target;
861 HARD_REG_SET scratch;
862 struct resources set, needed;
864 /* Handle end of function. */
865 if (target == 0)
867 *res = end_of_function_needs;
868 return;
871 /* We have to assume memory is needed, but the CC isn't. */
872 res->memory = 1;
873 res->volatil = res->unch_memory = 0;
874 res->cc = 0;
876 /* See if we have computed this value already. */
877 if (target_hash_table != NULL)
879 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
880 tinfo; tinfo = tinfo->next)
881 if (tinfo->uid == INSN_UID (target))
882 break;
884 /* Start by getting the basic block number. If we have saved
885 information, we can get it from there unless the insn at the
886 start of the basic block has been deleted. */
887 if (tinfo && tinfo->block != -1
888 && ! INSN_DELETED_P (BLOCK_HEAD (tinfo->block)))
889 b = tinfo->block;
892 if (b == -1)
893 b = find_basic_block (target);
895 if (target_hash_table != NULL)
897 if (tinfo)
899 /* If the information is up-to-date, use it. Otherwise, we will
900 update it below. */
901 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
903 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
904 return;
907 else
909 /* Allocate a place to put our results and chain it into the
910 hash table. */
911 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
912 tinfo->uid = INSN_UID (target);
913 tinfo->block = b;
914 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
915 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
919 CLEAR_HARD_REG_SET (pending_dead_regs);
921 /* If we found a basic block, get the live registers from it and update
922 them with anything set or killed between its start and the insn before
923 TARGET. Otherwise, we must assume everything is live. */
924 if (b != -1)
926 regset regs_live = BASIC_BLOCK (b)->global_live_at_start;
927 unsigned int j;
928 unsigned int regno;
929 rtx start_insn, stop_insn;
931 /* Compute hard regs live at start of block -- this is the real hard regs
932 marked live, plus live pseudo regs that have been renumbered to
933 hard regs. */
935 REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
937 EXECUTE_IF_SET_IN_REG_SET
938 (regs_live, FIRST_PSEUDO_REGISTER, i,
940 if (reg_renumber[i] >= 0)
942 regno = reg_renumber[i];
943 for (j = regno;
944 j < regno + HARD_REGNO_NREGS (regno,
945 PSEUDO_REGNO_MODE (i));
946 j++)
947 SET_HARD_REG_BIT (current_live_regs, j);
951 /* Get starting and ending insn, handling the case where each might
952 be a SEQUENCE. */
953 start_insn = (b == 0 ? insns : BLOCK_HEAD (b));
954 stop_insn = target;
956 if (GET_CODE (start_insn) == INSN
957 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
958 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
960 if (GET_CODE (stop_insn) == INSN
961 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
962 stop_insn = next_insn (PREV_INSN (stop_insn));
964 for (insn = start_insn; insn != stop_insn;
965 insn = next_insn_no_annul (insn))
967 rtx link;
968 rtx real_insn = insn;
970 /* If this insn is from the target of a branch, it isn't going to
971 be used in the sequel. If it is used in both cases, this
972 test will not be true. */
973 if (INSN_FROM_TARGET_P (insn))
974 continue;
976 /* If this insn is a USE made by update_block, we care about the
977 underlying insn. */
978 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
979 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
980 real_insn = XEXP (PATTERN (insn), 0);
982 if (GET_CODE (real_insn) == CALL_INSN)
984 /* CALL clobbers all call-used regs that aren't fixed except
985 sp, ap, and fp. Do this before setting the result of the
986 call live. */
987 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
988 if (call_used_regs[i]
989 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
990 && i != ARG_POINTER_REGNUM
991 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
992 && i != HARD_FRAME_POINTER_REGNUM
993 #endif
994 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
995 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
996 #endif
997 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
998 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
999 #endif
1001 CLEAR_HARD_REG_BIT (current_live_regs, i);
1003 /* A CALL_INSN sets any global register live, since it may
1004 have been modified by the call. */
1005 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1006 if (global_regs[i])
1007 SET_HARD_REG_BIT (current_live_regs, i);
1010 /* Mark anything killed in an insn to be deadened at the next
1011 label. Ignore USE insns; the only REG_DEAD notes will be for
1012 parameters. But they might be early. A CALL_INSN will usually
1013 clobber registers used for parameters. It isn't worth bothering
1014 with the unlikely case when it won't. */
1015 if ((GET_CODE (real_insn) == INSN
1016 && GET_CODE (PATTERN (real_insn)) != USE
1017 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
1018 || GET_CODE (real_insn) == JUMP_INSN
1019 || GET_CODE (real_insn) == CALL_INSN)
1021 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1022 if (REG_NOTE_KIND (link) == REG_DEAD
1023 && GET_CODE (XEXP (link, 0)) == REG
1024 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1026 int first_regno = REGNO (XEXP (link, 0));
1027 int last_regno
1028 = (first_regno
1029 + HARD_REGNO_NREGS (first_regno,
1030 GET_MODE (XEXP (link, 0))));
1032 for (i = first_regno; i < last_regno; i++)
1033 SET_HARD_REG_BIT (pending_dead_regs, i);
1036 note_stores (PATTERN (real_insn), update_live_status, NULL);
1038 /* If any registers were unused after this insn, kill them.
1039 These notes will always be accurate. */
1040 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1041 if (REG_NOTE_KIND (link) == REG_UNUSED
1042 && GET_CODE (XEXP (link, 0)) == REG
1043 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1045 int first_regno = REGNO (XEXP (link, 0));
1046 int last_regno
1047 = (first_regno
1048 + HARD_REGNO_NREGS (first_regno,
1049 GET_MODE (XEXP (link, 0))));
1051 for (i = first_regno; i < last_regno; i++)
1052 CLEAR_HARD_REG_BIT (current_live_regs, i);
1056 else if (GET_CODE (real_insn) == CODE_LABEL)
1058 /* A label clobbers the pending dead registers since neither
1059 reload nor jump will propagate a value across a label. */
1060 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
1061 CLEAR_HARD_REG_SET (pending_dead_regs);
1064 /* The beginning of the epilogue corresponds to the end of the
1065 RTL chain when there are no epilogue insns. Certain resources
1066 are implicitly required at that point. */
1067 else if (GET_CODE (real_insn) == NOTE
1068 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
1069 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
1072 COPY_HARD_REG_SET (res->regs, current_live_regs);
1073 if (tinfo != NULL)
1075 tinfo->block = b;
1076 tinfo->bb_tick = bb_ticks[b];
1079 else
1080 /* We didn't find the start of a basic block. Assume everything
1081 in use. This should happen only extremely rarely. */
1082 SET_HARD_REG_SET (res->regs);
1084 CLEAR_RESOURCE (&set);
1085 CLEAR_RESOURCE (&needed);
1087 jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
1088 set, needed);
1090 /* If we hit an unconditional branch, we have another way of finding out
1091 what is live: we can see what is live at the branch target and include
1092 anything used but not set before the branch. We add the live
1093 resources found using the test below to those found until now. */
1095 if (jump_insn)
1097 struct resources new_resources;
1098 rtx stop_insn = next_active_insn (jump_insn);
1100 mark_target_live_regs (insns, next_active_insn (jump_target),
1101 &new_resources);
1102 CLEAR_RESOURCE (&set);
1103 CLEAR_RESOURCE (&needed);
1105 /* Include JUMP_INSN in the needed registers. */
1106 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
1108 mark_referenced_resources (insn, &needed, 1);
1110 COPY_HARD_REG_SET (scratch, needed.regs);
1111 AND_COMPL_HARD_REG_SET (scratch, set.regs);
1112 IOR_HARD_REG_SET (new_resources.regs, scratch);
1114 mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
1117 IOR_HARD_REG_SET (res->regs, new_resources.regs);
1120 if (tinfo != NULL)
1122 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
1126 /* Initialize the resources required by mark_target_live_regs ().
1127 This should be invoked before the first call to mark_target_live_regs. */
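/* Concretely, this precomputes end_of_function_needs and
   start_of_epilogue_needs and allocates target_hash_table and bb_ticks;
   free_resource_info below releases the two tables.  */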
1129 void
1130 init_resource_info (epilogue_insn)
1131 rtx epilogue_insn;
1133 int i;
1135 /* Indicate what resources are required to be valid at the end of the current
1136 function. The condition code never is and memory always is. If the
1137 frame pointer is needed, it is and so is the stack pointer unless
1138 EXIT_IGNORE_STACK is non-zero. If the frame pointer is not needed, the
1139 stack pointer is. Registers used to return the function value are
1140 needed. Registers holding global variables are needed. */
1142 end_of_function_needs.cc = 0;
1143 end_of_function_needs.memory = 1;
1144 end_of_function_needs.unch_memory = 0;
1145 CLEAR_HARD_REG_SET (end_of_function_needs.regs);
1147 if (frame_pointer_needed)
1149 SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
1150 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1151 SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
1152 #endif
1153 #ifdef EXIT_IGNORE_STACK
1154 if (! EXIT_IGNORE_STACK
1155 || current_function_sp_is_unchanging)
1156 #endif
1157 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
1159 else
1160 SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
1162 if (current_function_return_rtx != 0)
1163 mark_referenced_resources (current_function_return_rtx,
1164 &end_of_function_needs, 1);
1166 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1167 if (global_regs[i]
1168 #ifdef EPILOGUE_USES
1169 || EPILOGUE_USES (i)
1170 #endif
1172 SET_HARD_REG_BIT (end_of_function_needs.regs, i);
1174 /* The registers required to be live at the end of the function are
1175 represented in the flow information as being dead just prior to
1176 reaching the end of the function. For example, the return of a value
1177 might be represented by a USE of the return register immediately
1178 followed by an unconditional jump to the return label where the
1179 return label is the end of the RTL chain. The end of the RTL chain
1180 is then taken to mean that the return register is live.
1182 This sequence is no longer maintained when epilogue instructions are
1183 added to the RTL chain. To reconstruct the original meaning, the
1184 start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
1185 point where these registers become live (start_of_epilogue_needs).
1186 If epilogue instructions are present, the registers set by those
1187 instructions won't have been processed by flow. Thus, those
1188 registers are additionally required at the end of the RTL chain
1189 (end_of_function_needs). */
1191 start_of_epilogue_needs = end_of_function_needs;
1193 while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
1194 mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
1195 MARK_SRC_DEST_CALL);
1197 /* Allocate and initialize the tables used by mark_target_live_regs. */
1198 target_hash_table = (struct target_info **)
1199 xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
1200 bb_ticks = (int *) xcalloc (n_basic_blocks, sizeof (int));
1203 /* Free up the resources allocated to mark_target_live_regs ().  This
1204 should be invoked after the last call to mark_target_live_regs (). */
1206 void
1207 free_resource_info ()
1209 if (target_hash_table != NULL)
1211 free (target_hash_table);
1212 target_hash_table = NULL;
1215 if (bb_ticks != NULL)
1217 free (bb_ticks);
1218 bb_ticks = NULL;
1222 /* Clear any hashed information that we have stored for INSN. */
1224 void
1225 clear_hashed_info_for_insn (insn)
1226 rtx insn;
1228 struct target_info *tinfo;
1230 if (target_hash_table != NULL)
1232 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
1233 tinfo; tinfo = tinfo->next)
1234 if (tinfo->uid == INSN_UID (insn))
1235 break;
1237 if (tinfo)
1238 tinfo->block = -1;
1242 /* Increment the tick count for the basic block that contains INSN. */
1244 void
1245 incr_ticks_for_insn (insn)
1246 rtx insn;
1248 int b = find_basic_block (insn);
1250 if (b != -1)
1251 bb_ticks[b]++;
1254 /* Add TRIAL to the set of resources used at the end of the current
1255 function. */
1256 void
1257 mark_end_of_function_resources (trial, include_delayed_effects)
1258 rtx trial;
1259 int include_delayed_effects;
1261 mark_referenced_resources (trial, &end_of_function_needs,
1262 include_delayed_effects);
1265 /* Try to find a hard register of mode MODE, matching the register class in
1266 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
1267 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
1268 in which case the only condition is that the register must be available
1269 before CURRENT_INSN.
1270 Registers that already have bits set in REG_SET will not be considered.
1272 If an appropriate register is available, it will be returned and the
1273 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
1274 returned. */
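/* An illustrative call (hypothetical values; "r" maps to GENERAL_REGS via
   REG_CLASS_FROM_LETTER below, and the HARD_REG_SET passed in should
   already contain any registers the caller wants excluded):

     HARD_REG_SET avoid;
     rtx scratch;

     CLEAR_HARD_REG_SET (avoid);
     scratch = find_free_register (insn, NULL_RTX, "r", SImode, &avoid);
     if (scratch == NULL_RTX)
       ... no register of class `r' is free at INSN ...  */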
1276 rtx
1277 find_free_register (current_insn, last_insn, class_str, mode, reg_set)
1278 rtx current_insn, last_insn;
1279 const char *class_str;
1280 int mode;
1281 HARD_REG_SET *reg_set;
1283 int i, j;
1284 struct resources used;
1285 unsigned char clet = class_str[0];
1286 enum reg_class class
1287 = (clet == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (clet));
1289 mark_target_live_regs (get_insns (), current_insn, &used);
1290 if (last_insn)
1291 while (current_insn != last_insn)
1293 /* Exclude anything set in this insn. */
1294 mark_set_resources (PATTERN (current_insn), &used, 0,
1295 MARK_SRC_DEST_CALL);
1296 current_insn = next_nonnote_insn (current_insn);
1300 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1302 int regno;
1303 int success;
1305 #ifdef REG_ALLOC_ORDER
1306 regno = reg_alloc_order [i];
1307 #else
1308 regno = i;
1309 #endif
1311 /* Don't allocate fixed registers. */
1312 if (fixed_regs[regno])
1313 continue;
1314 /* Make sure the register is of the right class. */
1315 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
1316 continue;
1317 /* And can support the mode we need. */
1318 if (! HARD_REGNO_MODE_OK (regno, mode))
1319 continue;
1320 /* And that we don't create an extra save/restore. */
1321 if (! call_used_regs[regno] && ! regs_ever_live[regno])
1322 continue;
1323 /* And we don't clobber traceback for noreturn functions. */
1324 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
1325 && (! reload_completed || frame_pointer_needed))
1326 continue;
1328 success = 1;
1329 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
1331 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
1332 || TEST_HARD_REG_BIT (used.regs, regno + j))
1334 success = 0;
1335 break;
1338 if (success)
1340 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
1342 SET_HARD_REG_BIT (*reg_set, regno + j);
1344 return gen_rtx_REG (mode, regno);
1347 return NULL_RTX;
1350 /* Return true if REG is dead at CURRENT_INSN. */
1352 int
1353 reg_dead_p (current_insn, reg)
1354 rtx current_insn, reg;
1356 struct resources used;
1357 int regno, j;
1359 mark_target_live_regs (get_insns (), current_insn, &used);
1361 regno = REGNO (reg);
1362 for (j = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1; j >= 0; j--)
1364 if (TEST_HARD_REG_BIT (used.regs, regno + j))
1365 return 0;
1368 return 1;