PR tree-optimization/1046
[official-gcc.git] / gcc / df-scan.c
blobfea786c01962687ba58d523fff4257d2bd6da46a
1 /* FIXME: We need to go back and add the warning messages about code
2 moved across setjmp. */
5 /* Scanning of rtl for dataflow analysis.
6 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
7 Free Software Foundation, Inc.
8 Originally contributed by Michael P. Hayes
9 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
10 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
11 and Kenneth Zadeck (zadeck@naturalbridge.com).
13 This file is part of GCC.
15 GCC is free software; you can redistribute it and/or modify it under
16 the terms of the GNU General Public License as published by the Free
17 Software Foundation; either version 2, or (at your option) any later
18 version.
20 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
21 WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
23 for more details.
25 You should have received a copy of the GNU General Public License
26 along with GCC; see the file COPYING. If not, write to the Free
27 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
28 02110-1301, USA.
31 #include "config.h"
32 #include "system.h"
33 #include "coretypes.h"
34 #include "tm.h"
35 #include "rtl.h"
36 #include "tm_p.h"
37 #include "insn-config.h"
38 #include "recog.h"
39 #include "function.h"
40 #include "regs.h"
41 #include "output.h"
42 #include "alloc-pool.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "basic-block.h"
46 #include "sbitmap.h"
47 #include "bitmap.h"
48 #include "timevar.h"
49 #include "tree.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "df.h"
54 #ifndef HAVE_epilogue
55 #define HAVE_epilogue 0
56 #endif
57 #ifndef HAVE_prologue
58 #define HAVE_prologue 0
59 #endif
60 #ifndef HAVE_sibcall_epilogue
61 #define HAVE_sibcall_epilogue 0
62 #endif
64 #ifndef EPILOGUE_USES
65 #define EPILOGUE_USES(REGNO) 0
66 #endif
68 /* The bitmap_obstack is used to hold some static variables that
69 should not be reset after each function is compiled. */
71 static bitmap_obstack persistent_obstack;
73 /* The set of hard registers in eliminables[i].from. */
75 static HARD_REG_SET elim_reg_set;
77 /* This is a bitmap copy of regs_invalidated_by_call so that we can
78 easily add it into bitmaps, etc. */
80 bitmap df_invalidated_by_call = NULL;
82 /* Initialize ur_in and ur_out as if all hard registers were partially
83 available. */
85 static void df_ref_record (struct dataflow *, rtx, rtx *,
86 basic_block, rtx, enum df_ref_type,
87 enum df_ref_flags, bool record_live);
88 static void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx,
89 enum df_ref_flags, bool record_live);
90 static void df_defs_record (struct dataflow *, rtx, basic_block, rtx);
91 static void df_uses_record (struct dataflow *, rtx *, enum df_ref_type,
92 basic_block, rtx, enum df_ref_flags);
94 static void df_insn_refs_record (struct dataflow *, basic_block, rtx);
95 static void df_bb_refs_record (struct dataflow *, basic_block);
96 static void df_refs_record (struct dataflow *, bitmap);
97 static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
98 basic_block, rtx, enum df_ref_type,
99 enum df_ref_flags);
100 static void df_record_entry_block_defs (struct dataflow *);
101 static void df_record_exit_block_uses (struct dataflow *);
102 static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
103 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
104 static void df_grow_insn_info (struct df *);
107 /*----------------------------------------------------------------------------
108 SCANNING DATAFLOW PROBLEM
110 There are several ways in which scanning looks just like the other
111 dataflow problems. It shares all the mechanisms for local info
112 as well as basic block info. Where it differs is when and how often
113 it gets run. It also has no need for the iterative solver.
114 ----------------------------------------------------------------------------*/
116 /* Problem data for the scanning dataflow function. */
117 struct df_scan_problem_data
119 alloc_pool ref_pool;
120 alloc_pool insn_pool;
121 alloc_pool reg_pool;
122 alloc_pool mw_reg_pool;
123 alloc_pool mw_link_pool;
126 typedef struct df_scan_bb_info *df_scan_bb_info_t;
128 static void
129 df_scan_free_internal (struct dataflow *dflow)
131 struct df *df = dflow->df;
132 struct df_scan_problem_data *problem_data
133 = (struct df_scan_problem_data *) dflow->problem_data;
135 free (df->def_info.regs);
136 free (df->def_info.refs);
137 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
139 free (df->use_info.regs);
140 free (df->use_info.refs);
141 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
143 free (df->insns);
144 df->insns = NULL;
145 df->insns_size = 0;
147 free (dflow->block_info);
148 dflow->block_info = NULL;
149 dflow->block_info_size = 0;
151 BITMAP_FREE (df->hardware_regs_used);
152 BITMAP_FREE (df->entry_block_defs);
153 BITMAP_FREE (df->exit_block_uses);
155 free_alloc_pool (dflow->block_pool);
156 free_alloc_pool (problem_data->ref_pool);
157 free_alloc_pool (problem_data->insn_pool);
158 free_alloc_pool (problem_data->reg_pool);
159 free_alloc_pool (problem_data->mw_reg_pool);
160 free_alloc_pool (problem_data->mw_link_pool);
164 /* Get basic block info. */
166 struct df_scan_bb_info *
167 df_scan_get_bb_info (struct dataflow *dflow, unsigned int index)
169 gcc_assert (index < dflow->block_info_size);
170 return (struct df_scan_bb_info *) dflow->block_info[index];
174 /* Set basic block info. */
176 static void
177 df_scan_set_bb_info (struct dataflow *dflow, unsigned int index,
178 struct df_scan_bb_info *bb_info)
180 gcc_assert (index < dflow->block_info_size);
181 dflow->block_info[index] = (void *) bb_info;
185 /* Free basic block info. */
187 static void
188 df_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info)
190 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
191 if (bb_info)
193 df_bb_refs_delete (dflow, bb->index);
194 pool_free (dflow->block_pool, bb_info);
199 /* Allocate the problem data for the scanning problem. This should be
200 called when the problem is created or when the entire function is to
201 be rescanned. */
203 static void
204 df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
205 bitmap all_blocks ATTRIBUTE_UNUSED)
207 struct df *df = dflow->df;
208 struct df_scan_problem_data *problem_data;
209 unsigned int insn_num = get_max_uid () + 1;
210 unsigned int block_size = 50;
211 unsigned int bb_index;
212 bitmap_iterator bi;
214 /* Given the number of pools, this is really faster than tearing
215 everything apart. */
216 if (dflow->problem_data)
217 df_scan_free_internal (dflow);
219 dflow->block_pool
220 = create_alloc_pool ("df_scan_block pool",
221 sizeof (struct df_scan_bb_info),
222 block_size);
224 problem_data = XNEW (struct df_scan_problem_data);
225 dflow->problem_data = problem_data;
227 problem_data->ref_pool
228 = create_alloc_pool ("df_scan_ref pool",
229 sizeof (struct df_ref), block_size);
230 problem_data->insn_pool
231 = create_alloc_pool ("df_scan_insn pool",
232 sizeof (struct df_insn_info), block_size);
233 problem_data->reg_pool
234 = create_alloc_pool ("df_scan_reg pool",
235 sizeof (struct df_reg_info), block_size);
236 problem_data->mw_reg_pool
237 = create_alloc_pool ("df_scan_mw_reg pool",
238 sizeof (struct df_mw_hardreg), block_size);
239 problem_data->mw_link_pool
240 = create_alloc_pool ("df_scan_mw_link pool",
241 sizeof (struct df_link), block_size);
243 insn_num += insn_num / 4;
244 df_grow_reg_info (dflow, &df->def_info);
245 df_grow_ref_info (&df->def_info, insn_num);
247 df_grow_reg_info (dflow, &df->use_info);
248 df_grow_ref_info (&df->use_info, insn_num *2);
250 df_grow_insn_info (df);
251 df_grow_bb_info (dflow);
253 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
255 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index);
256 if (!bb_info)
258 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
259 df_scan_set_bb_info (dflow, bb_index, bb_info);
261 bb_info->artificial_defs = NULL;
262 bb_info->artificial_uses = NULL;
265 df->hardware_regs_used = BITMAP_ALLOC (NULL);
266 df->entry_block_defs = BITMAP_ALLOC (NULL);
267 df->exit_block_uses = BITMAP_ALLOC (NULL);
271 /* Free all of the data associated with the scan problem. */
273 static void
274 df_scan_free (struct dataflow *dflow)
276 struct df *df = dflow->df;
278 if (dflow->problem_data)
280 df_scan_free_internal (dflow);
281 free (dflow->problem_data);
284 if (df->blocks_to_scan)
285 BITMAP_FREE (df->blocks_to_scan);
287 if (df->blocks_to_analyze)
288 BITMAP_FREE (df->blocks_to_analyze);
290 free (dflow);
293 static void
294 df_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED)
296 struct df *df = dflow->df;
297 int i;
299 fprintf (file, " invalidated by call \t");
300 dump_bitmap (file, df_invalidated_by_call);
301 fprintf (file, " hardware regs used \t");
302 dump_bitmap (file, df->hardware_regs_used);
303 fprintf (file, " entry block uses \t");
304 dump_bitmap (file, df->entry_block_defs);
305 fprintf (file, " exit block uses \t");
306 dump_bitmap (file, df->exit_block_uses);
307 fprintf (file, " regs ever live \t");
308 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
309 if (regs_ever_live[i])
310 fprintf (file, "%d ", i);
311 fprintf (file, "\n");
314 static struct df_problem problem_SCAN =
316 DF_SCAN, /* Problem id. */
317 DF_NONE, /* Direction. */
318 df_scan_alloc, /* Allocate the problem specific data. */
319 NULL, /* Reset global information. */
320 df_scan_free_bb_info, /* Free basic block info. */
321 NULL, /* Local compute function. */
322 NULL, /* Init the solution specific data. */
323 NULL, /* Iterative solver. */
324 NULL, /* Confluence operator 0. */
325 NULL, /* Confluence operator n. */
326 NULL, /* Transfer function. */
327 NULL, /* Finalize function. */
328 df_scan_free, /* Free all of the problem information. */
329 df_scan_dump, /* Debugging. */
330 NULL, /* Dependent problem. */
331 0 /* Changeable flags. */
335 /* Create a new DATAFLOW instance and add it to an existing instance
336 of DF. The returned structure is what is used to get at the
337 solution. */
339 struct dataflow *
340 df_scan_add_problem (struct df *df, int flags)
342 return df_add_problem (df, &problem_SCAN, flags);
345 /*----------------------------------------------------------------------------
346 Storage Allocation Utilities
347 ----------------------------------------------------------------------------*/
350 /* First, grow the reg_info information. If the current size is less than
351 the number of psuedos, grow to 25% more than the number of
352 pseudos.
354 Second, assure that all of the slots up to max_reg_num have been
355 filled with reg_info structures. */
357 static void
358 df_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info)
360 unsigned int max_reg = max_reg_num ();
361 unsigned int new_size = max_reg;
362 struct df_scan_problem_data *problem_data
363 = (struct df_scan_problem_data *) dflow->problem_data;
364 unsigned int i;
366 if (ref_info->regs_size < new_size)
368 new_size += new_size / 4;
369 ref_info->regs = xrealloc (ref_info->regs,
370 new_size *sizeof (struct df_reg_info*));
371 ref_info->regs_size = new_size;
374 for (i = ref_info->regs_inited; i < max_reg; i++)
376 struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool);
377 memset (reg_info, 0, sizeof (struct df_reg_info));
378 ref_info->regs[i] = reg_info;
381 ref_info->regs_inited = max_reg;
385 /* Grow the ref information. */
387 static void
388 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
390 if (ref_info->refs_size < new_size)
392 ref_info->refs = xrealloc (ref_info->refs,
393 new_size *sizeof (struct df_ref *));
394 memset (ref_info->refs + ref_info->refs_size, 0,
395 (new_size - ref_info->refs_size) *sizeof (struct df_ref *));
396 ref_info->refs_size = new_size;
401 /* Grow the ref information. If the current size is less than the
402 number of instructions, grow to 25% more than the number of
403 instructions. */
405 static void
406 df_grow_insn_info (struct df *df)
408 unsigned int new_size = get_max_uid () + 1;
409 if (df->insns_size < new_size)
411 new_size += new_size / 4;
412 df->insns = xrealloc (df->insns,
413 new_size *sizeof (struct df_insn_info *));
414 memset (df->insns + df->insns_size, 0,
415 (new_size - df->insns_size) *sizeof (struct df_insn_info *));
416 df->insns_size = new_size;
423 /*----------------------------------------------------------------------------
424 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
425 ----------------------------------------------------------------------------*/
427 /* Rescan some BLOCKS or all the blocks defined by the last call to
428 df_set_blocks if BLOCKS is NULL); */
430 void
431 df_rescan_blocks (struct df *df, bitmap blocks)
433 bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);
435 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
436 basic_block bb;
438 df->def_info.refs_organized = false;
439 df->use_info.refs_organized = false;
441 if (blocks)
443 int i;
444 unsigned int bb_index;
445 bitmap_iterator bi;
446 bool cleared_bits = false;
448 /* Need to assure that there are space in all of the tables. */
449 unsigned int insn_num = get_max_uid () + 1;
450 insn_num += insn_num / 4;
452 df_grow_reg_info (dflow, &df->def_info);
453 df_grow_ref_info (&df->def_info, insn_num);
455 df_grow_reg_info (dflow, &df->use_info);
456 df_grow_ref_info (&df->use_info, insn_num *2);
458 df_grow_insn_info (df);
459 df_grow_bb_info (dflow);
461 bitmap_copy (local_blocks_to_scan, blocks);
463 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
465 basic_block bb = BASIC_BLOCK (bb_index);
466 if (!bb)
468 bitmap_clear_bit (local_blocks_to_scan, bb_index);
469 cleared_bits = true;
473 if (cleared_bits)
474 bitmap_copy (blocks, local_blocks_to_scan);
476 df->def_info.add_refs_inline = true;
477 df->use_info.add_refs_inline = true;
479 for (i = df->num_problems_defined; i; i--)
481 bitmap blocks_to_reset = NULL;
482 if (dflow->problem->reset_fun)
484 if (!blocks_to_reset)
486 blocks_to_reset = BITMAP_ALLOC (NULL);
487 bitmap_copy (blocks_to_reset, local_blocks_to_scan);
488 if (df->blocks_to_scan)
489 bitmap_ior_into (blocks_to_reset, df->blocks_to_scan);
491 dflow->problem->reset_fun (dflow, blocks_to_reset);
493 if (blocks_to_reset)
494 BITMAP_FREE (blocks_to_reset);
497 df_refs_delete (dflow, local_blocks_to_scan);
499 /* This may be a mistake, but if an explicit blocks is passed in
500 and the set of blocks to analyze has been explicitly set, add
501 the extra blocks to blocks_to_analyze. The alternative is to
502 put an assert here. We do not want this to just go by
503 silently or else we may get storage leaks. */
504 if (df->blocks_to_analyze)
505 bitmap_ior_into (df->blocks_to_analyze, blocks);
507 else
509 /* If we are going to do everything, just reallocate everything.
510 Most stuff is allocated in pools so this is faster than
511 walking it. */
512 if (df->blocks_to_analyze)
513 bitmap_copy (local_blocks_to_scan, df->blocks_to_analyze);
514 else
515 FOR_ALL_BB (bb)
517 bitmap_set_bit (local_blocks_to_scan, bb->index);
519 df_scan_alloc (dflow, local_blocks_to_scan, NULL);
521 df->def_info.add_refs_inline = false;
522 df->use_info.add_refs_inline = false;
525 df_refs_record (dflow, local_blocks_to_scan);
526 #if 0
527 bitmap_print (stderr, local_blocks_to_scan, "scanning: ", "\n");
528 #endif
530 if (!df->blocks_to_scan)
531 df->blocks_to_scan = BITMAP_ALLOC (NULL);
533 bitmap_ior_into (df->blocks_to_scan, local_blocks_to_scan);
534 BITMAP_FREE (local_blocks_to_scan);
538 /* Create a new ref of type DF_REF_TYPE for register REG at address
539 LOC within INSN of BB. */
541 struct df_ref *
542 df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
543 basic_block bb,
544 enum df_ref_type ref_type,
545 enum df_ref_flags ref_flags)
547 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
548 struct df_scan_bb_info *bb_info;
550 df_grow_reg_info (dflow, &df->use_info);
551 df_grow_reg_info (dflow, &df->def_info);
552 df_grow_bb_info (dflow);
554 /* Make sure there is the bb_info for this block. */
555 bb_info = df_scan_get_bb_info (dflow, bb->index);
556 if (!bb_info)
558 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
559 df_scan_set_bb_info (dflow, bb->index, bb_info);
560 bb_info->artificial_defs = NULL;
561 bb_info->artificial_uses = NULL;
564 if (ref_type == DF_REF_REG_DEF)
565 df->def_info.add_refs_inline = true;
566 else
567 df->use_info.add_refs_inline = true;
569 return df_ref_create_structure (dflow, reg, loc, bb, insn, ref_type, ref_flags);
574 /*----------------------------------------------------------------------------
575 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
576 ----------------------------------------------------------------------------*/
579 /* Get the artificial uses for a basic block. */
581 struct df_ref *
582 df_get_artificial_defs (struct df *df, unsigned int bb_index)
584 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
585 return df_scan_get_bb_info (dflow, bb_index)->artificial_defs;
589 /* Get the artificial uses for a basic block. */
591 struct df_ref *
592 df_get_artificial_uses (struct df *df, unsigned int bb_index)
594 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
595 return df_scan_get_bb_info (dflow, bb_index)->artificial_uses;
599 /* Link REF at the front of reg_use or reg_def chain for REGNO. */
601 void
602 df_reg_chain_create (struct df_reg_info *reg_info,
603 struct df_ref *ref)
605 struct df_ref *head = reg_info->reg_chain;
606 reg_info->reg_chain = ref;
608 DF_REF_NEXT_REG (ref) = head;
610 /* We cannot actually link to the head of the chain. */
611 DF_REF_PREV_REG (ref) = NULL;
613 if (head)
614 DF_REF_PREV_REG (head) = ref;
618 /* Remove REF from the CHAIN. Return the head of the chain. This
619 will be CHAIN unless the REF was at the beginning of the chain. */
621 static struct df_ref *
622 df_ref_unlink (struct df_ref *chain, struct df_ref *ref)
624 struct df_ref *orig_chain = chain;
625 struct df_ref *prev = NULL;
626 while (chain)
628 if (chain == ref)
630 if (prev)
632 prev->next_ref = ref->next_ref;
633 ref->next_ref = NULL;
634 return orig_chain;
636 else
638 chain = ref->next_ref;
639 ref->next_ref = NULL;
640 return chain;
644 prev = chain;
645 chain = chain->next_ref;
648 /* Someone passed in a ref that was not in the chain. */
649 gcc_unreachable ();
650 return NULL;
654 /* Unlink and delete REF at the reg_use or reg_def chain. Also delete
655 the def-use or use-def chain if it exists. Returns the next ref in
656 uses or defs chain. */
658 struct df_ref *
659 df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
661 struct df *df = dflow->df;
662 struct df_ref *next = DF_REF_NEXT_REG (ref);
663 struct df_ref *prev = DF_REF_PREV_REG (ref);
664 struct df_scan_problem_data *problem_data
665 = (struct df_scan_problem_data *) dflow->problem_data;
666 struct df_reg_info *reg_info;
667 struct df_ref *next_ref = ref->next_ref;
668 unsigned int id = DF_REF_ID (ref);
670 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
672 reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
673 df->def_info.bitmap_size--;
674 if (df->def_info.refs && (id < df->def_info.refs_size))
675 DF_DEFS_SET (df, id, NULL);
677 else
679 reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
680 df->use_info.bitmap_size--;
681 if (df->use_info.refs && (id < df->use_info.refs_size))
682 DF_USES_SET (df, id, NULL);
685 /* Delete any def-use or use-def chains that start here. */
686 if (DF_REF_CHAIN (ref))
687 df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);
689 reg_info->n_refs--;
691 /* Unlink from the reg chain. If there is no prev, this is the
692 first of the list. If not, just join the next and prev. */
693 if (prev)
695 DF_REF_NEXT_REG (prev) = next;
696 if (next)
697 DF_REF_PREV_REG (next) = prev;
699 else
701 reg_info->reg_chain = next;
702 if (next)
703 DF_REF_PREV_REG (next) = NULL;
706 pool_free (problem_data->ref_pool, ref);
707 return next_ref;
711 /* Unlink REF from all def-use/use-def chains, etc. */
713 void
714 df_ref_remove (struct df *df, struct df_ref *ref)
716 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
717 if (DF_REF_REG_DEF_P (ref))
719 if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
721 struct df_scan_bb_info *bb_info
722 = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
723 bb_info->artificial_defs
724 = df_ref_unlink (bb_info->artificial_defs, ref);
726 else
727 DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref))
728 = df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);
730 if (df->def_info.add_refs_inline)
731 DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
733 else
735 if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
737 struct df_scan_bb_info *bb_info
738 = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
739 bb_info->artificial_uses
740 = df_ref_unlink (bb_info->artificial_uses, ref);
742 else
743 DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref))
744 = df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);
746 if (df->use_info.add_refs_inline)
747 DF_USES_SET (df, DF_REF_ID (ref), NULL);
750 df_reg_chain_unlink (dflow, ref);
754 /* Create the insn record for INSN. If there was one there, zero it out. */
756 static struct df_insn_info *
757 df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
759 struct df *df = dflow->df;
760 struct df_scan_problem_data *problem_data
761 = (struct df_scan_problem_data *) dflow->problem_data;
763 struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
764 if (!insn_rec)
766 insn_rec = pool_alloc (problem_data->insn_pool);
767 DF_INSN_SET (df, insn, insn_rec);
769 memset (insn_rec, 0, sizeof (struct df_insn_info));
771 return insn_rec;
775 /* Delete all of the refs information from INSN. */
777 void
778 df_insn_refs_delete (struct dataflow *dflow, rtx insn)
780 struct df *df = dflow->df;
781 unsigned int uid = INSN_UID (insn);
782 struct df_insn_info *insn_info = NULL;
783 struct df_ref *ref;
784 struct df_scan_problem_data *problem_data
785 = (struct df_scan_problem_data *) dflow->problem_data;
787 if (uid < df->insns_size)
788 insn_info = DF_INSN_UID_GET (df, uid);
790 if (insn_info)
792 struct df_mw_hardreg *hardregs = insn_info->mw_hardregs;
794 while (hardregs)
796 struct df_mw_hardreg *next_hr = hardregs->next;
797 struct df_link *link = hardregs->regs;
798 while (link)
800 struct df_link *next_l = link->next;
801 pool_free (problem_data->mw_link_pool, link);
802 link = next_l;
805 pool_free (problem_data->mw_reg_pool, hardregs);
806 hardregs = next_hr;
809 ref = insn_info->defs;
810 while (ref)
811 ref = df_reg_chain_unlink (dflow, ref);
813 ref = insn_info->uses;
814 while (ref)
815 ref = df_reg_chain_unlink (dflow, ref);
817 pool_free (problem_data->insn_pool, insn_info);
818 DF_INSN_SET (df, insn, NULL);
823 /* Delete all of the refs information from basic_block with BB_INDEX. */
825 void
826 df_bb_refs_delete (struct dataflow *dflow, int bb_index)
828 struct df_ref *def;
829 struct df_ref *use;
831 struct df_scan_bb_info *bb_info
832 = df_scan_get_bb_info (dflow, bb_index);
833 rtx insn;
834 basic_block bb = BASIC_BLOCK (bb_index);
835 FOR_BB_INSNS (bb, insn)
837 if (INSN_P (insn))
839 /* Record defs within INSN. */
840 df_insn_refs_delete (dflow, insn);
844 /* Get rid of any artificial uses or defs. */
845 if (bb_info)
847 def = bb_info->artificial_defs;
848 while (def)
849 def = df_reg_chain_unlink (dflow, def);
850 bb_info->artificial_defs = NULL;
851 use = bb_info->artificial_uses;
852 while (use)
853 use = df_reg_chain_unlink (dflow, use);
854 bb_info->artificial_uses = NULL;
859 /* Delete all of the refs information from BLOCKS. */
861 void
862 df_refs_delete (struct dataflow *dflow, bitmap blocks)
864 bitmap_iterator bi;
865 unsigned int bb_index;
867 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
869 df_bb_refs_delete (dflow, bb_index);
874 /* Take build ref table for either the uses or defs from the reg-use
875 or reg-def chains. */
877 void
878 df_reorganize_refs (struct df_ref_info *ref_info)
880 unsigned int m = ref_info->regs_inited;
881 unsigned int regno;
882 unsigned int offset = 0;
883 unsigned int size = 0;
885 if (ref_info->refs_organized)
886 return;
888 if (ref_info->refs_size < ref_info->bitmap_size)
890 int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
891 df_grow_ref_info (ref_info, new_size);
894 for (regno = 0; regno < m; regno++)
896 struct df_reg_info *reg_info = ref_info->regs[regno];
897 int count = 0;
898 if (reg_info)
900 struct df_ref *ref = reg_info->reg_chain;
901 reg_info->begin = offset;
902 while (ref)
904 ref_info->refs[offset] = ref;
905 DF_REF_ID (ref) = offset++;
906 ref = DF_REF_NEXT_REG (ref);
907 count++;
908 size++;
910 reg_info->n_refs = count;
914 /* The bitmap size is not decremented when refs are deleted. So
915 reset it now that we have squished out all of the empty
916 slots. */
917 ref_info->bitmap_size = size;
918 ref_info->refs_organized = true;
919 ref_info->add_refs_inline = true;
923 /*----------------------------------------------------------------------------
924 Hard core instruction scanning code. No external interfaces here,
925 just a lot of routines that look inside insns.
926 ----------------------------------------------------------------------------*/
928 /* Create a ref and add it to the reg-def or reg-use chains. */
930 static struct df_ref *
931 df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
932 basic_block bb, rtx insn,
933 enum df_ref_type ref_type,
934 enum df_ref_flags ref_flags)
936 struct df_ref *this_ref;
937 struct df *df = dflow->df;
938 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
939 struct df_scan_problem_data *problem_data
940 = (struct df_scan_problem_data *) dflow->problem_data;
942 this_ref = pool_alloc (problem_data->ref_pool);
943 DF_REF_REG (this_ref) = reg;
944 DF_REF_REGNO (this_ref) = regno;
945 DF_REF_LOC (this_ref) = loc;
946 DF_REF_INSN (this_ref) = insn;
947 DF_REF_CHAIN (this_ref) = NULL;
948 DF_REF_TYPE (this_ref) = ref_type;
949 DF_REF_FLAGS (this_ref) = ref_flags;
950 DF_REF_DATA (this_ref) = NULL;
951 DF_REF_BB (this_ref) = bb;
953 /* Link the ref into the reg_def and reg_use chains and keep a count
954 of the instances. */
955 switch (ref_type)
957 case DF_REF_REG_DEF:
959 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
960 reg_info->n_refs++;
962 /* Add the ref to the reg_def chain. */
963 df_reg_chain_create (reg_info, this_ref);
964 DF_REF_ID (this_ref) = df->def_info.bitmap_size;
965 if (df->def_info.add_refs_inline)
967 if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
969 int new_size = df->def_info.bitmap_size
970 + df->def_info.bitmap_size / 4;
971 df_grow_ref_info (&df->def_info, new_size);
973 /* Add the ref to the big array of defs. */
974 DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
975 df->def_info.refs_organized = false;
978 df->def_info.bitmap_size++;
980 if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
982 struct df_scan_bb_info *bb_info
983 = df_scan_get_bb_info (dflow, bb->index);
984 this_ref->next_ref = bb_info->artificial_defs;
985 bb_info->artificial_defs = this_ref;
987 else
989 this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
990 DF_INSN_GET (df, insn)->defs = this_ref;
993 break;
995 case DF_REF_REG_MEM_LOAD:
996 case DF_REF_REG_MEM_STORE:
997 case DF_REF_REG_USE:
999 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
1000 reg_info->n_refs++;
1002 /* Add the ref to the reg_use chain. */
1003 df_reg_chain_create (reg_info, this_ref);
1004 DF_REF_ID (this_ref) = df->use_info.bitmap_size;
1005 if (df->use_info.add_refs_inline)
1007 if (DF_USES_SIZE (df) >= df->use_info.refs_size)
1009 int new_size = df->use_info.bitmap_size
1010 + df->use_info.bitmap_size / 4;
1011 df_grow_ref_info (&df->use_info, new_size);
1013 /* Add the ref to the big array of defs. */
1014 DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
1015 df->use_info.refs_organized = false;
1018 df->use_info.bitmap_size++;
1019 if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
1021 struct df_scan_bb_info *bb_info
1022 = df_scan_get_bb_info (dflow, bb->index);
1023 this_ref->next_ref = bb_info->artificial_uses;
1024 bb_info->artificial_uses = this_ref;
1026 else
1028 this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
1029 DF_INSN_GET (df, insn)->uses = this_ref;
1032 break;
1034 default:
1035 gcc_unreachable ();
1038 return this_ref;
1042 /* Create new references of type DF_REF_TYPE for each part of register REG
1043 at address LOC within INSN of BB. */
1045 static void
1046 df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
1047 basic_block bb, rtx insn,
1048 enum df_ref_type ref_type,
1049 enum df_ref_flags ref_flags,
1050 bool record_live)
1052 struct df *df = dflow->df;
1053 rtx oldreg = reg;
1054 unsigned int regno;
1056 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
1058 /* For the reg allocator we are interested in some SUBREG rtx's, but not
1059 all. Notably only those representing a word extraction from a multi-word
1060 reg. As written in the docu those should have the form
1061 (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
1062 XXX Is that true? We could also use the global word_mode variable. */
1063 if ((dflow->flags & DF_SUBREGS) == 0
1064 && GET_CODE (reg) == SUBREG
1065 && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
1066 || GET_MODE_SIZE (GET_MODE (reg))
1067 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
1069 loc = &SUBREG_REG (reg);
1070 reg = *loc;
1071 ref_flags |= DF_REF_STRIPPED;
1074 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
1075 if (regno < FIRST_PSEUDO_REGISTER)
1077 unsigned int i;
1078 unsigned int endregno;
1079 struct df_mw_hardreg *hardreg = NULL;
1080 struct df_scan_problem_data *problem_data
1081 = (struct df_scan_problem_data *) dflow->problem_data;
1083 if (!(dflow->flags & DF_HARD_REGS))
1084 return;
1086 if (GET_CODE (reg) == SUBREG)
1088 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
1089 SUBREG_BYTE (reg), GET_MODE (reg));
1090 endregno = subreg_nregs (reg);
1092 else
1093 endregno = hard_regno_nregs[regno][GET_MODE (reg)];
1094 endregno += regno;
1096 /* If this is a multiword hardreg, we create some extra datastructures that
1097 will enable us to easily build REG_DEAD and REG_UNUSED notes. */
1098 if ((endregno != regno + 1) && insn)
1100 struct df_insn_info *insn_info = DF_INSN_GET (df, insn);
1101 /* Sets to a subreg of a multiword register are partial.
1102 Sets to a non-subreg of a multiword register are not. */
1103 if (GET_CODE (oldreg) == SUBREG)
1104 ref_flags |= DF_REF_PARTIAL;
1105 ref_flags |= DF_REF_MW_HARDREG;
1106 hardreg = pool_alloc (problem_data->mw_reg_pool);
1107 hardreg->next = insn_info->mw_hardregs;
1108 insn_info->mw_hardregs = hardreg;
1109 hardreg->type = ref_type;
1110 hardreg->flags = ref_flags;
1111 hardreg->mw_reg = reg;
1112 hardreg->regs = NULL;
1116 for (i = regno; i < endregno; i++)
1118 struct df_ref *ref;
1120 /* Calls are handled at call site because regs_ever_live
1121 doesn't include clobbered regs, only used ones. */
1122 if (ref_type == DF_REF_REG_DEF && record_live)
1123 regs_ever_live[i] = 1;
1124 else if ((ref_type == DF_REF_REG_USE
1125 || ref_type == DF_REF_REG_MEM_STORE
1126 || ref_type == DF_REF_REG_MEM_LOAD)
1127 && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
1129 /* Set regs_ever_live on uses of non-eliminable frame
1130 pointers and arg pointers. */
1131 if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
1132 && (regno == FRAME_POINTER_REGNUM
1133 || regno == ARG_POINTER_REGNUM)))
1134 regs_ever_live[i] = 1;
1137 ref = df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
1138 bb, insn, ref_type, ref_flags);
1139 if (hardreg)
1141 struct df_link *link = pool_alloc (problem_data->mw_link_pool);
1143 link->next = hardreg->regs;
1144 link->ref = ref;
1145 hardreg->regs = link;
1149 else
1151 df_ref_create_structure (dflow, reg, loc,
1152 bb, insn, ref_type, ref_flags);
1157 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
1158 covered by the outer mode is smaller than that covered by the inner mode,
1159 is a read-modify-write operation.
1160 This function returns true iff the SUBREG X is such a SUBREG. */
1162 bool
1163 df_read_modify_subreg_p (rtx x)
1165 unsigned int isize, osize;
1166 if (GET_CODE (x) != SUBREG)
1167 return false;
1168 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
1169 osize = GET_MODE_SIZE (GET_MODE (x));
1170 return (isize > osize && isize > UNITS_PER_WORD);
/* Process all the registers defined in the rtx, X.
   Autoincrement/decrement definitions will be picked up by
   df_uses_record.

   X is either a SET, a CLOBBER, or an EXPR_LIST element of a PARALLEL
   destination.  DFLOW is the scanning instance; BB and INSN locate the
   definition; FLAGS are extra df_ref_flags to attach; RECORD_LIVE is
   forwarded to df_ref_record (it controls regs_ever_live updates
   there).  */

static void
df_def_record_1 (struct dataflow *dflow, rtx x,
                 basic_block bb, rtx insn,
                 enum df_ref_flags flags, bool record_live)
{
  rtx *loc;
  rtx dst;
  bool dst_in_strict_lowpart = false;

  /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
     construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;

      /* Each element is itself a SET/CLOBBER/EXPR_LIST; recurse on each.  */
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        {
          rtx temp = XVECEXP (dst, 0, i);
          if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
              || GET_CODE (temp) == SET)
            df_def_record_1 (dflow, temp, bb, insn,
                             GET_CODE (temp) == CLOBBER
                             ? flags | DF_REF_MUST_CLOBBER : flags,
                             record_live);
        }
      return;
    }

  /* Maybe, we should flag the use of STRICT_LOW_PART somehow.  It might
     be handy for the reg allocator.  */
  while (GET_CODE (dst) == STRICT_LOW_PART
         || GET_CODE (dst) == ZERO_EXTRACT
         || df_read_modify_subreg_p (dst))
    {
#if 0
      /* Strict low part always contains SUBREG, but we do not want to make
         it appear outside, as whole register is always considered.  */
      if (GET_CODE (dst) == STRICT_LOW_PART)
        {
          loc = &XEXP (dst, 0);
          dst = *loc;
        }
#endif
      /* Strip one wrapper per iteration, remembering whether we ever
         passed through a STRICT_LOW_PART; such defs only write part of
         the destination, so they also read it (DF_REF_READ_WRITE).  */
      loc = &XEXP (dst, 0);
      if (GET_CODE (dst) == STRICT_LOW_PART)
        dst_in_strict_lowpart = true;
      dst = *loc;
      flags |= DF_REF_READ_WRITE;
    }

  /* Sets to a subreg of a single word register are partial sets if
     they are wrapped in a strict lowpart, and not partial otherwise.  */
  if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))
      && dst_in_strict_lowpart)
    flags |= DF_REF_PARTIAL;

  /* Only register (or subreg-of-register) destinations produce defs
     here; MEM destinations contribute uses via df_uses_record.  */
  if (REG_P (dst)
      || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
    df_ref_record (dflow, dst, loc, bb, insn,
                   DF_REF_REG_DEF, flags, record_live);
}
1250 /* Process all the registers defined in the pattern rtx, X. */
1252 static void
1253 df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
1255 RTX_CODE code = GET_CODE (x);
1257 if (code == SET || code == CLOBBER)
1259 /* Mark the single def within the pattern. */
1260 df_def_record_1 (dflow, x, bb, insn,
1261 code == CLOBBER ? DF_REF_MUST_CLOBBER : 0, true);
1263 else if (code == COND_EXEC)
1265 df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
1267 else if (code == PARALLEL)
1269 int i;
1271 /* Mark the multiple defs within the pattern. */
1272 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1273 df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
/* Process all the registers used in the rtx at address LOC.

   Refs are created with type REF_TYPE and flags FLAGS and attached to
   INSN in block BB (INSN may be NULL for artificial refs).  The walk
   is recursive over the rtx structure; the last operand position is
   handled via the `retry' label to save a call level.  */

static void
df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
                basic_block bb, rtx insn, enum df_ref_flags flags)
{
  RTX_CODE code;
  rtx x;

 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Constants and other register-free leaves contain no uses.  */
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE, bb, insn, flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* Scan the address.  Only the DF_REF_IN_NOTE bit of FLAGS is
         propagated into the address use.  */
      df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
                      flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
        {
          loc = &SUBREG_REG (x);
          df_uses_record (dflow, loc, ref_type, bb, insn, flags);
          return;
        }
      /* ... Fall through ...  */

    case REG:
      df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
      return;

    case SET:
      {
        rtx dst = SET_DEST (x);
        gcc_assert (!(flags & DF_REF_IN_NOTE));
        df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);

        /* The destination of a SET is a def, not a use, but parts of it
           (addresses, read-modify-write inner regs, extract operands)
           still contribute uses.  */
        switch (GET_CODE (dst))
          {
          case SUBREG:
            if (df_read_modify_subreg_p (dst))
              {
                /* Partial write: the old value of the inner register is
                   also read.  */
                df_uses_record (dflow, &SUBREG_REG (dst),
                                DF_REF_REG_USE, bb,
                                insn, flags | DF_REF_READ_WRITE);
                break;
              }
            /* Fall through.  */
          case REG:
          case PARALLEL:
          case SCRATCH:
          case PC:
          case CC0:
            break;
          case MEM:
            /* Registers in the address of a store are uses.  */
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_MEM_STORE,
                            bb, insn, flags);
            break;
          case STRICT_LOW_PART:
            {
              rtx *temp = &XEXP (dst, 0);
              /* A strict_low_part uses the whole REG and not just the
                 SUBREG.  */
              dst = XEXP (dst, 0);
              df_uses_record (dflow,
                              (GET_CODE (dst) == SUBREG)
                              ? &SUBREG_REG (dst) : temp,
                              DF_REF_REG_USE, bb,
                              insn, DF_REF_READ_WRITE);
            }
            break;
          case ZERO_EXTRACT:
          case SIGN_EXTRACT:
            /* The destination is read as well as written; the width
               and position operands are ordinary uses.  */
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_USE, bb, insn,
                            DF_REF_READ_WRITE);
            df_uses_record (dflow, &XEXP (dst, 1),
                            DF_REF_REG_USE, bb, insn, flags);
            df_uses_record (dflow, &XEXP (dst, 2),
                            DF_REF_REG_USE, bb, insn, flags);
            dst = XEXP (dst, 0);
            break;
          default:
            gcc_unreachable ();
          }
        return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be
           considered to use and clobber all hard registers, all
           pseudo-registers and all of memory.  So must TRAP_IF and
           UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu
           rounding mode.  An insn should not be moved across this
           even if it only uses pseudo-regs because it might give an
           incorrectly rounded result.

           However, flow.c's liveness computation did *not* do this,
           giving the reasoning as " ?!? Unfortunately, marking all
           hard registers as live causes massive problems for the
           register allocator and marking all pseudos as live creates
           mountains of uninitialized variable warnings."

           In order to maintain the status quo with regard to liveness
           and uses, we do what flow.c did and just mark any regs we
           can find in ASM_OPERANDS as used.  Later on, when liveness
           is computed, asm insns are scanned and regs_asm_clobbered
           is filled out.

           For all ASM_OPERANDS, we must traverse the vector of input
           operands.  We can not just fall through here since then we
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
           which do not indicate traditional asms unlike their normal
           usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn, flags);
            return;
          }
        break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      flags |= DF_REF_READ_WRITE;
      df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
                     DF_REF_REG_DEF, flags, true);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
                              bb, insn, flags);
          }
      }
  }
}
1486 /* Return true if *LOC contains an asm. */
1488 static int
1489 df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1491 if ( !*loc)
1492 return 0;
1493 if (GET_CODE (*loc) == ASM_OPERANDS)
1494 return 1;
1495 return 0;
/* Return true if INSN contains an ASM.  */

static int
df_insn_contains_asm (rtx insn)
{
  /* Walk every subexpression of INSN; for_each_rtx returns the first
     nonzero value from the callback, i.e. nonzero iff some
     subexpression is an ASM_OPERANDS.  */
  return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
}
/* Record all the refs for DF within INSN of basic block BB.

   Defs come from the pattern (and CLOBBERs in CALL_INSN_FUNCTION_USAGE);
   uses come from the pattern, optionally from REG_EQUIV/REG_EQUAL notes
   (when DF_EQUIV_NOTES is set), and from the implicit operands of
   calls (stack pointer, global registers, call-clobbered set).  */

static void
df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
{
  struct df *df = dflow->df;
  int i;

  if (INSN_P (insn))
    {
      rtx note;

      if (df_insn_contains_asm (insn))
        DF_INSN_CONTAINS_ASM (df, insn) = true;

      /* Record register defs.  */
      df_defs_record (dflow, PATTERN (insn), bb, insn);

      if (dflow->flags & DF_EQUIV_NOTES)
        for (note = REG_NOTES (insn); note;
             note = XEXP (note, 1))
          {
            switch (REG_NOTE_KIND (note))
              {
              case REG_EQUIV:
              case REG_EQUAL:
                /* Refs in notes are marked so clients can ignore them.  */
                df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
                                bb, insn, DF_REF_IN_NOTE);
              default:
                break;
              }
          }

      if (CALL_P (insn))
        {
          rtx note;

          /* Record the registers used to pass arguments, and explicitly
             noted as clobbered.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
               note = XEXP (note, 1))
            {
              if (GET_CODE (XEXP (note, 0)) == USE)
                df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
                                DF_REF_REG_USE,
                                bb, insn, 0);
              else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
                {
                  df_defs_record (dflow, XEXP (note, 0), bb, insn);
                  if (REG_P (XEXP (XEXP (note, 0), 0)))
                    {
                      rtx reg = XEXP (XEXP (note, 0), 0);
                      int regno_last;
                      int regno_first;
                      int i;

                      /* Mark every hard reg the clobbered value spans
                         as ever-live.  */
                      regno_last = regno_first = REGNO (reg);
                      if (regno_first < FIRST_PSEUDO_REGISTER)
                        regno_last
                          += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
                      for (i = regno_first; i <= regno_last; i++)
                        regs_ever_live[i] = 1;
                    }
                }
            }

          /* The stack ptr is used (honorarily) by a CALL insn.  */
          df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, insn,
                          0);

          if (dflow->flags & DF_HARD_REGS)
            {
              bitmap_iterator bi;
              unsigned int ui;
              /* Calls may also reference any of the global registers,
                 so they are recorded as used.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  df_uses_record (dflow, &regno_reg_rtx[i],
                                  DF_REF_REG_USE, bb, insn,
                                  0);
              /* Call-clobbered registers get MAY_CLOBBER defs (the call
                 might not actually change them).  */
              EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
                df_ref_record (dflow, regno_reg_rtx[ui], &regno_reg_rtx[ui], bb,
                               insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER, false);
            }
        }

      /* Record the register uses.  */
      df_uses_record (dflow, &PATTERN (insn),
                      DF_REF_REG_USE, bb, insn, 0);
    }
}
1604 static bool
1605 df_has_eh_preds (basic_block bb)
1607 edge e;
1608 edge_iterator ei;
1610 FOR_EACH_EDGE (e, ei, bb->preds)
1612 if (e->flags & EDGE_EH)
1613 return true;
1615 return false;
/* Record all the refs within the basic block BB.

   Scans every insn in order (assigning logical uids), then adds the
   artificial defs and uses for exception handling and for registers
   that must be considered live in every regular block (stack pointer,
   frame/arg/pic pointers as appropriate).  */

static void
df_bb_refs_record (struct dataflow *dflow, basic_block bb)
{
  struct df *df = dflow->df;
  rtx insn;
  int luid = 0;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);
  /* Registers collected here become artificial uses at the block end.  */
  bitmap artificial_uses_at_bottom = NULL;

  if (dflow->flags & DF_HARD_REGS)
    artificial_uses_at_bottom = BITMAP_ALLOC (NULL);

  /* Need to make sure that there is a record in the basic block info.  */
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* Scan the block an insn at a time from beginning to end.  */
  FOR_BB_INSNS (bb, insn)
    {
      df_insn_create_insn_record (dflow, insn);
      if (INSN_P (insn))
        {
          /* Record defs within INSN.  */
          DF_INSN_LUID (df, insn) = luid++;
          df_insn_refs_record (dflow, bb, insn);
        }
      /* Non-insns (notes etc.) share the luid of the following insn.  */
      DF_INSN_LUID (df, insn) = luid;
    }

#ifdef EH_RETURN_DATA_REGNO
  if ((dflow->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
        {
          unsigned regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;
          df_ref_record (dflow, regno_reg_rtx[regno], &regno_reg_rtx[regno],
                         bb, NULL,
                         DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
                         false);
        }
    }
#endif

  if ((dflow->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
#ifdef EH_USES
      unsigned int i;
      /* This code is putting in a artificial ref for the use at the
         TOP of the block that receives the exception.  It is too
         cumbersome to actually put the ref on the edge.  We could
         either model this at the top of the receiver block or the
         bottom of the sender block.

         The bottom of the sender block is problematic because not all
         out-edges of the a block are eh-edges.  However, it is true
         that all edges into a block are either eh-edges or none of
         them are eh-edges.  Thus, we can model this at the top of the
         eh-receiver for all of the edges at once.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (EH_USES (i))
          df_uses_record (dflow, &regno_reg_rtx[i],
                          DF_REF_REG_USE, bb, NULL,
                          DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
#endif

      /* The following code (down thru the arg_pointer setting APPEARS
         to be necessary because there is nothing that actually
         describes what the exception handling code may actually need
         to keep alive.  */
      if (reload_completed)
        {
          if (frame_pointer_needed)
            {
              bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              bitmap_set_bit (artificial_uses_at_bottom, HARD_FRAME_POINTER_REGNUM);
#endif
            }
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          if (fixed_regs[ARG_POINTER_REGNUM])
            bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
#endif
        }
    }

  if ((dflow->flags & DF_HARD_REGS)
      && bb->index >= NUM_FIXED_BLOCKS)
    {
      /* Before reload, there are a few registers that must be forced
         live everywhere -- which might not already be the case for
         blocks within infinite loops.  */
      if (!reload_completed)
        {
          /* Any reference to any pseudo before reload is a potential
             reference of the frame pointer.  */
          bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          /* Pseudos with argument area equivalences may require
             reloading via the argument pointer.  */
          if (fixed_regs[ARG_POINTER_REGNUM])
            bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
#endif

          /* Any constant, or pseudo with constant equivalences, may
             require reloading from memory using the pic register.  */
          if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
              && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
            bitmap_set_bit (artificial_uses_at_bottom, PIC_OFFSET_TABLE_REGNUM);
        }
      /* The all-important stack pointer must always be live.  */
      bitmap_set_bit (artificial_uses_at_bottom, STACK_POINTER_REGNUM);
    }

  if (dflow->flags & DF_HARD_REGS)
    {
      bitmap_iterator bi;
      unsigned int regno;

      /* Materialize the collected registers as artificial uses at the
         bottom of the block.  */
      EXECUTE_IF_SET_IN_BITMAP (artificial_uses_at_bottom, 0, regno, bi)
        {
          df_uses_record (dflow, &regno_reg_rtx[regno],
                          DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
        }

      BITMAP_FREE (artificial_uses_at_bottom);
    }
}
1763 /* Record all the refs in the basic blocks specified by BLOCKS. */
1765 static void
1766 df_refs_record (struct dataflow *dflow, bitmap blocks)
1768 unsigned int bb_index;
1769 bitmap_iterator bi;
1771 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
1773 basic_block bb = BASIC_BLOCK (bb_index);
1774 df_bb_refs_record (dflow, bb);
1777 if (bitmap_bit_p (blocks, EXIT_BLOCK))
1778 df_record_exit_block_uses (dflow);
1780 if (bitmap_bit_p (blocks, ENTRY_BLOCK))
1781 df_record_entry_block_defs (dflow);
1785 /*----------------------------------------------------------------------------
1786 Specialized hard register scanning functions.
1787 ----------------------------------------------------------------------------*/
1789 /* Mark a register in SET. Hard registers in large modes get all
1790 of their component registers set as well. */
1792 static void
1793 df_mark_reg (rtx reg, void *vset)
1795 bitmap set = (bitmap) vset;
1796 int regno = REGNO (reg);
1798 gcc_assert (GET_MODE (reg) != BLKmode);
1800 bitmap_set_bit (set, regno);
1801 if (regno < FIRST_PSEUDO_REGISTER)
1803 int n = hard_regno_nregs[regno][GET_MODE (reg)];
1804 while (--n > 0)
1805 bitmap_set_bit (set, regno + n);
/* Record the (conservative) set of hard registers that are defined on
   entry to the function.

   The set is accumulated in df->entry_block_defs and an artificial
   DF_REF_REG_DEF is created for each member on ENTRY_BLOCK_PTR.  A
   no-op unless DF_HARD_REGS scanning was requested.  */

static void
df_record_entry_block_defs (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  rtx r;
  struct df *df = dflow->df;

  bitmap_clear (df->entry_block_defs);

  if (!(dflow->flags & DF_HARD_REGS))
    return;

  /* Argument registers arrive carrying values; where the target maps
     outgoing to incoming registers, record the incoming one.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (FUNCTION_ARG_REGNO_P (i))
#ifdef INCOMING_REGNO
        bitmap_set_bit (df->entry_block_defs, INCOMING_REGNO (i));
#else
        bitmap_set_bit (df->entry_block_defs, i);
#endif
    }

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (HAVE_prologue && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
         pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if ((call_used_regs[i] == 0) && (regs_ever_live[i]))
          bitmap_set_bit (df->entry_block_defs, i);
    }
  else
    {
      /* The always important stack pointer.  */
      bitmap_set_bit (df->entry_block_defs, STACK_POINTER_REGNUM);

#ifdef INCOMING_RETURN_ADDR_RTX
      if (REG_P (INCOMING_RETURN_ADDR_RTX))
        bitmap_set_bit (df->entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

      /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
         only STATIC_CHAIN_REGNUM is defined.  If they are different,
         we only care about the STATIC_CHAIN_INCOMING_REGNUM.  */
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
#else
#ifdef STATIC_CHAIN_REGNUM
      bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_REGNUM);
#endif
#endif

      /* The register holding the address of a returned aggregate, if
         the target passes it in a register.  */
      r = TARGET_STRUCT_VALUE_RTX (current_function_decl, true);
      if (r && REG_P (r))
        bitmap_set_bit (df->entry_block_defs, REGNO (r));
    }

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (df->entry_block_defs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (df->entry_block_defs, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
#ifdef EH_USES
      /* The ia-64, the only machine that uses this, does not define these
         until after reload.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (EH_USES (i))
          {
            bitmap_set_bit (df->entry_block_defs, i);
          }
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (df->entry_block_defs, ARG_POINTER_REGNUM);
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        bitmap_set_bit (df->entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
#endif
    }

  /* Let the target add anything else it considers defined on entry.  */
  targetm.live_on_entry (df->entry_block_defs);

  /* Turn every bit into an artificial def on the entry block.  */
  EXECUTE_IF_SET_IN_BITMAP (df->entry_block_defs, 0, i, bi)
    {
      df_ref_record (dflow, regno_reg_rtx[i], &regno_reg_rtx[i],
                     ENTRY_BLOCK_PTR, NULL,
                     DF_REF_REG_DEF, DF_REF_ARTIFICIAL , false);
    }
}
/* Record the set of hard registers that are used in the exit block.

   The set is accumulated in df->exit_block_uses; an artificial
   DF_REF_REG_USE is created for each member on EXIT_BLOCK_PTR.  A
   no-op unless DF_HARD_REGS scanning was requested.  */

static void
df_record_exit_block_uses (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  struct df *df = dflow->df;

  bitmap_clear (df->exit_block_uses);

  if (!(dflow->flags & DF_HARD_REGS))
    return;

  /* If exiting needs the right stack value, consider the stack
     pointer live at the end of the function.  */
  if ((HAVE_epilogue && epilogue_completed)
      || !EXIT_IGNORE_STACK
      || (!FRAME_POINTER_REQUIRED
          && !current_function_calls_alloca
          && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (df->exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (regs_ever_live[i] && !LOCAL_REGNO (i)
            && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          bitmap_set_bit (df->exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
        unsigned regno = EH_RETURN_DATA_REGNO (i);
        if (regno == INVALID_REGNUM)
          break;
        bitmap_set_bit (df->exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  /* The stack-adjust register is only meaningful before the epilogue
     has consumed it.  */
  if ((!HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((!HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) df->exit_block_uses);

  /* Turn every bit into an artificial use on the exit block.  */
  if (dflow->flags & DF_HARD_REGS)
    EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi)
      df_uses_record (dflow, &regno_reg_rtx[i],
                      DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
                      DF_REF_ARTIFICIAL);
}
2031 static bool initialized = false;
2033 /* Initialize some platform specific structures. */
2035 void
2036 df_hard_reg_init (void)
2038 int i;
2039 #ifdef ELIMINABLE_REGS
2040 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2041 #endif
2042 /* After reload, some ports add certain bits to regs_ever_live so
2043 this cannot be reset. */
2045 if (!reload_completed)
2046 memset (regs_ever_live, 0, sizeof (regs_ever_live));
2048 if (initialized)
2049 return;
2051 bitmap_obstack_initialize (&persistent_obstack);
2053 /* Record which registers will be eliminated. We use this in
2054 mark_used_regs. */
2055 CLEAR_HARD_REG_SET (elim_reg_set);
2057 #ifdef ELIMINABLE_REGS
2058 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2059 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
2060 #else
2061 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
2062 #endif
2064 df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);
2066 /* Inconveniently, this is only readily available in hard reg set
2067 form. */
2068 for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
2069 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
2070 bitmap_set_bit (df_invalidated_by_call, i);
2072 initialized = true;