Merge from mainline
[official-gcc.git] / gcc / df-scan.c
blob934f98d05f924dfed3a32f9f979a8ad479a9fd63
1 /* FIXME: We need to go back and add the warning messages about code
2 moved across setjmp. */
5 /* Scanning of rtl for dataflow analysis.
6 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
7 Free Software Foundation, Inc.
8 Originally contributed by Michael P. Hayes
9 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
10 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
11 and Kenneth Zadeck (zadeck@naturalbridge.com).
13 This file is part of GCC.
15 GCC is free software; you can redistribute it and/or modify it under
16 the terms of the GNU General Public License as published by the Free
17 Software Foundation; either version 2, or (at your option) any later
18 version.
20 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
21 WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
23 for more details.
25 You should have received a copy of the GNU General Public License
26 along with GCC; see the file COPYING. If not, write to the Free
27 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
28 02110-1301, USA.
31 #include "config.h"
32 #include "system.h"
33 #include "coretypes.h"
34 #include "tm.h"
35 #include "rtl.h"
36 #include "tm_p.h"
37 #include "insn-config.h"
38 #include "recog.h"
39 #include "function.h"
40 #include "regs.h"
41 #include "output.h"
42 #include "alloc-pool.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "basic-block.h"
46 #include "sbitmap.h"
47 #include "bitmap.h"
48 #include "timevar.h"
49 #include "tree.h"
50 #include "target.h"
51 #include "target-def.h"
52 #include "df.h"
54 #ifndef HAVE_epilogue
55 #define HAVE_epilogue 0
56 #endif
57 #ifndef HAVE_prologue
58 #define HAVE_prologue 0
59 #endif
60 #ifndef HAVE_sibcall_epilogue
61 #define HAVE_sibcall_epilogue 0
62 #endif
64 #ifndef EPILOGUE_USES
65 #define EPILOGUE_USES(REGNO) 0
66 #endif
68 /* Indicates where we are in the compilation. */
69 int df_state;
71 /* The bitmap_obstack is used to hold some static variables that
72 should not be reset after each function is compiled. */
74 static bitmap_obstack persistent_obstack;
76 /* The set of hard registers in eliminables[i].from. */
78 static HARD_REG_SET elim_reg_set;
80 /* This is a bitmap copy of regs_invalidated_by_call so that we can
81 easily add it into bitmaps, etc. */
83 bitmap df_invalidated_by_call = NULL;
85 /* Initialize ur_in and ur_out as if all hard registers were partially
86 available. */
88 static void df_ref_record (struct dataflow *, rtx, rtx *,
89 basic_block, rtx, enum df_ref_type,
90 enum df_ref_flags, bool record_live);
91 static void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx,
92 enum df_ref_flags, bool record_live);
93 static void df_defs_record (struct dataflow *, rtx, basic_block, rtx);
94 static void df_uses_record (struct dataflow *, rtx *, enum df_ref_type,
95 basic_block, rtx, enum df_ref_flags);
97 static void df_insn_refs_record (struct dataflow *, basic_block, rtx);
98 static void df_bb_refs_record (struct dataflow *, basic_block);
99 static void df_refs_record (struct dataflow *, bitmap);
100 static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
101 basic_block, rtx, enum df_ref_type,
102 enum df_ref_flags);
103 static void df_record_entry_block_defs (struct dataflow *);
104 static void df_record_exit_block_uses (struct dataflow *);
105 static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
106 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
107 static void df_grow_insn_info (struct df *);
110 /*----------------------------------------------------------------------------
111 SCANNING DATAFLOW PROBLEM
113 There are several ways in which scanning looks just like the other
114 dataflow problems. It shares the all the mechanisms for local info
115 as well as basic block info. Where it differs is when and how often
116 it gets run. It also has no need for the iterative solver.
117 ----------------------------------------------------------------------------*/
119 /* Problem data for the scanning dataflow function. */
120 struct df_scan_problem_data
122 alloc_pool ref_pool;
123 alloc_pool insn_pool;
124 alloc_pool reg_pool;
127 typedef struct df_scan_bb_info *df_scan_bb_info_t;
129 static void
130 df_scan_free_internal (struct dataflow *dflow)
132 struct df *df = dflow->df;
133 struct df_scan_problem_data *problem_data =
134 (struct df_scan_problem_data *) dflow->problem_data;
136 free (df->def_info.regs);
137 free (df->def_info.refs);
138 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
140 free (df->use_info.regs);
141 free (df->use_info.refs);
142 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
144 free (df->insns);
145 df->insns = NULL;
146 df->insns_size = 0;
148 free (dflow->block_info);
149 dflow->block_info = NULL;
150 dflow->block_info_size = 0;
152 BITMAP_FREE (df->hardware_regs_used);
153 BITMAP_FREE (df->entry_block_defs);
154 BITMAP_FREE (df->exit_block_uses);
156 free_alloc_pool (dflow->block_pool);
157 free_alloc_pool (problem_data->ref_pool);
158 free_alloc_pool (problem_data->insn_pool);
159 free_alloc_pool (problem_data->reg_pool);
163 /* Get basic block info. */
165 struct df_scan_bb_info *
166 df_scan_get_bb_info (struct dataflow *dflow, unsigned int index)
168 gcc_assert (index < dflow->block_info_size);
169 return (struct df_scan_bb_info *) dflow->block_info[index];
173 /* Set basic block info. */
175 static void
176 df_scan_set_bb_info (struct dataflow *dflow, unsigned int index,
177 struct df_scan_bb_info *bb_info)
179 gcc_assert (index < dflow->block_info_size);
180 dflow->block_info[index] = (void *) bb_info;
184 /* Free basic block info. */
186 static void
187 df_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info)
189 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
190 if (bb_info)
192 df_bb_refs_delete (dflow, bb->index);
193 pool_free (dflow->block_pool, bb_info);
198 /* Allocate the problem data for the scanning problem. This should be
199 called when the problem is created or when the entire function is to
200 be rescanned. */
202 static void
203 df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan)
205 struct df *df = dflow->df;
206 struct df_scan_problem_data *problem_data;
207 unsigned int insn_num = get_max_uid () + 1;
208 unsigned int block_size = 50;
209 unsigned int bb_index;
210 bitmap_iterator bi;
212 /* Given the number of pools, this is really faster than tearing
213 everything apart. */
214 if (dflow->problem_data)
215 df_scan_free_internal (dflow);
217 dflow->block_pool
218 = create_alloc_pool ("df_scan_block pool",
219 sizeof (struct df_scan_bb_info),
220 block_size);
222 problem_data = XNEW (struct df_scan_problem_data);
223 dflow->problem_data = problem_data;
225 problem_data->ref_pool
226 = create_alloc_pool ("df_scan_ref pool",
227 sizeof (struct df_ref), block_size);
228 problem_data->insn_pool
229 = create_alloc_pool ("df_scan_insn pool",
230 sizeof (struct df_insn_info), block_size);
231 problem_data->reg_pool
232 = create_alloc_pool ("df_scan_reg pool",
233 sizeof (struct df_reg_info), block_size);
235 insn_num += insn_num / 4;
236 df_grow_reg_info (dflow, &df->def_info);
237 df_grow_ref_info (&df->def_info, insn_num);
239 df_grow_reg_info (dflow, &df->use_info);
240 df_grow_ref_info (&df->use_info, insn_num *2);
242 df_grow_insn_info (df);
243 df_grow_bb_info (dflow);
245 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
247 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index);
248 if (!bb_info)
250 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
251 df_scan_set_bb_info (dflow, bb_index, bb_info);
253 bb_info->artificial_defs = NULL;
254 bb_info->artificial_uses = NULL;
257 df->hardware_regs_used = BITMAP_ALLOC (NULL);
258 df->entry_block_defs = BITMAP_ALLOC (NULL);
259 df->exit_block_uses = BITMAP_ALLOC (NULL);
263 /* Free all of the data associated with the scan problem. */
265 static void
266 df_scan_free (struct dataflow *dflow)
268 struct df *df = dflow->df;
270 if (dflow->problem_data)
272 df_scan_free_internal (dflow);
273 free (dflow->problem_data);
276 if (df->blocks_to_scan)
277 BITMAP_FREE (df->blocks_to_scan);
279 if (df->blocks_to_analyze)
280 BITMAP_FREE (df->blocks_to_analyze);
282 free (dflow);
285 static void
286 df_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED)
288 struct df *df = dflow->df;
289 int i;
291 fprintf (file, " invalidated by call \t");
292 dump_bitmap (file, df_invalidated_by_call);
293 fprintf (file, " hardware regs used \t");
294 dump_bitmap (file, df->hardware_regs_used);
295 fprintf (file, " entry block uses \t");
296 dump_bitmap (file, df->entry_block_defs);
297 fprintf (file, " exit block uses \t");
298 dump_bitmap (file, df->exit_block_uses);
299 fprintf (file, " regs ever live \t");
300 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
301 if (regs_ever_live[i])
302 fprintf (file, "%d ", i);
303 fprintf (file, "\n");
306 static struct df_problem problem_SCAN =
308 DF_SCAN, /* Problem id. */
309 DF_NONE, /* Direction. */
310 df_scan_alloc, /* Allocate the problem specific data. */
311 NULL, /* Reset global information. */
312 df_scan_free_bb_info, /* Free basic block info. */
313 NULL, /* Local compute function. */
314 NULL, /* Init the solution specific data. */
315 NULL, /* Iterative solver. */
316 NULL, /* Confluence operator 0. */
317 NULL, /* Confluence operator n. */
318 NULL, /* Transfer function. */
319 NULL, /* Finalize function. */
320 df_scan_free, /* Free all of the problem information. */
321 df_scan_dump, /* Debugging. */
322 NULL /* Dependent problem. */
326 /* Create a new DATAFLOW instance and add it to an existing instance
327 of DF. The returned structure is what is used to get at the
328 solution. */
330 struct dataflow *
331 df_scan_add_problem (struct df *df)
333 return df_add_problem (df, &problem_SCAN);
336 /*----------------------------------------------------------------------------
337 Storage Allocation Utilities
338 ----------------------------------------------------------------------------*/
341 /* First, grow the reg_info information. If the current size is less than
342 the number of psuedos, grow to 25% more than the number of
343 pseudos.
345 Second, assure that all of the slots up to max_reg_num have been
346 filled with reg_info structures. */
348 static void
349 df_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info)
351 unsigned int max_reg = max_reg_num ();
352 unsigned int new_size = max_reg;
353 struct df_scan_problem_data *problem_data =
354 (struct df_scan_problem_data *) dflow->problem_data;
355 unsigned int i;
357 if (ref_info->regs_size < new_size)
359 new_size += new_size / 4;
360 ref_info->regs = xrealloc (ref_info->regs,
361 new_size *sizeof (struct df_reg_info*));
362 ref_info->regs_size = new_size;
365 for (i = ref_info->regs_inited; i < max_reg; i++)
367 struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool);
368 memset (reg_info, 0, sizeof (struct df_reg_info));
369 ref_info->regs[i] = reg_info;
372 ref_info->regs_inited = max_reg;
376 /* Grow the ref information. */
378 static void
379 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
381 if (ref_info->refs_size < new_size)
383 ref_info->refs = xrealloc (ref_info->refs,
384 new_size *sizeof (struct df_ref *));
385 memset (ref_info->refs + ref_info->refs_size, 0,
386 (new_size - ref_info->refs_size) *sizeof (struct df_ref *));
387 ref_info->refs_size = new_size;
392 /* Grow the ref information. If the current size is less than the
393 number of instructions, grow to 25% more than the number of
394 instructions. */
396 static void
397 df_grow_insn_info (struct df *df)
399 unsigned int new_size = get_max_uid () + 1;
400 if (df->insns_size < new_size)
402 new_size += new_size / 4;
403 df->insns = xrealloc (df->insns,
404 new_size *sizeof (struct df_insn_info *));
405 memset (df->insns + df->insns_size, 0,
406 (new_size - df->insns_size) *sizeof (struct df_insn_info *));
407 df->insns_size = new_size;
414 /*----------------------------------------------------------------------------
415 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
416 ----------------------------------------------------------------------------*/
418 /* Rescan some BLOCKS or all the blocks defined by the last call to
419 df_set_blocks if BLOCKS is NULL); */
421 void
422 df_rescan_blocks (struct df *df, bitmap blocks)
424 bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);
426 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
427 basic_block bb;
429 df->def_info.refs_organized = false;
430 df->use_info.refs_organized = false;
432 if (blocks)
434 int i;
436 /* Need to assure that there are space in all of the tables. */
437 unsigned int insn_num = get_max_uid () + 1;
438 insn_num += insn_num / 4;
440 df_grow_reg_info (dflow, &df->def_info);
441 df_grow_ref_info (&df->def_info, insn_num);
443 df_grow_reg_info (dflow, &df->use_info);
444 df_grow_ref_info (&df->use_info, insn_num *2);
446 df_grow_insn_info (df);
447 df_grow_bb_info (dflow);
449 bitmap_copy (local_blocks_to_scan, blocks);
450 df->def_info.add_refs_inline = true;
451 df->use_info.add_refs_inline = true;
453 for (i = df->num_problems_defined; i; i--)
455 bitmap blocks_to_reset = NULL;
456 if (dflow->problem->reset_fun)
458 if (!blocks_to_reset)
460 blocks_to_reset = BITMAP_ALLOC (NULL);
461 bitmap_copy (blocks_to_reset, local_blocks_to_scan);
462 if (df->blocks_to_scan)
463 bitmap_ior_into (blocks_to_reset, df->blocks_to_scan);
465 dflow->problem->reset_fun (dflow, blocks_to_reset);
467 if (blocks_to_reset)
468 BITMAP_FREE (blocks_to_reset);
471 df_refs_delete (dflow, local_blocks_to_scan);
473 /* This may be a mistake, but if an explicit blocks is passed in
474 and the set of blocks to analyze has been explicitly set, add
475 the extra blocks to blocks_to_analyze. The alternative is to
476 put an assert here. We do not want this to just go by
477 silently or else we may get storage leaks. */
478 if (df->blocks_to_analyze)
479 bitmap_ior_into (df->blocks_to_analyze, blocks);
481 else
483 /* If we are going to do everything, just reallocate everything.
484 Most stuff is allocated in pools so this is faster than
485 walking it. */
486 if (df->blocks_to_analyze)
487 bitmap_copy (local_blocks_to_scan, df->blocks_to_analyze);
488 else
489 FOR_ALL_BB (bb)
491 bitmap_set_bit (local_blocks_to_scan, bb->index);
493 df_scan_alloc (dflow, local_blocks_to_scan);
495 df->def_info.add_refs_inline = false;
496 df->use_info.add_refs_inline = false;
499 df_refs_record (dflow, local_blocks_to_scan);
500 #if 0
501 bitmap_print (stderr, local_blocks_to_scan, "scanning: ", "\n");
502 #endif
504 if (!df->blocks_to_scan)
505 df->blocks_to_scan = BITMAP_ALLOC (NULL);
507 bitmap_ior_into (df->blocks_to_scan, local_blocks_to_scan);
508 BITMAP_FREE (local_blocks_to_scan);
511 /* Create a new ref of type DF_REF_TYPE for register REG at address
512 LOC within INSN of BB. */
514 struct df_ref *
515 df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
516 basic_block bb,
517 enum df_ref_type ref_type,
518 enum df_ref_flags ref_flags)
520 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
521 struct df_scan_bb_info *bb_info;
523 df_grow_reg_info (dflow, &df->use_info);
524 df_grow_reg_info (dflow, &df->def_info);
525 df_grow_bb_info (dflow);
527 /* Make sure there is the bb_info for this block. */
528 bb_info = df_scan_get_bb_info (dflow, bb->index);
529 if (!bb_info)
531 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
532 df_scan_set_bb_info (dflow, bb->index, bb_info);
533 bb_info->artificial_defs = NULL;
534 bb_info->artificial_uses = NULL;
537 if (ref_type == DF_REF_REG_DEF)
538 df->def_info.add_refs_inline = true;
539 else
540 df->use_info.add_refs_inline = true;
542 return df_ref_create_structure (dflow, reg, loc, bb, insn, ref_type, ref_flags);
547 /*----------------------------------------------------------------------------
548 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
549 ----------------------------------------------------------------------------*/
552 /* Get the artifical uses for a basic block. */
554 struct df_ref *
555 df_get_artificial_defs (struct df *df, unsigned int bb_index)
557 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
558 return df_scan_get_bb_info (dflow, bb_index)->artificial_defs;
562 /* Get the artifical uses for a basic block. */
564 struct df_ref *
565 df_get_artificial_uses (struct df *df, unsigned int bb_index)
567 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
568 return df_scan_get_bb_info (dflow, bb_index)->artificial_uses;
572 /* Link REF at the front of reg_use or reg_def chain for REGNO. */
574 void
575 df_reg_chain_create (struct df_reg_info *reg_info,
576 struct df_ref *ref)
578 struct df_ref *head = reg_info->reg_chain;
579 reg_info->reg_chain = ref;
581 DF_REF_NEXT_REG (ref) = head;
583 /* We cannot actually link to the head of the chain. */
584 DF_REF_PREV_REG (ref) = NULL;
586 if (head)
587 DF_REF_PREV_REG (head) = ref;
591 /* Remove REF from the CHAIN. Return the head of the chain. This
592 will be CHAIN unless the REF was at the beginning of the chain. */
594 static struct df_ref *
595 df_ref_unlink (struct df_ref *chain, struct df_ref *ref)
597 struct df_ref *orig_chain = chain;
598 struct df_ref *prev = NULL;
599 while (chain)
601 if (chain == ref)
603 if (prev)
605 prev->next_ref = ref->next_ref;
606 ref->next_ref = NULL;
607 return orig_chain;
609 else
611 chain = ref->next_ref;
612 ref->next_ref = NULL;
613 return chain;
617 prev = chain;
618 chain = chain->next_ref;
621 /* Someone passed in a ref that was not in the chain. */
622 gcc_unreachable ();
623 return NULL;
627 /* Unlink and delete REF at the reg_use or reg_def chain. Also delete
628 the def-use or use-def chain if it exists. Returns the next ref in
629 uses or defs chain. */
631 struct df_ref *
632 df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
634 struct df *df = dflow->df;
635 struct df_ref *next = DF_REF_NEXT_REG (ref);
636 struct df_ref *prev = DF_REF_PREV_REG (ref);
637 struct df_scan_problem_data *problem_data =
638 (struct df_scan_problem_data *) dflow->problem_data;
639 struct df_reg_info *reg_info;
640 struct df_ref *next_ref = ref->next_ref;
641 unsigned int id = DF_REF_ID (ref);
643 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
645 reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
646 df->def_info.bitmap_size--;
647 if (df->def_info.refs && (id < df->def_info.refs_size))
648 DF_DEFS_SET (df, id, NULL);
650 else
652 reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
653 df->use_info.bitmap_size--;
654 if (df->use_info.refs && (id < df->use_info.refs_size))
655 DF_USES_SET (df, id, NULL);
658 /* Delete any def-use or use-def chains that start here. */
659 if (DF_REF_CHAIN (ref))
660 df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);
662 reg_info->n_refs--;
664 /* Unlink from the reg chain. If there is no prev, this is the
665 first of the list. If not, just join the next and prev. */
666 if (prev)
668 DF_REF_NEXT_REG (prev) = next;
669 if (next)
670 DF_REF_PREV_REG (next) = prev;
672 else
674 reg_info->reg_chain = next;
675 if (next)
676 DF_REF_PREV_REG (next) = NULL;
679 pool_free (problem_data->ref_pool, ref);
680 return next_ref;
684 /* Unlink REF from all def-use/use-def chains, etc. */
686 void
687 df_ref_remove (struct df *df, struct df_ref *ref)
689 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
690 if (DF_REF_REG_DEF_P (ref))
692 if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
694 struct df_scan_bb_info *bb_info
695 = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
696 bb_info->artificial_defs
697 = df_ref_unlink (bb_info->artificial_defs, ref);
699 else
700 DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)) =
701 df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);
703 if (df->def_info.add_refs_inline)
704 DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
706 else
708 if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
710 struct df_scan_bb_info *bb_info
711 = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
712 bb_info->artificial_uses
713 = df_ref_unlink (bb_info->artificial_uses, ref);
715 else
716 DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)) =
717 df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);
719 if (df->use_info.add_refs_inline)
720 DF_USES_SET (df, DF_REF_ID (ref), NULL);
723 df_reg_chain_unlink (dflow, ref);
727 /* Create the insn record for INSN. If there was one there, zero it out. */
729 static struct df_insn_info *
730 df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
732 struct df *df = dflow->df;
733 struct df_scan_problem_data *problem_data =
734 (struct df_scan_problem_data *) dflow->problem_data;
736 struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
737 if (!insn_rec)
739 insn_rec = pool_alloc (problem_data->insn_pool);
740 DF_INSN_SET (df, insn, insn_rec);
742 memset (insn_rec, 0, sizeof (struct df_insn_info));
744 return insn_rec;
748 /* Delete all of the refs information from INSN. */
750 void
751 df_insn_refs_delete (struct dataflow *dflow, rtx insn)
753 struct df *df = dflow->df;
754 unsigned int uid = INSN_UID (insn);
755 struct df_insn_info *insn_info = NULL;
756 struct df_ref *ref;
757 struct df_scan_problem_data *problem_data =
758 (struct df_scan_problem_data *) dflow->problem_data;
760 if (uid < df->insns_size)
761 insn_info = DF_INSN_UID_GET (df, uid);
763 if (insn_info)
765 ref = insn_info->defs;
766 while (ref)
767 ref = df_reg_chain_unlink (dflow, ref);
769 ref = insn_info->uses;
770 while (ref)
771 ref = df_reg_chain_unlink (dflow, ref);
773 pool_free (problem_data->insn_pool, insn_info);
774 DF_INSN_SET (df, insn, NULL);
779 /* Delete all of the refs information from basic_block with BB_INDEX. */
781 void
782 df_bb_refs_delete (struct dataflow *dflow, int bb_index)
784 struct df_ref *def;
785 struct df_ref *use;
787 struct df_scan_bb_info *bb_info
788 = df_scan_get_bb_info (dflow, bb_index);
789 rtx insn;
790 basic_block bb = BASIC_BLOCK (bb_index);
791 FOR_BB_INSNS (bb, insn)
793 if (INSN_P (insn))
795 /* Record defs within INSN. */
796 df_insn_refs_delete (dflow, insn);
800 /* Get rid of any artifical uses or defs. */
801 if (bb_info)
803 def = bb_info->artificial_defs;
804 while (def)
805 def = df_reg_chain_unlink (dflow, def);
806 bb_info->artificial_defs = NULL;
807 use = bb_info->artificial_uses;
808 while (use)
809 use = df_reg_chain_unlink (dflow, use);
810 bb_info->artificial_uses = NULL;
815 /* Delete all of the refs information from BLOCKS. */
817 void
818 df_refs_delete (struct dataflow *dflow, bitmap blocks)
820 bitmap_iterator bi;
821 unsigned int bb_index;
823 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
825 df_bb_refs_delete (dflow, bb_index);
830 /* Take build ref table for either the uses or defs from the reg-use
831 or reg-def chains. */
833 void
834 df_reorganize_refs (struct df_ref_info *ref_info)
836 unsigned int m = ref_info->regs_inited;
837 unsigned int regno;
838 unsigned int offset = 0;
839 unsigned int size = 0;
841 if (ref_info->refs_organized)
842 return;
844 if (ref_info->refs_size < ref_info->bitmap_size)
846 int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
847 df_grow_ref_info (ref_info, new_size);
850 for (regno = 0; regno < m; regno++)
852 struct df_reg_info *reg_info = ref_info->regs[regno];
853 int count = 0;
854 if (reg_info)
856 struct df_ref *ref = reg_info->reg_chain;
857 reg_info->begin = offset;
858 while (ref)
860 ref_info->refs[offset] = ref;
861 DF_REF_ID (ref) = offset++;
862 ref = DF_REF_NEXT_REG (ref);
863 count++;
864 size++;
866 reg_info->n_refs = count;
870 /* The bitmap size is not decremented when refs are deleted. So
871 reset it now that we have squished out all of the empty
872 slots. */
873 ref_info->bitmap_size = size;
874 ref_info->refs_organized = true;
875 ref_info->add_refs_inline = true;
879 /* Local miscellaneous routines. */
881 /* Local routines for recording refs. */
883 /* Set where we are in the compilation. */
885 void
886 df_set_state (int state)
888 df_state = state;
893 /*----------------------------------------------------------------------------
894 Hard core instruction scanning code. No external interfaces here,
895 just a lot of routines that look inside insns.
896 ----------------------------------------------------------------------------*/
898 /* Create a ref and add it to the reg-def or reg-use chains. */
900 static struct df_ref *
901 df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
902 basic_block bb, rtx insn,
903 enum df_ref_type ref_type,
904 enum df_ref_flags ref_flags)
906 struct df_ref *this_ref;
907 struct df *df = dflow->df;
908 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
909 struct df_scan_problem_data *problem_data =
910 (struct df_scan_problem_data *) dflow->problem_data;
912 this_ref = pool_alloc (problem_data->ref_pool);
913 DF_REF_REG (this_ref) = reg;
914 DF_REF_REGNO (this_ref) = regno;
915 DF_REF_LOC (this_ref) = loc;
916 DF_REF_INSN (this_ref) = insn;
917 DF_REF_CHAIN (this_ref) = NULL;
918 DF_REF_TYPE (this_ref) = ref_type;
919 DF_REF_FLAGS (this_ref) = ref_flags;
920 DF_REF_DATA (this_ref) = NULL;
921 DF_REF_BB (this_ref) = bb;
923 /* Link the ref into the reg_def and reg_use chains and keep a count
924 of the instances. */
925 if (ref_type == DF_REF_REG_DEF)
927 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
928 reg_info->n_refs++;
930 /* Add the ref to the reg_def chain. */
931 df_reg_chain_create (reg_info, this_ref);
932 DF_REF_ID (this_ref) = df->def_info.bitmap_size;
933 if (df->def_info.add_refs_inline)
935 if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
937 int new_size = df->def_info.bitmap_size
938 + df->def_info.bitmap_size / 4;
939 df_grow_ref_info (&df->def_info, new_size);
941 /* Add the ref to the big array of defs. */
942 DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
943 df->def_info.refs_organized = false;
946 df->def_info.bitmap_size++;
948 if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
950 struct df_scan_bb_info *bb_info
951 = df_scan_get_bb_info (dflow, bb->index);
952 this_ref->next_ref = bb_info->artificial_defs;
953 bb_info->artificial_defs = this_ref;
955 else
957 this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
958 DF_INSN_GET (df, insn)->defs = this_ref;
961 else
963 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
964 reg_info->n_refs++;
966 /* Add the ref to the reg_use chain. */
967 df_reg_chain_create (reg_info, this_ref);
968 DF_REF_ID (this_ref) = df->use_info.bitmap_size;
969 if (df->use_info.add_refs_inline)
971 if (DF_USES_SIZE (df) >= df->use_info.refs_size)
973 int new_size = df->use_info.bitmap_size
974 + df->use_info.bitmap_size / 4;
975 df_grow_ref_info (&df->use_info, new_size);
977 /* Add the ref to the big array of defs. */
978 DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
979 df->use_info.refs_organized = false;
982 df->use_info.bitmap_size++;
983 if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
985 struct df_scan_bb_info *bb_info
986 = df_scan_get_bb_info (dflow, bb->index);
987 this_ref->next_ref = bb_info->artificial_uses;
988 bb_info->artificial_uses = this_ref;
990 else
992 this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
993 DF_INSN_GET (df, insn)->uses = this_ref;
996 return this_ref;
1000 /* Create new references of type DF_REF_TYPE for each part of register REG
1001 at address LOC within INSN of BB. */
1003 static void
1004 df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
1005 basic_block bb, rtx insn,
1006 enum df_ref_type ref_type,
1007 enum df_ref_flags ref_flags,
1008 bool record_live)
1010 unsigned int regno;
1011 struct df *df = dflow->df;
1013 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
1015 /* For the reg allocator we are interested in some SUBREG rtx's, but not
1016 all. Notably only those representing a word extraction from a multi-word
1017 reg. As written in the docu those should have the form
1018 (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
1019 XXX Is that true? We could also use the global word_mode variable. */
1020 if ((df->flags & DF_SUBREGS) == 0
1021 && GET_CODE (reg) == SUBREG
1022 && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
1023 || GET_MODE_SIZE (GET_MODE (reg))
1024 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
1026 loc = &SUBREG_REG (reg);
1027 reg = *loc;
1028 ref_flags |= DF_REF_STRIPPED;
1031 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
1032 if (regno < FIRST_PSEUDO_REGISTER)
1034 int i;
1035 int endregno;
1037 if (! (df->flags & DF_HARD_REGS))
1038 return;
1040 /* GET_MODE (reg) is correct here. We do not want to go into a SUBREG
1041 for the mode, because we only want to add references to regs, which
1042 are really referenced. E.g., a (subreg:SI (reg:DI 0) 0) does _not_
1043 reference the whole reg 0 in DI mode (which would also include
1044 reg 1, at least, if 0 and 1 are SImode registers). */
1045 endregno = hard_regno_nregs[regno][GET_MODE (reg)];
1046 if (GET_CODE (reg) == SUBREG)
1047 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
1048 SUBREG_BYTE (reg), GET_MODE (reg));
1049 endregno += regno;
1051 for (i = regno; i < endregno; i++)
1053 /* Calls are handled at call site because regs_ever_live
1054 doesn't include clobbered regs, only used ones. */
1055 if (ref_type == DF_REF_REG_DEF && record_live)
1056 regs_ever_live[i] = 1;
1057 else if ((ref_type == DF_REF_REG_USE
1058 || ref_type == DF_REF_REG_MEM_STORE
1059 || ref_type == DF_REF_REG_MEM_LOAD)
1060 && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
1062 /* Set regs_ever_live on uses of non-eliminable frame
1063 pointers and arg pointers. */
1064 if (! (TEST_HARD_REG_BIT (elim_reg_set, regno)
1065 && (regno == FRAME_POINTER_REGNUM
1066 || regno == ARG_POINTER_REGNUM)))
1067 regs_ever_live[i] = 1;
1070 df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
1071 bb, insn, ref_type, ref_flags);
1074 else
1076 df_ref_create_structure (dflow, reg, loc,
1077 bb, insn, ref_type, ref_flags);
1082 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
1083 covered by the outer mode is smaller than that covered by the inner mode,
1084 is a read-modify-write operation.
1085 This function returns true iff the SUBREG X is such a SUBREG. */
1087 bool
1088 df_read_modify_subreg_p (rtx x)
1090 unsigned int isize, osize;
1091 if (GET_CODE (x) != SUBREG)
1092 return false;
1093 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
1094 osize = GET_MODE_SIZE (GET_MODE (x));
1095 return (isize > osize && isize > UNITS_PER_WORD);
/* Process all the registers defined in the rtx, X.
   Autoincrement/decrement definitions will be picked up by
   df_uses_record.

   X is either a SET/CLOBBER (or an EXPR_LIST element of a PARALLEL
   destination, via recursion).  BB and INSN locate the definition;
   FLAGS are added to every ref created; RECORD_LIVE is forwarded to
   df_ref_record.  */

static void
df_def_record_1 (struct dataflow *dflow, rtx x,
                 basic_block bb, rtx insn,
                 enum df_ref_flags flags, bool record_live)
{
  rtx *loc;
  rtx dst;

  /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
     construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;

      /* Each element defines its own register; recurse on each one.  */
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        {
          rtx temp = XVECEXP (dst, 0, i);
          if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
              || GET_CODE (temp) == SET)
            df_def_record_1 (dflow, temp, bb, insn,
                             GET_CODE (temp) == CLOBBER ? flags | DF_REF_CLOBBER : flags,
                             record_live);
        }
      return;
    }

  /* Maybe, we should flag the use of STRICT_LOW_PART somehow.  It might
     be handy for the reg allocator.  */
  /* Strip wrappers that only partially overwrite the register; each
     such layer makes the def a read-modify-write.  */
  while (GET_CODE (dst) == STRICT_LOW_PART
         || GET_CODE (dst) == ZERO_EXTRACT
         || df_read_modify_subreg_p (dst))
    {
#if 0
      /* Strict low part always contains SUBREG, but we do not want to make
         it appear outside, as whole register is always considered.  */
      if (GET_CODE (dst) == STRICT_LOW_PART)
        {
          loc = &XEXP (dst, 0);
          dst = *loc;
        }
#endif
      loc = &XEXP (dst, 0);
      dst = *loc;
      flags |= DF_REF_READ_WRITE;
    }

  /* Only REG and SUBREG-of-REG destinations produce register defs;
     MEM destinations are handled as uses by df_uses_record.  */
  if (REG_P (dst)
      || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
    df_ref_record (dflow, dst, loc, bb, insn,
                   DF_REF_REG_DEF, flags, record_live);
}
1163 /* Process all the registers defined in the pattern rtx, X. */
1165 static void
1166 df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
1168 RTX_CODE code = GET_CODE (x);
1170 if (code == SET || code == CLOBBER)
1172 /* Mark the single def within the pattern. */
1173 df_def_record_1 (dflow, x, bb, insn,
1174 code == CLOBBER ? DF_REF_CLOBBER : 0, true);
1176 else if (code == COND_EXEC)
1178 df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
1180 else if (code == PARALLEL)
1182 int i;
1184 /* Mark the multiple defs within the pattern. */
1185 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1186 df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
/* Process all the registers used in the rtx at address LOC.

   REF_TYPE classifies the use (plain use, memory load/store address);
   BB and INSN locate it; FLAGS are attached to each ref created.
   Walks X recursively, turning the innermost 'e' operand into a goto
   to avoid one level of call depth per operand.  */

static void
df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
                basic_block bb, rtx insn, enum df_ref_flags flags)
{
  RTX_CODE code;
  rtx x;
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    /* Leaf codes that contain no register uses.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE, bb, insn, flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* Only DF_REF_IN_NOTE survives into the address scan; other
         caller flags do not apply inside a memory address.  */
      df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
                      flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */

      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
        {
          loc = &SUBREG_REG (x);
          df_uses_record (dflow, loc, ref_type, bb, insn, flags);
          return;
        }
      /* ... Fall through ...  */

    case REG:
      df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
      return;

    case SET:
      {
        rtx dst = SET_DEST (x);
        gcc_assert (!(flags & DF_REF_IN_NOTE));
        df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);

        /* The destination may still contain uses (addresses, partial
           writes); classify by its code.  */
        switch (GET_CODE (dst))
          {
          case SUBREG:
            /* A partial multi-word write reads the untouched words.  */
            if (df_read_modify_subreg_p (dst))
              {
                df_uses_record (dflow, &SUBREG_REG (dst),
                                DF_REF_REG_USE, bb,
                                insn, flags | DF_REF_READ_WRITE);
                break;
              }
            /* Fall through.  */
          case REG:
          case PARALLEL:
          case SCRATCH:
          case PC:
          case CC0:
            /* Pure defs; no use recorded here.  */
            break;
          case MEM:
            /* Registers in the store address are used.  */
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_MEM_STORE,
                            bb, insn, flags);
            break;
          case STRICT_LOW_PART:
            {
              rtx *temp = &XEXP (dst, 0);
              /* A strict_low_part uses the whole REG and not just the
                 SUBREG.  */
              dst = XEXP (dst, 0);
              df_uses_record (dflow,
                              (GET_CODE (dst) == SUBREG)
                              ? &SUBREG_REG (dst) : temp,
                              DF_REF_REG_USE, bb,
                              insn, DF_REF_READ_WRITE);
            }
            break;
          case ZERO_EXTRACT:
          case SIGN_EXTRACT:
            /* The extracted-from register is read and written; the
               position/length operands are plain uses.  */
            df_uses_record (dflow, &XEXP (dst, 0),
                            DF_REF_REG_USE, bb, insn,
                            DF_REF_READ_WRITE);
            df_uses_record (dflow, &XEXP (dst, 1),
                            DF_REF_REG_USE, bb, insn, flags);
            df_uses_record (dflow, &XEXP (dst, 2),
                            DF_REF_REG_USE, bb, insn, flags);
            dst = XEXP (dst, 0);
            break;
          default:
            gcc_unreachable ();
          }
        return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be
           considered to use and clobber all hard registers, all
           pseudo-registers and all of memory.  So must TRAP_IF and
           UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu
           rounding mode.  An insn should not be moved across this
           even if it only uses pseudo-regs because it might give an
           incorrectly rounded result.

           However, flow.c's liveness computation did *not* do this,
           giving the reasoning as " ?!? Unfortunately, marking all
           hard registers as live causes massive problems for the
           register allocator and marking all pseudos as live creates
           mountains of uninitialized variable warnings."

           In order to maintain the status quo with regard to liveness
           and uses, we do what flow.c did and just mark any regs we
           can find in ASM_OPERANDS as used.  Later on, when liveness
           is computed, asm insns are scanned and regs_asm_clobbered
           is filled out.

           For all ASM_OPERANDS, we must traverse the vector of input
           operands.  We can not just fall through here since then we
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
           which do not indicate traditional asms unlike their normal
           usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn, flags);
            return;
          }
        break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      flags |= DF_REF_READ_WRITE;
      df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
                     DF_REF_REG_DEF, flags, true);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
                              bb, insn, flags);
          }
      }
  }
}
1399 /* Return true if *LOC contains an asm. */
1401 static int
1402 df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1404 if ( !*loc)
1405 return 0;
1406 if (GET_CODE (*loc) == ASM_OPERANDS)
1407 return 1;
1408 return 0;
1412 /* Return true if INSN contains an ASM. */
1414 static int
1415 df_insn_contains_asm (rtx insn)
1417 return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
/* Record all the refs for DF within INSN of basic block BB.

   Records defs from the pattern, uses from REG_EQUIV/REG_EQUAL notes
   (when DF_EQUIV_NOTES is set), the implicit uses/defs of CALL insns,
   and finally the register uses in the pattern itself.  Non-INSN_P
   insns are ignored.  */

static void
df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
{
  int i;
  struct df *df = dflow->df;

  if (INSN_P (insn))
    {
      rtx note;

      if (df_insn_contains_asm (insn))
        DF_INSN_CONTAINS_ASM (df, insn) = true;

      /* Record register defs.  */
      df_defs_record (dflow, PATTERN (insn), bb, insn);

      if (df->flags & DF_EQUIV_NOTES)
        for (note = REG_NOTES (insn); note;
             note = XEXP (note, 1))
          {
            switch (REG_NOTE_KIND (note))
              {
              case REG_EQUIV:
              case REG_EQUAL:
                df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
                                bb, insn, DF_REF_IN_NOTE);
                /* Falls through to the empty default intentionally.  */
              default:
                break;
              }
          }

      if (CALL_P (insn))
        {
          rtx note;

          /* Record the registers used to pass arguments, and explicitly
             noted as clobbered.  */
          for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
               note = XEXP (note, 1))
            {
              if (GET_CODE (XEXP (note, 0)) == USE)
                df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
                                DF_REF_REG_USE,
                                bb, insn, 0);
              else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
                {
                  df_defs_record (dflow, XEXP (note, 0), bb, insn);
                  if (REG_P (XEXP (XEXP (note, 0), 0)))
                    {
                      /* Mark every hard register covered by the
                         clobbered reg as ever-live.  */
                      rtx reg = XEXP (XEXP (note, 0), 0);
                      int regno_last;
                      int regno_first;
                      int i;

                      regno_last = regno_first = REGNO (reg);
                      if (regno_first < FIRST_PSEUDO_REGISTER)
                        regno_last
                          += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
                      for (i = regno_first; i <= regno_last; i++)
                        regs_ever_live[i] = 1;
                    }
                }
            }

          /* The stack ptr is used (honorarily) by a CALL insn.  */
          /* NOTE(review): trailing flag argument reconstructed as 0 —
             lost in extraction; confirm against pristine source.  */
          df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
                          DF_REF_REG_USE, bb, insn,
                          0);

          if (df->flags & DF_HARD_REGS)
            {
              bitmap_iterator bi;
              unsigned int ui;
              /* Calls may also reference any of the global registers,
                 so they are recorded as used.  */
              for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                if (global_regs[i])
                  df_uses_record (dflow, &regno_reg_rtx[i],
                                  DF_REF_REG_USE, bb, insn,
                                  0);
              /* Registers invalidated by the call are clobber-defs,
                 not tracked as live (record_live == false).  */
              EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
                df_ref_record (dflow, regno_reg_rtx[ui], &regno_reg_rtx[ui], bb, insn,
                               DF_REF_REG_DEF, DF_REF_CLOBBER, false);
            }
        }

      /* Record the register uses.  */
      df_uses_record (dflow, &PATTERN (insn),
                      DF_REF_REG_USE, bb, insn, 0);
    }
}
1517 static bool
1518 df_has_eh_preds (basic_block bb)
1520 edge e;
1521 edge_iterator ei;
1523 FOR_EACH_EDGE (e, ei, bb->preds)
1525 if (e->flags & EDGE_EH)
1526 return true;
1528 return false;
1531 /* Record all the refs within the basic block BB. */
1533 static void
1534 df_bb_refs_record (struct dataflow *dflow, basic_block bb)
1536 struct df *df = dflow->df;
1537 rtx insn;
1538 int luid = 0;
1539 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);
1541 /* Need to make sure that there is a record in the basic block info. */
1542 if (!bb_info)
1544 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
1545 df_scan_set_bb_info (dflow, bb->index, bb_info);
1546 bb_info->artificial_defs = NULL;
1547 bb_info->artificial_uses = NULL;
1550 /* Scan the block an insn at a time from beginning to end. */
1551 FOR_BB_INSNS (bb, insn)
1553 df_insn_create_insn_record (dflow, insn);
1554 if (INSN_P (insn))
1556 /* Record defs within INSN. */
1557 DF_INSN_LUID (df, insn) = luid++;
1558 df_insn_refs_record (dflow, bb, insn);
1560 DF_INSN_LUID (df, insn) = luid;
1563 #ifdef EH_RETURN_DATA_REGNO
1564 if ((df->flags & DF_HARD_REGS)
1565 && df_has_eh_preds (bb))
1567 unsigned int i;
1568 /* Mark the registers that will contain data for the handler. */
1569 for (i = 0; ; ++i)
1571 unsigned regno = EH_RETURN_DATA_REGNO (i);
1572 if (regno == INVALID_REGNUM)
1573 break;
1574 df_ref_record (dflow, regno_reg_rtx[regno], &regno_reg_rtx[regno],
1575 bb, NULL,
1576 DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
1577 false);
1580 #endif
1583 if ((df->flags & DF_HARD_REGS)
1584 && df_has_eh_preds (bb))
1586 #ifdef EH_USES
1587 unsigned int i;
1588 /* This code is putting in a artificial ref for the use at the
1589 TOP of the block that receives the exception. It is too
1590 cumbersome to actually put the ref on the edge. We could
1591 either model this at the top of the receiver block or the
1592 bottom of the sender block.
1594 The bottom of the sender block is problematic because not all
1595 out-edges of the a block are eh-edges. However, it is true
1596 that all edges into a block are either eh-edges or none of
1597 them are eh-edges. Thus, we can model this at the top of the
1598 eh-receiver for all of the edges at once. */
1599 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1600 if (EH_USES (i))
1601 df_uses_record (dflow, &regno_reg_rtx[i],
1602 DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
1603 DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
1604 #endif
1606 /* The following code (down thru the arg_pointer seting APPEARS
1607 to be necessary because there is nothing that actually
1608 describes what the exception handling code may actually need
1609 to keep alive. */
1610 if (reload_completed)
1612 if (frame_pointer_needed)
1614 df_uses_record (dflow, &regno_reg_rtx[FRAME_POINTER_REGNUM],
1615 DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
1616 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1617 df_uses_record (dflow, &regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
1618 DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
1619 #endif
1621 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1622 if (fixed_regs[ARG_POINTER_REGNUM])
1623 df_uses_record (dflow, &regno_reg_rtx[ARG_POINTER_REGNUM],
1624 DF_REF_REG_USE, bb, NULL,
1625 DF_REF_ARTIFICIAL);
1626 #endif
1630 if ((df->flags & DF_HARD_REGS)
1631 && bb->index >= NUM_FIXED_BLOCKS)
1633 /* Before reload, there are a few registers that must be forced
1634 live everywhere -- which might not already be the case for
1635 blocks within infinite loops. */
1636 if (! reload_completed)
1639 /* Any reference to any pseudo before reload is a potential
1640 reference of the frame pointer. */
1641 df_uses_record (dflow, &regno_reg_rtx[FRAME_POINTER_REGNUM],
1642 DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
1644 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1645 /* Pseudos with argument area equivalences may require
1646 reloading via the argument pointer. */
1647 if (fixed_regs[ARG_POINTER_REGNUM])
1648 df_uses_record (dflow, &regno_reg_rtx[ARG_POINTER_REGNUM],
1649 DF_REF_REG_USE, bb, NULL,
1650 DF_REF_ARTIFICIAL);
1651 #endif
1653 /* Any constant, or pseudo with constant equivalences, may
1654 require reloading from memory using the pic register. */
1655 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1656 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1657 df_uses_record (dflow, &regno_reg_rtx[PIC_OFFSET_TABLE_REGNUM],
1658 DF_REF_REG_USE, bb, NULL,
1659 DF_REF_ARTIFICIAL);
1661 /* The all-important stack pointer must always be live. */
1662 df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
1663 DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
1668 /* Record all the refs in the basic blocks specified by BLOCKS. */
1670 static void
1671 df_refs_record (struct dataflow *dflow, bitmap blocks)
1673 unsigned int bb_index;
1674 bitmap_iterator bi;
1676 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
1678 basic_block bb = BASIC_BLOCK (bb_index);
1679 df_bb_refs_record (dflow, bb);
1682 if (bitmap_bit_p (blocks, EXIT_BLOCK))
1683 df_record_exit_block_uses (dflow);
1685 if (bitmap_bit_p (blocks, ENTRY_BLOCK))
1686 df_record_entry_block_defs (dflow);
1690 /*----------------------------------------------------------------------------
1691 Specialized hard register scanning functions.
1692 ----------------------------------------------------------------------------*/
1694 /* Mark a register in SET. Hard registers in large modes get all
1695 of their component registers set as well. */
1697 static void
1698 df_mark_reg (rtx reg, void *vset)
1700 bitmap set = (bitmap) vset;
1701 int regno = REGNO (reg);
1703 gcc_assert (GET_MODE (reg) != BLKmode);
1705 bitmap_set_bit (set, regno);
1706 if (regno < FIRST_PSEUDO_REGISTER)
1708 int n = hard_regno_nregs[regno][GET_MODE (reg)];
1709 while (--n > 0)
1710 bitmap_set_bit (set, regno + n);
/* Record the (conservative) set of hard registers that are defined on
   entry to the function.

   Fills df->entry_block_defs and then creates an artificial def in
   ENTRY_BLOCK_PTR for each register in the set.  A no-op unless
   DF_HARD_REGS is requested.  */

static void
df_record_entry_block_defs (struct dataflow * dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  rtx r;
  struct df * df = dflow->df;

  bitmap_clear (df->entry_block_defs);

  if (! (df->flags & DF_HARD_REGS))
    return;

  /* Registers that can carry incoming function arguments.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (FUNCTION_ARG_REGNO_P (i))
#ifdef INCOMING_REGNO
        bitmap_set_bit (df->entry_block_defs, INCOMING_REGNO (i));
#else
        bitmap_set_bit (df->entry_block_defs, i);
#endif
    }

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (HAVE_prologue && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
         pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if ((call_used_regs[i] == 0) && (regs_ever_live[i]))
          bitmap_set_bit (df->entry_block_defs, i);
    }
  else
    {
#ifdef INCOMING_RETURN_ADDR_RTX
      if (REG_P (INCOMING_RETURN_ADDR_RTX))
        bitmap_set_bit (df->entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

      /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
         only STATIC_CHAIN_REGNUM is defined.  If they are different,
         we only care about the STATIC_CHAIN_INCOMING_REGNUM.  */
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
#else
#ifdef STATIC_CHAIN_REGNUM
      bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_REGNUM);
#endif
#endif

      /* The register holding the address of a returned aggregate, if
         the target passes it in a register.  */
      r = TARGET_STRUCT_VALUE_RTX (current_function_decl, true);
      if (r && REG_P (r))
        bitmap_set_bit (df->entry_block_defs, REGNO (r));
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (df->entry_block_defs, FRAME_POINTER_REGNUM);

#ifdef EH_USES
      /* The ia-64, the only machine that uses this, does not define these
         until after reload.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (EH_USES (i))
          {
            bitmap_set_bit (df->entry_block_defs, i);
          }
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (df->entry_block_defs, ARG_POINTER_REGNUM);
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        bitmap_set_bit (df->entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
#endif
    }

  /* Let the target add or remove registers from the set.  */
  targetm.live_on_entry (df->entry_block_defs);

  /* Materialize an artificial def for every register in the set.  */
  EXECUTE_IF_SET_IN_BITMAP (df->entry_block_defs, 0, i, bi)
    {
      df_ref_record (dflow, regno_reg_rtx[i], &regno_reg_rtx[i],
                     ENTRY_BLOCK_PTR, NULL,
                     DF_REF_REG_DEF, DF_REF_ARTIFICIAL , false);
    }
}
/* Record the set of hard registers that are used in the exit block.

   Fills df->exit_block_uses and then creates an artificial use in
   EXIT_BLOCK_PTR for each register in the set.  A no-op unless
   DF_HARD_REGS is requested.  */

static void
df_record_exit_block_uses (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  struct df *df = dflow->df;

  bitmap_clear (df->exit_block_uses);

  if (! (df->flags & DF_HARD_REGS))
    return;

  /* If exiting needs the right stack value, consider the stack
     pointer live at the end of the function.  */
  if ((HAVE_epilogue && epilogue_completed)
      || ! EXIT_IGNORE_STACK
      || (! FRAME_POINTER_REQUIRED
          && ! current_function_calls_alloca
          && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if (! reload_completed || frame_pointer_needed)
    {
      bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (! LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
        bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (df->exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (regs_ever_live[i] && ! LOCAL_REGNO (i)
            && ! TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
          bitmap_set_bit (df->exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
        unsigned regno = EH_RETURN_DATA_REGNO (i);
        if (regno == INVALID_REGNUM)
          break;
        bitmap_set_bit (df->exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  /* The stack-adjust register is consumed by the eh_return epilogue
     until that epilogue has actually been emitted.  */
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((! HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
        df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) df->exit_block_uses);

  /* Materialize an artificial use for every register in the set.  */
  if (df->flags & DF_HARD_REGS)
    EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi)
      df_uses_record (dflow, &regno_reg_rtx[i],
                      DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
                      DF_REF_ARTIFICIAL);
}
/* True once df_hard_reg_init has run; the one-time setup below must
   not be repeated.  */
static bool initialized = false;

/* Initialize some platform specific structures.

   Resets regs_ever_live (before reload only) and, on the first call,
   records the eliminable registers in elim_reg_set and builds the
   bitmap form of regs_invalidated_by_call.  */

void
df_hard_reg_init (void)
{
  int i;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* After reload, some ports add certain bits to regs_ever_live so
     this cannot be reset.  */

  if (!reload_completed)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  if (initialized)
    return;

  bitmap_obstack_initialize (&persistent_obstack);

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);

  /* Inconveniently, this is only readily available in hard reg set
     form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      bitmap_set_bit (df_invalidated_by_call, i);

  initialized = true;
}