Mark ChangeLog
[official-gcc.git] / gcc / df-scan.c
blob216a4e6d211ba8866d88135fce8c7ae0f203189d
1 /* FIXME: We need to go back and add the warning messages about code
2 moved across setjmp. */
5 /* Scanning of rtl for dataflow analysis.
6 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
7 Free Software Foundation, Inc.
8 Originally contributed by Michael P. Hayes
9 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
10 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
11 and Kenneth Zadeck (zadeck@naturalbridge.com).
13 This file is part of GCC.
15 GCC is free software; you can redistribute it and/or modify it under
16 the terms of the GNU General Public License as published by the Free
17 Software Foundation; either version 3, or (at your option) any later
18 version.
20 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
21 WARRANTY; without even the implied warranty of MERCHANTABILITY or
22 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
23 for more details.
25 You should have received a copy of the GNU General Public License
26 along with GCC; see the file COPYING3. If not see
27 <http://www.gnu.org/licenses/>. */
29 #include "config.h"
30 #include "system.h"
31 #include "coretypes.h"
32 #include "tm.h"
33 #include "rtl.h"
34 #include "tm_p.h"
35 #include "insn-config.h"
36 #include "recog.h"
37 #include "function.h"
38 #include "regs.h"
39 #include "output.h"
40 #include "alloc-pool.h"
41 #include "flags.h"
42 #include "hard-reg-set.h"
43 #include "basic-block.h"
44 #include "sbitmap.h"
45 #include "bitmap.h"
46 #include "timevar.h"
47 #include "tree.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "df.h"
52 #ifndef HAVE_epilogue
53 #define HAVE_epilogue 0
54 #endif
55 #ifndef HAVE_prologue
56 #define HAVE_prologue 0
57 #endif
58 #ifndef HAVE_sibcall_epilogue
59 #define HAVE_sibcall_epilogue 0
60 #endif
62 #ifndef EPILOGUE_USES
63 #define EPILOGUE_USES(REGNO) 0
64 #endif
66 /* The bitmap_obstack is used to hold some static variables that
67 should not be reset after each function is compiled. */
69 static bitmap_obstack persistent_obstack;
71 /* The set of hard registers in eliminables[i].from. */
73 static HARD_REG_SET elim_reg_set;
75 /* This is a bitmap copy of regs_invalidated_by_call so that we can
76 easily add it into bitmaps, etc. */
78 bitmap df_invalidated_by_call = NULL;
80 /* Initialize ur_in and ur_out as if all hard registers were partially
81 available. */
83 static void df_ref_record (struct dataflow *, rtx, rtx *,
84 basic_block, rtx, enum df_ref_type,
85 enum df_ref_flags, bool record_live);
86 static void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx,
87 enum df_ref_flags, bool record_live);
88 static void df_defs_record (struct dataflow *, rtx, basic_block, rtx);
89 static void df_uses_record (struct dataflow *, rtx *, enum df_ref_type,
90 basic_block, rtx, enum df_ref_flags);
92 static void df_insn_refs_record (struct dataflow *, basic_block, rtx);
93 static void df_bb_refs_record (struct dataflow *, basic_block);
94 static void df_refs_record (struct dataflow *, bitmap);
95 static struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *,
96 basic_block, rtx, enum df_ref_type,
97 enum df_ref_flags);
98 static void df_record_entry_block_defs (struct dataflow *);
99 static void df_record_exit_block_uses (struct dataflow *);
100 static void df_grow_reg_info (struct dataflow *, struct df_ref_info *);
101 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
102 static void df_grow_insn_info (struct df *);
105 /*----------------------------------------------------------------------------
106 SCANNING DATAFLOW PROBLEM
108 There are several ways in which scanning looks just like the other
109 dataflow problems. It shares all the mechanisms for local info
110 as well as basic block info. Where it differs is when and how often
111 it gets run. It also has no need for the iterative solver.
112 ----------------------------------------------------------------------------*/
114 /* Problem data for the scanning dataflow function. */
115 struct df_scan_problem_data
/* Allocation pools for the small records created during scanning;
   freeing a pool in df_scan_free_internal releases every record
   allocated from it at once.  */
117 alloc_pool ref_pool;      /* struct df_ref.  */
118 alloc_pool insn_pool;     /* struct df_insn_info.  */
119 alloc_pool reg_pool;      /* struct df_reg_info.  */
120 alloc_pool mw_reg_pool;   /* struct df_mw_hardreg.  */
121 alloc_pool mw_link_pool;  /* struct df_link (multiword hardreg parts).  */
124 typedef struct df_scan_bb_info *df_scan_bb_info_t;
/* Tear down all storage owned by the scanning instance DFLOW: the
   def/use ref tables, the per-insn table, the per-block info, the
   cached bitmaps, and every allocation pool.  The df and dflow
   structures themselves are not freed here.  */
126 static void
127 df_scan_free_internal (struct dataflow *dflow)
129 struct df *df = dflow->df;
130 struct df_scan_problem_data *problem_data
131 = (struct df_scan_problem_data *) dflow->problem_data;
133 free (df->def_info.regs);
134 free (df->def_info.refs);
135 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
137 free (df->use_info.regs);
138 free (df->use_info.refs);
139 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
141 free (df->insns);
142 df->insns = NULL;
143 df->insns_size = 0;
145 free (dflow->block_info);
146 dflow->block_info = NULL;
147 dflow->block_info_size = 0;
149 BITMAP_FREE (df->hardware_regs_used);
150 BITMAP_FREE (df->entry_block_defs);
151 BITMAP_FREE (df->exit_block_uses);
/* The individual refs, insn records, reg records and mw-hardreg
   records were pool_alloc'ed from these pools, so freeing the pools
   releases them all.  */
153 free_alloc_pool (dflow->block_pool);
154 free_alloc_pool (problem_data->ref_pool);
155 free_alloc_pool (problem_data->insn_pool);
156 free_alloc_pool (problem_data->reg_pool);
157 free_alloc_pool (problem_data->mw_reg_pool);
158 free_alloc_pool (problem_data->mw_link_pool);
162 /* Get basic block info. */
164 struct df_scan_bb_info *
165 df_scan_get_bb_info (struct dataflow *dflow, unsigned int index)
167 gcc_assert (index < dflow->block_info_size);
168 return (struct df_scan_bb_info *) dflow->block_info[index];
172 /* Set basic block info. */
174 static void
175 df_scan_set_bb_info (struct dataflow *dflow, unsigned int index,
176 struct df_scan_bb_info *bb_info)
178 gcc_assert (index < dflow->block_info_size);
179 dflow->block_info[index] = (void *) bb_info;
183 /* Free basic block info. */
185 static void
186 df_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info)
188 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
189 if (bb_info)
191 df_bb_refs_delete (dflow, bb->index);
192 pool_free (dflow->block_pool, bb_info);
197 /* Allocate the problem data for the scanning problem. This should be
198 called when the problem is created or when the entire function is to
199 be rescanned. */
201 static void
202 df_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
203 bitmap all_blocks ATTRIBUTE_UNUSED)
205 struct df *df = dflow->df;
206 struct df_scan_problem_data *problem_data;
207 unsigned int insn_num = get_max_uid () + 1;
208 unsigned int block_size = 50;
209 unsigned int bb_index;
210 bitmap_iterator bi;
212 /* Given the number of pools, this is really faster than tearing
213 everything apart. */
214 if (dflow->problem_data)
215 df_scan_free_internal (dflow);
217 dflow->block_pool
218 = create_alloc_pool ("df_scan_block pool",
219 sizeof (struct df_scan_bb_info),
220 block_size);
/* NOTE: the old problem_data (if any) was freed by
   df_scan_free_internal above; allocate a fresh one.  */
222 problem_data = XNEW (struct df_scan_problem_data);
223 dflow->problem_data = problem_data;
225 problem_data->ref_pool
226 = create_alloc_pool ("df_scan_ref pool",
227 sizeof (struct df_ref), block_size);
228 problem_data->insn_pool
229 = create_alloc_pool ("df_scan_insn pool",
230 sizeof (struct df_insn_info), block_size);
231 problem_data->reg_pool
232 = create_alloc_pool ("df_scan_reg pool",
233 sizeof (struct df_reg_info), block_size);
234 problem_data->mw_reg_pool
235 = create_alloc_pool ("df_scan_mw_reg pool",
236 sizeof (struct df_mw_hardreg), block_size);
237 problem_data->mw_link_pool
238 = create_alloc_pool ("df_scan_mw_link pool",
239 sizeof (struct df_link), block_size);
/* Size the ref tables 25% beyond the current max insn uid; the use
   table gets twice as many slots as the def table.  */
241 insn_num += insn_num / 4;
242 df_grow_reg_info (dflow, &df->def_info);
243 df_grow_ref_info (&df->def_info, insn_num);
245 df_grow_reg_info (dflow, &df->use_info);
246 df_grow_ref_info (&df->use_info, insn_num *2);
248 df_grow_insn_info (df);
249 df_grow_bb_info (dflow);
/* Make sure every block to be rescanned has a (cleared) bb_info.  */
251 EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
253 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index);
254 if (!bb_info)
256 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
257 df_scan_set_bb_info (dflow, bb_index, bb_info);
259 bb_info->artificial_defs = NULL;
260 bb_info->artificial_uses = NULL;
263 df->hardware_regs_used = BITMAP_ALLOC (NULL);
264 df->entry_block_defs = BITMAP_ALLOC (NULL);
265 df->exit_block_uses = BITMAP_ALLOC (NULL);
269 /* Free all of the data associated with the scan problem. */
271 static void
272 df_scan_free (struct dataflow *dflow)
274 struct df *df = dflow->df;
276 if (dflow->problem_data)
278 df_scan_free_internal (dflow);
279 free (dflow->problem_data);
/* These two bitmaps belong to the df instance but are owned by the
   scan problem, so they are released here as well.  */
282 if (df->blocks_to_scan)
283 BITMAP_FREE (df->blocks_to_scan);
285 if (df->blocks_to_analyze)
286 BITMAP_FREE (df->blocks_to_analyze);
/* Finally release the dataflow structure itself.  */
288 free (dflow);
291 static void
292 df_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED)
294 struct df *df = dflow->df;
295 int i;
297 fprintf (file, " invalidated by call \t");
298 dump_bitmap (file, df_invalidated_by_call);
299 fprintf (file, " hardware regs used \t");
300 dump_bitmap (file, df->hardware_regs_used);
301 fprintf (file, " entry block uses \t");
302 dump_bitmap (file, df->entry_block_defs);
303 fprintf (file, " exit block uses \t");
304 dump_bitmap (file, df->exit_block_uses);
305 fprintf (file, " regs ever live \t");
306 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
307 if (regs_ever_live[i])
308 fprintf (file, "%d ", i);
309 fprintf (file, "\n");
/* Problem definition record for the scanning pass; handed to
   df_add_problem by df_scan_add_problem below.  Scanning has no
   iterative solution, so most of the solver hooks are NULL.  */
312 static struct df_problem problem_SCAN =
314 DF_SCAN, /* Problem id. */
315 DF_NONE, /* Direction. */
316 df_scan_alloc, /* Allocate the problem specific data. */
317 NULL, /* Reset global information. */
318 df_scan_free_bb_info, /* Free basic block info. */
319 NULL, /* Local compute function. */
320 NULL, /* Init the solution specific data. */
321 NULL, /* Iterative solver. */
322 NULL, /* Confluence operator 0. */
323 NULL, /* Confluence operator n. */
324 NULL, /* Transfer function. */
325 NULL, /* Finalize function. */
326 df_scan_free, /* Free all of the problem information. */
327 df_scan_dump, /* Debugging. */
328 NULL, /* Dependent problem. */
329 0 /* Changeable flags. */
333 /* Create a new DATAFLOW instance and add it to an existing instance
334 of DF. The returned structure is what is used to get at the
335 solution. FLAGS is passed through to df_add_problem unchanged. */
337 struct dataflow *
338 df_scan_add_problem (struct df *df, int flags)
340 return df_add_problem (df, &problem_SCAN, flags);
343 /*----------------------------------------------------------------------------
344 Storage Allocation Utilities
345 ----------------------------------------------------------------------------*/
348 /* First, grow the reg_info information. If the current size is less than
349 the number of pseudos, grow to 25% more than the number of
350 pseudos.
352 Second, assure that all of the slots up to max_reg_num have been
353 filled with reg_info structures. */
355 static void
356 df_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info)
358 unsigned int max_reg = max_reg_num ();
359 unsigned int new_size = max_reg;
360 struct df_scan_problem_data *problem_data
361 = (struct df_scan_problem_data *) dflow->problem_data;
362 unsigned int i;
364 if (ref_info->regs_size < new_size)
366 new_size += new_size / 4;
367 ref_info->regs = xrealloc (ref_info->regs,
368 new_size *sizeof (struct df_reg_info*));
369 ref_info->regs_size = new_size;
/* Allocate zeroed reg_info records for any newly visible regs;
   slots below regs_inited were already filled on a previous call.  */
372 for (i = ref_info->regs_inited; i < max_reg; i++)
374 struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool);
375 memset (reg_info, 0, sizeof (struct df_reg_info));
376 ref_info->regs[i] = reg_info;
379 ref_info->regs_inited = max_reg;
383 /* Grow the ref information so that REF_INFO->refs has at least
   NEW_SIZE slots, zero-filling the newly added tail.  A request that
   does not exceed the current size is a no-op.  */
385 static void
386 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
388 if (ref_info->refs_size < new_size)
390 ref_info->refs = xrealloc (ref_info->refs,
391 new_size *sizeof (struct df_ref *))
392 memset (ref_info->refs + ref_info->refs_size, 0,
393 (new_size - ref_info->refs_size) *sizeof (struct df_ref *));
394 ref_info->refs_size = new_size;
399 /* Grow the ref information. If the current size is less than the
400 number of instructions, grow to 25% more than the number of
401 instructions. New slots are zero-filled. */
403 static void
404 df_grow_insn_info (struct df *df)
406 unsigned int new_size = get_max_uid () + 1;
407 if (df->insns_size < new_size)
409 new_size += new_size / 4;
410 df->insns = xrealloc (df->insns,
411 new_size *sizeof (struct df_insn_info *));
412 memset (df->insns + df->insns_size, 0,
413 (new_size - df->insns_size) *sizeof (struct df_insn_info *));
414 df->insns_size = new_size;
421 /*----------------------------------------------------------------------------
422 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
423 ----------------------------------------------------------------------------*/
425 /* Rescan some BLOCKS or all the blocks defined by the last call to
426 df_set_blocks (if BLOCKS is NULL). */
428 void
429 df_rescan_blocks (struct df *df, bitmap blocks)
431 bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);
433 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
434 basic_block bb;
/* Any existing organized ref tables are invalidated by rescanning.  */
436 df->def_info.refs_organized = false;
437 df->use_info.refs_organized = false;
439 if (blocks)
441 int i;
442 unsigned int bb_index;
443 bitmap_iterator bi;
444 bool cleared_bits = false;
446 /* Need to assure that there is space in all of the tables. */
447 unsigned int insn_num = get_max_uid () + 1;
448 insn_num += insn_num / 4;
450 df_grow_reg_info (dflow, &df->def_info);
451 df_grow_ref_info (&df->def_info, insn_num);
453 df_grow_reg_info (dflow, &df->use_info);
454 df_grow_ref_info (&df->use_info, insn_num *2);
456 df_grow_insn_info (df);
457 df_grow_bb_info (dflow);
459 bitmap_copy (local_blocks_to_scan, blocks);
/* Drop indices of blocks that no longer exist, and reflect the
   pruning back into the caller's bitmap.  */
461 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
463 basic_block bb = BASIC_BLOCK (bb_index);
464 if (!bb)
466 bitmap_clear_bit (local_blocks_to_scan, bb_index);
467 cleared_bits = true;
471 if (cleared_bits)
472 bitmap_copy (blocks, local_blocks_to_scan);
474 df->def_info.add_refs_inline = true;
475 df->use_info.add_refs_inline = true;
/* NOTE(review): the loop counter I is never used in the body; every
   iteration tests and calls the scan problem's reset_fun rather than
   each defined problem's — confirm against how df.c iterates
   problems_in_order before changing anything here.  */
477 for (i = df->num_problems_defined; i; i--)
479 bitmap blocks_to_reset = NULL;
480 if (dflow->problem->reset_fun)
482 if (!blocks_to_reset)
484 blocks_to_reset = BITMAP_ALLOC (NULL);
485 bitmap_copy (blocks_to_reset, local_blocks_to_scan);
486 if (df->blocks_to_scan)
487 bitmap_ior_into (blocks_to_reset, df->blocks_to_scan);
489 dflow->problem->reset_fun (dflow, blocks_to_reset);
491 if (blocks_to_reset)
492 BITMAP_FREE (blocks_to_reset);
495 df_refs_delete (dflow, local_blocks_to_scan);
497 /* This may be a mistake, but if an explicit blocks is passed in
498 and the set of blocks to analyze has been explicitly set, add
499 the extra blocks to blocks_to_analyze. The alternative is to
500 put an assert here. We do not want this to just go by
501 silently or else we may get storage leaks. */
502 if (df->blocks_to_analyze)
503 bitmap_ior_into (df->blocks_to_analyze, blocks)
505 else
507 /* If we are going to do everything, just reallocate everything.
508 Most stuff is allocated in pools so this is faster than
509 walking it. */
510 if (df->blocks_to_analyze)
511 bitmap_copy (local_blocks_to_scan, df->blocks_to_analyze);
512 else
513 FOR_ALL_BB (bb)
515 bitmap_set_bit (local_blocks_to_scan, bb->index);
517 df_scan_alloc (dflow, local_blocks_to_scan, NULL);
519 df->def_info.add_refs_inline = false;
520 df->use_info.add_refs_inline = false;
/* Now actually rescan the selected blocks and remember them as
   scanned.  */
523 df_refs_record (dflow, local_blocks_to_scan);
524 #if 0
525 bitmap_print (stderr, local_blocks_to_scan, "scanning: ", "\n");
526 #endif
528 if (!df->blocks_to_scan)
529 df->blocks_to_scan = BITMAP_ALLOC (NULL);
531 bitmap_ior_into (df->blocks_to_scan, local_blocks_to_scan);
532 BITMAP_FREE (local_blocks_to_scan);
536 /* Create a new ref of type DF_REF_TYPE for register REG at address
537 LOC within INSN of BB. Public entry point for adding a single ref
   after scanning; grows the tables as needed and flags the affected
   side (defs or uses) for inline ref insertion. */
539 struct df_ref *
540 df_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn,
541 basic_block bb,
542 enum df_ref_type ref_type,
543 enum df_ref_flags ref_flags)
545 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
546 struct df_scan_bb_info *bb_info;
548 df_grow_reg_info (dflow, &df->use_info);
549 df_grow_reg_info (dflow, &df->def_info);
550 df_grow_bb_info (dflow);
552 /* Make sure there is the bb_info for this block. */
553 bb_info = df_scan_get_bb_info (dflow, bb->index);
554 if (!bb_info)
556 bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
557 df_scan_set_bb_info (dflow, bb->index, bb_info);
558 bb_info->artificial_defs = NULL;
559 bb_info->artificial_uses = NULL;
562 if (ref_type == DF_REF_REG_DEF)
563 df->def_info.add_refs_inline = true;
564 else
565 df->use_info.add_refs_inline = true;
567 return df_ref_create_structure (dflow, reg, loc, bb, insn, ref_type, ref_flags);
572 /*----------------------------------------------------------------------------
573 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
574 ----------------------------------------------------------------------------*/
577 /* Get the artificial defs for the basic block with index BB_INDEX. */
579 struct df_ref *
580 df_get_artificial_defs (struct df *df, unsigned int bb_index)
582 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
583 return df_scan_get_bb_info (dflow, bb_index)->artificial_defs;
587 /* Get the artificial uses for the basic block with index BB_INDEX. */
589 struct df_ref *
590 df_get_artificial_uses (struct df *df, unsigned int bb_index)
592 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
593 return df_scan_get_bb_info (dflow, bb_index)->artificial_uses;
597 /* Link REF at the front of reg_use or reg_def chain for REGNO.
   The chain is doubly linked via DF_REF_NEXT_REG/DF_REF_PREV_REG. */
599 void
600 df_reg_chain_create (struct df_reg_info *reg_info,
601 struct df_ref *ref)
603 struct df_ref *head = reg_info->reg_chain;
604 reg_info->reg_chain = ref;
606 DF_REF_NEXT_REG (ref) = head;
608 /* We cannot actually link to the head of the chain. */
609 DF_REF_PREV_REG (ref) = NULL;
611 if (head)
612 DF_REF_PREV_REG (head) = ref;
616 /* Remove REF from the CHAIN. Return the head of the chain. This
617 will be CHAIN unless the REF was at the beginning of the chain.
   Aborts (gcc_unreachable) if REF is not on CHAIN at all. */
619 static struct df_ref *
620 df_ref_unlink (struct df_ref *chain, struct df_ref *ref)
622 struct df_ref *orig_chain = chain;
623 struct df_ref *prev = NULL;
624 while (chain)
626 if (chain == ref)
628 if (prev)
630 prev->next_ref = ref->next_ref;
631 ref->next_ref = NULL;
632 return orig_chain;
634 else
/* REF was the head; the new head is whatever followed it.  */
636 chain = ref->next_ref;
637 ref->next_ref = NULL;
638 return chain;
642 prev = chain;
643 chain = chain->next_ref;
646 /* Someone passed in a ref that was not in the chain. */
647 gcc_unreachable ();
648 return NULL;
652 /* Unlink and delete REF at the reg_use or reg_def chain. Also delete
653 the def-use or use-def chain if it exists. Returns the next ref in
654 uses or defs chain. The ref storage is returned to the pool. */
656 struct df_ref *
657 df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
659 struct df *df = dflow->df;
660 struct df_ref *next = DF_REF_NEXT_REG (ref);
661 struct df_ref *prev = DF_REF_PREV_REG (ref);
662 struct df_scan_problem_data *problem_data
663 = (struct df_scan_problem_data *) dflow->problem_data;
664 struct df_reg_info *reg_info;
665 struct df_ref *next_ref = ref->next_ref;
666 unsigned int id = DF_REF_ID (ref);
/* Clear REF's slot in the def or use ref table, if it has one.  */
668 if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
670 reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
671 df->def_info.bitmap_size--;
672 if (df->def_info.refs && (id < df->def_info.refs_size))
673 DF_DEFS_SET (df, id, NULL);
675 else
677 reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
678 df->use_info.bitmap_size--;
679 if (df->use_info.refs && (id < df->use_info.refs_size))
680 DF_USES_SET (df, id, NULL);
683 /* Delete any def-use or use-def chains that start here. */
684 if (DF_REF_CHAIN (ref))
685 df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);
687 reg_info->n_refs--;
689 /* Unlink from the reg chain. If there is no prev, this is the
690 first of the list. If not, just join the next and prev. */
691 if (prev)
693 DF_REF_NEXT_REG (prev) = next;
694 if (next)
695 DF_REF_PREV_REG (next) = prev;
697 else
699 reg_info->reg_chain = next;
700 if (next)
701 DF_REF_PREV_REG (next) = NULL;
704 pool_free (problem_data->ref_pool, ref);
705 return next_ref;
709 /* Unlink REF from all def-use/use-def chains, etc. Artificial refs
   hang off the per-block info; ordinary refs hang off their insn.
   Finally the ref itself is unlinked and freed via
   df_reg_chain_unlink. */
711 void
712 df_ref_remove (struct df *df, struct df_ref *ref)
714 struct dataflow *dflow = df->problems_by_index[DF_SCAN];
715 if (DF_REF_REG_DEF_P (ref))
717 if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
719 struct df_scan_bb_info *bb_info
720 = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
721 bb_info->artificial_defs
722 = df_ref_unlink (bb_info->artificial_defs, ref);
724 else
725 DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref))
726 = df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);
728 if (df->def_info.add_refs_inline)
729 DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
731 else
733 if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
735 struct df_scan_bb_info *bb_info
736 = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
737 bb_info->artificial_uses
738 = df_ref_unlink (bb_info->artificial_uses, ref);
740 else
741 DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref))
742 = df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);
744 if (df->use_info.add_refs_inline)
745 DF_USES_SET (df, DF_REF_ID (ref), NULL);
748 df_reg_chain_unlink (dflow, ref);
752 /* Create the insn record for INSN. If there was one there, zero it out. */
754 static struct df_insn_info *
755 df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
757 struct df *df = dflow->df;
758 struct df_scan_problem_data *problem_data
759 = (struct df_scan_problem_data *) dflow->problem_data;
761 struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
762 if (!insn_rec)
764 insn_rec = pool_alloc (problem_data->insn_pool);
765 DF_INSN_SET (df, insn, insn_rec);
/* Whether freshly allocated or reused, start from a clean record.  */
767 memset (insn_rec, 0, sizeof (struct df_insn_info));
769 return insn_rec;
773 /* Delete all of the refs information from INSN: the multiword
   hardreg records and their links, then every def and use ref, and
   finally the insn record itself. */
775 void
776 df_insn_refs_delete (struct dataflow *dflow, rtx insn)
778 struct df *df = dflow->df;
779 unsigned int uid = INSN_UID (insn);
780 struct df_insn_info *insn_info = NULL;
781 struct df_ref *ref;
782 struct df_scan_problem_data *problem_data
783 = (struct df_scan_problem_data *) dflow->problem_data;
/* Guard: the insn table may not have grown to cover this uid yet.  */
785 if (uid < df->insns_size)
786 insn_info = DF_INSN_UID_GET (df, uid);
788 if (insn_info)
790 struct df_mw_hardreg *hardregs = insn_info->mw_hardregs;
792 while (hardregs)
794 struct df_mw_hardreg *next_hr = hardregs->next;
795 struct df_link *link = hardregs->regs;
796 while (link)
798 struct df_link *next_l = link->next;
799 pool_free (problem_data->mw_link_pool, link);
800 link = next_l;
803 pool_free (problem_data->mw_reg_pool, hardregs);
804 hardregs = next_hr;
/* df_reg_chain_unlink frees each ref and returns its successor.  */
807 ref = insn_info->defs;
808 while (ref)
809 ref = df_reg_chain_unlink (dflow, ref);
811 ref = insn_info->uses;
812 while (ref)
813 ref = df_reg_chain_unlink (dflow, ref);
815 pool_free (problem_data->insn_pool, insn_info);
816 DF_INSN_SET (df, insn, NULL);
821 /* Delete all of the refs information from basic_block with BB_INDEX:
   the refs of every insn in the block plus the block's artificial
   defs and uses. */
823 void
824 df_bb_refs_delete (struct dataflow *dflow, int bb_index)
826 struct df_ref *def;
827 struct df_ref *use;
829 struct df_scan_bb_info *bb_info
830 = df_scan_get_bb_info (dflow, bb_index);
831 rtx insn;
832 basic_block bb = BASIC_BLOCK (bb_index);
833 FOR_BB_INSNS (bb, insn)
835 if (INSN_P (insn))
837 /* Record defs within INSN. */
838 df_insn_refs_delete (dflow, insn);
842 /* Get rid of any artificial uses or defs. */
843 if (bb_info)
845 def = bb_info->artificial_defs;
846 while (def)
847 def = df_reg_chain_unlink (dflow, def);
848 bb_info->artificial_defs = NULL;
849 use = bb_info->artificial_uses;
850 while (use)
851 use = df_reg_chain_unlink (dflow, use);
852 bb_info->artificial_uses = NULL;
857 /* Delete all of the refs information from every basic block whose
   index is set in BLOCKS. */
859 void
860 df_refs_delete (struct dataflow *dflow, bitmap blocks)
862 bitmap_iterator bi;
863 unsigned int bb_index;
865 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
867 df_bb_refs_delete (dflow, bb_index);
872 /* Build the ref table for either the uses or defs from the reg-use
873 or reg-def chains, compacting out empty slots and renumbering every
   ref id to its table offset. No-op if already organized. */
875 void
876 df_reorganize_refs (struct df_ref_info *ref_info)
878 unsigned int m = ref_info->regs_inited;
879 unsigned int regno;
880 unsigned int offset = 0;
881 unsigned int size = 0;
883 if (ref_info->refs_organized)
884 return;
886 if (ref_info->refs_size < ref_info->bitmap_size)
888 int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
889 df_grow_ref_info (ref_info, new_size);
/* Walk each register's ref chain, packing its refs contiguously into
   the table; reg_info->begin marks where that register's run starts.  */
892 for (regno = 0; regno < m; regno++)
894 struct df_reg_info *reg_info = ref_info->regs[regno];
895 int count = 0;
896 if (reg_info)
898 struct df_ref *ref = reg_info->reg_chain;
899 reg_info->begin = offset;
900 while (ref)
902 ref_info->refs[offset] = ref;
903 DF_REF_ID (ref) = offset++;
904 ref = DF_REF_NEXT_REG (ref);
905 count++;
906 size++;
908 reg_info->n_refs = count;
912 /* The bitmap size is not decremented when refs are deleted. So
913 reset it now that we have squished out all of the empty
914 slots. */
915 ref_info->bitmap_size = size;
916 ref_info->refs_organized = true;
917 ref_info->add_refs_inline = true;
921 /*----------------------------------------------------------------------------
922 Hard core instruction scanning code. No external interfaces here,
923 just a lot of routines that look inside insns.
924 ----------------------------------------------------------------------------*/
926 /* Create a ref and add it to the reg-def or reg-use chains. Also
   links the ref onto its insn's def/use list (or the block's
   artificial list) and, when add_refs_inline is set, appends it to
   the big def/use ref table, growing that table as needed. */
928 static struct df_ref *
929 df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
930 basic_block bb, rtx insn,
931 enum df_ref_type ref_type,
932 enum df_ref_flags ref_flags)
934 struct df_ref *this_ref;
935 struct df *df = dflow->df;
936 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
937 struct df_scan_problem_data *problem_data
938 = (struct df_scan_problem_data *) dflow->problem_data;
940 this_ref = pool_alloc (problem_data->ref_pool);
941 DF_REF_REG (this_ref) = reg;
942 DF_REF_REGNO (this_ref) = regno;
943 DF_REF_LOC (this_ref) = loc;
944 DF_REF_INSN (this_ref) = insn;
945 DF_REF_CHAIN (this_ref) = NULL;
946 DF_REF_TYPE (this_ref) = ref_type;
947 DF_REF_FLAGS (this_ref) = ref_flags;
948 DF_REF_DATA (this_ref) = NULL;
949 DF_REF_BB (this_ref) = bb;
951 /* Link the ref into the reg_def and reg_use chains and keep a count
952 of the instances. */
953 switch (ref_type)
955 case DF_REF_REG_DEF:
957 struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
958 reg_info->n_refs++;
960 /* Add the ref to the reg_def chain. */
961 df_reg_chain_create (reg_info, this_ref);
962 DF_REF_ID (this_ref) = df->def_info.bitmap_size;
963 if (df->def_info.add_refs_inline)
965 if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
967 int new_size = df->def_info.bitmap_size
968 + df->def_info.bitmap_size / 4;
969 df_grow_ref_info (&df->def_info, new_size);
971 /* Add the ref to the big array of defs. */
972 DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
973 df->def_info.refs_organized = false;
976 df->def_info.bitmap_size++;
978 if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
980 struct df_scan_bb_info *bb_info
981 = df_scan_get_bb_info (dflow, bb->index);
982 this_ref->next_ref = bb_info->artificial_defs;
983 bb_info->artificial_defs = this_ref;
985 else
987 this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
988 DF_INSN_GET (df, insn)->defs = this_ref;
991 break;
/* Memory loads/stores are recorded on the use side as well.  */
993 case DF_REF_REG_MEM_LOAD:
994 case DF_REF_REG_MEM_STORE:
995 case DF_REF_REG_USE:
997 struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
998 reg_info->n_refs++;
1000 /* Add the ref to the reg_use chain. */
1001 df_reg_chain_create (reg_info, this_ref);
1002 DF_REF_ID (this_ref) = df->use_info.bitmap_size;
1003 if (df->use_info.add_refs_inline)
1005 if (DF_USES_SIZE (df) >= df->use_info.refs_size)
1007 int new_size = df->use_info.bitmap_size
1008 + df->use_info.bitmap_size / 4;
1009 df_grow_ref_info (&df->use_info, new_size);
1011 /* Add the ref to the big array of uses. */
1012 DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
1013 df->use_info.refs_organized = false;
1016 df->use_info.bitmap_size++;
1017 if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
1019 struct df_scan_bb_info *bb_info
1020 = df_scan_get_bb_info (dflow, bb->index);
1021 this_ref->next_ref = bb_info->artificial_uses;
1022 bb_info->artificial_uses = this_ref;
1024 else
1026 this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
1027 DF_INSN_GET (df, insn)->uses = this_ref;
1030 break;
1032 default:
1033 gcc_unreachable ();
1036 return this_ref;
1040 /* Create new references of type DF_REF_TYPE for each part of register REG
1041 at address LOC within INSN of BB. For hard registers this creates
   one ref per covered hard reg (plus a df_mw_hardreg record when the
   value spans several regs); for pseudos a single ref is created. */
1043 static void
1044 df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
1045 basic_block bb, rtx insn,
1046 enum df_ref_type ref_type,
1047 enum df_ref_flags ref_flags,
1048 bool record_live)
1050 struct df *df = dflow->df;
1051 rtx oldreg = reg;
1052 unsigned int regno;
1054 gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
1056 /* For the reg allocator we are interested in some SUBREG rtx's, but not
1057 all. Notably only those representing a word extraction from a multi-word
1058 reg. As written in the docu those should have the form
1059 (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
1060 XXX Is that true? We could also use the global word_mode variable. */
1061 if ((dflow->flags & DF_SUBREGS) == 0
1062 && GET_CODE (reg) == SUBREG
1063 && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
1064 || GET_MODE_SIZE (GET_MODE (reg))
1065 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
1067 loc = &SUBREG_REG (reg);
1068 reg = *loc;
1069 ref_flags |= DF_REF_STRIPPED;
1072 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
1073 if (regno < FIRST_PSEUDO_REGISTER)
1075 unsigned int i;
1076 unsigned int endregno;
1077 struct df_mw_hardreg *hardreg = NULL;
1078 struct df_scan_problem_data *problem_data
1079 = (struct df_scan_problem_data *) dflow->problem_data;
1081 if (!(dflow->flags & DF_HARD_REGS))
1082 return;
1084 /* GET_MODE (reg) is correct here. We do not want to go into a SUBREG
1085 for the mode, because we only want to add references to regs, which
1086 are really referenced. E.g., a (subreg:SI (reg:DI 0) 0) does _not_
1087 reference the whole reg 0 in DI mode (which would also include
1088 reg 1, at least, if 0 and 1 are SImode registers). */
1089 endregno = hard_regno_nregs[regno][GET_MODE (reg)];
1090 if (GET_CODE (reg) == SUBREG)
1091 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
1092 SUBREG_BYTE (reg), GET_MODE (reg));
1093 endregno += regno;
1095 /* If this is a multiword hardreg, we create some extra datastructures that
1096 will enable us to easily build REG_DEAD and REG_UNUSED notes. */
1097 if ((endregno != regno + 1) && insn)
1099 struct df_insn_info *insn_info = DF_INSN_GET (df, insn);
1100 /* Sets to a subreg of a multiword register are partial.
1101 Sets to a non-subreg of a multiword register are not. */
1102 if (GET_CODE (oldreg) == SUBREG)
1103 ref_flags |= DF_REF_PARTIAL;
1104 ref_flags |= DF_REF_MW_HARDREG;
1105 hardreg = pool_alloc (problem_data->mw_reg_pool);
1106 hardreg->next = insn_info->mw_hardregs;
1107 insn_info->mw_hardregs = hardreg;
1108 hardreg->type = ref_type;
1109 hardreg->flags = ref_flags;
1110 hardreg->mw_reg = reg;
1111 hardreg->regs = NULL;
/* One ref per covered hard register.  */
1115 for (i = regno; i < endregno; i++)
1117 struct df_ref *ref;
1119 /* Calls are handled at call site because regs_ever_live
1120 doesn't include clobbered regs, only used ones. */
1121 if (ref_type == DF_REF_REG_DEF && record_live)
1122 regs_ever_live[i] = 1;
1123 else if ((ref_type == DF_REF_REG_USE
1124 || ref_type == DF_REF_REG_MEM_STORE
1125 || ref_type == DF_REF_REG_MEM_LOAD)
1126 && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
1128 /* Set regs_ever_live on uses of non-eliminable frame
1129 pointers and arg pointers. */
1130 if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
1131 && (regno == FRAME_POINTER_REGNUM
1132 || regno == ARG_POINTER_REGNUM)))
1133 regs_ever_live[i] = 1;
1136 ref = df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
1137 bb, insn, ref_type, ref_flags);
/* Thread the new ref onto the multiword record, if any.  */
1138 if (hardreg)
1140 struct df_link *link = pool_alloc (problem_data->mw_link_pool);
1142 link->next = hardreg->regs;
1143 link->ref = ref;
1144 hardreg->regs = link;
1148 else
1150 df_ref_create_structure (dflow, reg, loc,
1151 bb, insn, ref_type, ref_flags);
1156 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
1157 covered by the outer mode is smaller than that covered by the inner mode,
1158 is a read-modify-write operation.
1159 This function returns true iff the SUBREG X is such a SUBREG. */
1161 bool
1162 df_read_modify_subreg_p (rtx x)
1164 unsigned int isize, osize;
1165 if (GET_CODE (x) != SUBREG)
1166 return false;
1167 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
1168 osize = GET_MODE_SIZE (GET_MODE (x));
1169 return (isize > osize && isize > UNITS_PER_WORD);
/* Process all the registers defined in the rtx, X.
   Autoincrement/decrement definitions will be picked up by
   df_uses_record.

   FLAGS carries DF_REF_* modifiers applied to each def created;
   RECORD_LIVE is forwarded to df_ref_record, where it controls whether
   regs_ever_live is updated for hard-register defs.  */

static void
df_def_record_1 (struct dataflow *dflow, rtx x,
		 basic_block bb, rtx insn,
		 enum df_ref_flags flags, bool record_live)
{
  rtx *loc;
  rtx dst;
  bool dst_in_strict_lowpart = false;

  /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
     construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

  /* It is legal to have a set destination be a parallel. */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;

      /* Recurse on each element; CLOBBER elements get the MUST_CLOBBER
	 flag added, matching the treatment of top-level CLOBBERs.  */
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
	      || GET_CODE (temp) == SET)
	    df_def_record_1 (dflow, temp, bb, insn,
			     GET_CODE (temp) == CLOBBER
			     ? flags | DF_REF_MUST_CLOBBER : flags,
			     record_live);
	}
      return;
    }

  /* Maybe, we should flag the use of STRICT_LOW_PART somehow.  It might
     be handy for the reg allocator.

     Peel off the wrappers (STRICT_LOW_PART, ZERO_EXTRACT, and
     read-modify-write SUBREGs) around the real destination; each wrapper
     means part of the old value survives, so the def is READ_WRITE.  */
  while (GET_CODE (dst) == STRICT_LOW_PART
	 || GET_CODE (dst) == ZERO_EXTRACT
	 || df_read_modify_subreg_p (dst))
    {
#if 0
      /* Strict low part always contains SUBREG, but we do not want to make
	 it appear outside, as whole register is always considered.  */
      if (GET_CODE (dst) == STRICT_LOW_PART)
	{
	  loc = &XEXP (dst, 0);
	  dst = *loc;
	}
#endif
      loc = &XEXP (dst, 0);
      if (GET_CODE (dst) == STRICT_LOW_PART)
	dst_in_strict_lowpart = true;
      dst = *loc;
      flags |= DF_REF_READ_WRITE;
    }

  /* Sets to a subreg of a single word register are partial sets if
     they are wrapped in a strict lowpart, and not partial otherwise.  */
  if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))
      && dst_in_strict_lowpart)
    flags |= DF_REF_PARTIAL;

  /* Only record a def when the destination is ultimately a register
     (possibly under a SUBREG); MEM destinations are handled as uses of
     the address by df_uses_record.  */
  if (REG_P (dst)
      || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
    df_ref_record (dflow, dst, loc, bb, insn,
		   DF_REF_REG_DEF, flags, record_live);
}
1249 /* Process all the registers defined in the pattern rtx, X. */
1251 static void
1252 df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
1254 RTX_CODE code = GET_CODE (x);
1256 if (code == SET || code == CLOBBER)
1258 /* Mark the single def within the pattern. */
1259 df_def_record_1 (dflow, x, bb, insn,
1260 code == CLOBBER ? DF_REF_MUST_CLOBBER : 0, true);
1262 else if (code == COND_EXEC)
1264 df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
1266 else if (code == PARALLEL)
1268 int i;
1270 /* Mark the multiple defs within the pattern. */
1271 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1272 df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
/* Process all the registers used in the rtx at address LOC.
   REF_TYPE classifies the use (plain use, load address, store address)
   and FLAGS carries DF_REF_* modifiers passed down to each reference
   created.  The walk recurses over the operands of *LOC; the recursion
   on operand 0 is converted into a goto to limit stack depth.  */

static void
df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
		basic_block bb, rtx insn, enum df_ref_flags flags)
{
  RTX_CODE code;
  rtx x;

 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Constants and address vectors contain no register uses.  */
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
			DF_REF_REG_MEM_STORE, bb, insn, flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* Only the DF_REF_IN_NOTE bit is propagated into the address
	 scan; the other modifiers describe the enclosing context, not
	 the address computation.  */
      df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
		      flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;

      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
	{
	  loc = &SUBREG_REG (x);
	  df_uses_record (dflow, loc, ref_type, bb, insn, flags);
	  return;
	}
      /* ... Fall through ... */

    case REG:
      df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
      return;

    case SET:
      {
	rtx dst = SET_DEST (x);
	gcc_assert (!(flags & DF_REF_IN_NOTE));
	df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);

	/* The destination of a SET mostly contains defs, but certain
	   forms (read-modify-write subregs, strict_low_part, extracts,
	   memory addresses) also USE registers.  */
	switch (GET_CODE (dst))
	  {
	  case SUBREG:
	    if (df_read_modify_subreg_p (dst))
	      {
		df_uses_record (dflow, &SUBREG_REG (dst),
				DF_REF_REG_USE, bb,
				insn, flags | DF_REF_READ_WRITE);
		break;
	      }
	    /* Fall through.  */
	  case REG:
	  case PARALLEL:
	  case SCRATCH:
	  case PC:
	  case CC0:
	    break;
	  case MEM:
	    /* Storing to memory uses the registers in the address.  */
	    df_uses_record (dflow, &XEXP (dst, 0),
			    DF_REF_REG_MEM_STORE,
			    bb, insn, flags);
	    break;
	  case STRICT_LOW_PART:
	    {
	      rtx *temp = &XEXP (dst, 0);
	      /* A strict_low_part uses the whole REG and not just the
		 SUBREG.  */
	      dst = XEXP (dst, 0);
	      df_uses_record (dflow,
			      (GET_CODE (dst) == SUBREG)
			      ? &SUBREG_REG (dst) : temp,
			      DF_REF_REG_USE, bb,
			      insn, DF_REF_READ_WRITE);
	    }
	    break;
	  case ZERO_EXTRACT:
	  case SIGN_EXTRACT:
	    /* The target of the extract is read-modify-written; the
	       position and width operands are ordinary uses.  */
	    df_uses_record (dflow, &XEXP (dst, 0),
			    DF_REF_REG_USE, bb, insn,
			    DF_REF_READ_WRITE);
	    df_uses_record (dflow, &XEXP (dst, 1),
			    DF_REF_REG_USE, bb, insn, flags);
	    df_uses_record (dflow, &XEXP (dst, 2),
			    DF_REF_REG_USE, bb, insn, flags);
	    dst = XEXP (dst, 0);
	    break;
	  default:
	    gcc_unreachable ();
	  }
	return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be
	   considered to use and clobber all hard registers, all
	   pseudo-registers and all of memory.  So must TRAP_IF and
	   UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu
	   rounding mode.  An insn should not be moved across this
	   even if it only uses pseudo-regs because it might give an
	   incorrectly rounded result.

	   However, flow.c's liveness computation did *not* do this,
	   giving the reasoning as " ?!? Unfortunately, marking all
	   hard registers as live causes massive problems for the
	   register allocator and marking all pseudos as live creates
	   mountains of uninitialized variable warnings."

	   In order to maintain the status quo with regard to liveness
	   and uses, we do what flow.c did and just mark any regs we
	   can find in ASM_OPERANDS as used.  Later on, when liveness
	   is computed, asm insns are scanned and regs_asm_clobbered
	   is filled out.

	   For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We can not just fall through here since then we
	   would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
	   which do not indicate traditional asms unlike their normal
	   usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn, flags);
	    return;
	  }
	break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      flags |= DF_REF_READ_WRITE;
      df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
		     DF_REF_REG_DEF, flags, true);

      /* ... Fall through to handle uses ... */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
	    df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
			      bb, insn, flags);
	  }
      }
  }
}
1485 /* Return true if *LOC contains an asm. */
1487 static int
1488 df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
1490 if ( !*loc)
1491 return 0;
1492 if (GET_CODE (*loc) == ASM_OPERANDS)
1493 return 1;
1494 return 0;
1498 /* Return true if INSN contains an ASM. */
1500 static int
1501 df_insn_contains_asm (rtx insn)
1503 return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
/* Record all the refs for DF within INSN of basic block BB.
   Defs are recorded first, then (for calls) the argument/clobber
   notes, and finally the uses in the pattern itself.  */

static void
df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
{
  struct df *df = dflow->df;
  int i;

  if (INSN_P (insn))
    {
      rtx note;

      if (df_insn_contains_asm (insn))
	DF_INSN_CONTAINS_ASM (df, insn) = true;

      /* Record register defs.  */
      df_defs_record (dflow, PATTERN (insn), bb, insn);

      /* Uses inside REG_EQUIV/REG_EQUAL notes are only scanned when
	 the client asked for them, and are tagged DF_REF_IN_NOTE so
	 they can be distinguished from real uses.  */
      if (dflow->flags & DF_EQUIV_NOTES)
	for (note = REG_NOTES (insn); note;
	     note = XEXP (note, 1))
	  {
	    switch (REG_NOTE_KIND (note))
	      {
	      case REG_EQUIV:
	      case REG_EQUAL:
		df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
				bb, insn, DF_REF_IN_NOTE);
	      default:
		break;
	      }
	  }

      if (CALL_P (insn))
	{
	  rtx note;

	  /* Record the registers used to pass arguments, and explicitly
	     noted as clobbered.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
	       note = XEXP (note, 1))
	    {
	      if (GET_CODE (XEXP (note, 0)) == USE)
		df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
				DF_REF_REG_USE,
				bb, insn, 0);
	      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
		{
		  df_defs_record (dflow, XEXP (note, 0), bb, insn);
		  /* Hard regs explicitly clobbered at the call site are
		     marked ever-live here because the generic call
		     handling below only covers used regs.  */
		  if (REG_P (XEXP (XEXP (note, 0), 0)))
		    {
		      rtx reg = XEXP (XEXP (note, 0), 0);
		      int regno_last;
		      int regno_first;
		      int i;	/* NOTE(review): shadows the outer I.  */

		      regno_last = regno_first = REGNO (reg);
		      if (regno_first < FIRST_PSEUDO_REGISTER)
			regno_last
			  += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
		      for (i = regno_first; i <= regno_last; i++)
			regs_ever_live[i] = 1;
		    }
		}
	    }

	  /* The stack ptr is used (honorarily) by a CALL insn.  */
	  df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
			  DF_REF_REG_USE, bb, insn,
			  0);

	  if (dflow->flags & DF_HARD_REGS)
	    {
	      bitmap_iterator bi;
	      unsigned int ui;
	      /* Calls may also reference any of the global registers,
		 so they are recorded as used.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  {
		    df_uses_record (dflow, &regno_reg_rtx[i],
				    DF_REF_REG_USE, bb, insn, 0);
		    df_ref_record (dflow, regno_reg_rtx[i], &regno_reg_rtx[i],
				   bb, insn, DF_REF_REG_DEF, 0, true);
		  }

	      /* Non-global call-clobbered regs get MAY_CLOBBER defs,
		 with record_live false (regs_ever_live untouched).  */
	      EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
		{
		  if (!global_regs[ui])
		    df_ref_record (dflow, regno_reg_rtx[ui], &regno_reg_rtx[ui], bb,
				   insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER, false);
		}
	    }
	}

      /* Record the register uses.  */
      df_uses_record (dflow, &PATTERN (insn),
		      DF_REF_REG_USE, bb, insn, 0);
    }
}
1610 static bool
1611 df_has_eh_preds (basic_block bb)
1613 edge e;
1614 edge_iterator ei;
1616 FOR_EACH_EDGE (e, ei, bb->preds)
1618 if (e->flags & EDGE_EH)
1619 return true;
1621 return false;
/* Record all the refs within the basic block BB.
   Walks the insns in order assigning LUIDs, then adds the artificial
   defs/uses required by EH landing pads and forced-live registers.  */

static void
df_bb_refs_record (struct dataflow *dflow, basic_block bb)
{
  struct df *df = dflow->df;
  rtx insn;
  int luid = 0;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);
  /* Registers collected here become artificial uses at the end of the
     block; only allocated (and later freed) under DF_HARD_REGS.  */
  bitmap artificial_uses_at_bottom = NULL;

  if (dflow->flags & DF_HARD_REGS)
    artificial_uses_at_bottom = BITMAP_ALLOC (NULL);

  /* Need to make sure that there is a record in the basic block info. */
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* Scan the block an insn at a time from beginning to end.  */
  FOR_BB_INSNS (bb, insn)
    {
      df_insn_create_insn_record (dflow, insn);
      if (INSN_P (insn))
	{
	  /* Record defs within INSN.  */
	  DF_INSN_LUID (df, insn) = luid++;
	  df_insn_refs_record (dflow, bb, insn);
	}
      /* Non-insns (notes, barriers) share the LUID of the next insn.  */
      DF_INSN_LUID (df, insn) = luid;
    }

#ifdef EH_RETURN_DATA_REGNO
  if ((dflow->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
	{
	  unsigned regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;
	  df_ref_record (dflow, regno_reg_rtx[regno], &regno_reg_rtx[regno],
			 bb, NULL,
			 DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
			 false);
	}
    }
#endif

  if ((dflow->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
#ifdef EH_USES
      unsigned int i;
      /* This code is putting in a artificial ref for the use at the
	 TOP of the block that receives the exception.  It is too
	 cumbersome to actually put the ref on the edge.  We could
	 either model this at the top of the receiver block or the
	 bottom of the sender block.

	 The bottom of the sender block is problematic because not all
	 out-edges of the a block are eh-edges.  However, it is true
	 that all edges into a block are either eh-edges or none of
	 them are eh-edges.  Thus, we can model this at the top of the
	 eh-receiver for all of the edges at once.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (EH_USES (i))
	  df_uses_record (dflow, &regno_reg_rtx[i],
			  DF_REF_REG_USE, bb, NULL,
			  DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
#endif

      /* The following code (down thru the arg_pointer setting APPEARS
	 to be necessary because there is nothing that actually
	 describes what the exception handling code may actually need
	 to keep alive.  */
      if (reload_completed)
	{
	  if (frame_pointer_needed)
	    {
	      bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      bitmap_set_bit (artificial_uses_at_bottom, HARD_FRAME_POINTER_REGNUM);
#endif
	    }
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  if (fixed_regs[ARG_POINTER_REGNUM])
	    bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
#endif
	}
    }

  if ((dflow->flags & DF_HARD_REGS)
      && bb->index >= NUM_FIXED_BLOCKS)
    {
      /* Before reload, there are a few registers that must be forced
	 live everywhere -- which might not already be the case for
	 blocks within infinite loops.  */
      if (!reload_completed)
	{
	  /* Any reference to any pseudo before reload is a potential
	     reference of the frame pointer.  */
	  bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  /* Pseudos with argument area equivalences may require
	     reloading via the argument pointer.  */
	  if (fixed_regs[ARG_POINTER_REGNUM])
	    bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
#endif

	  /* Any constant, or pseudo with constant equivalences, may
	     require reloading from memory using the pic register.  */
	  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	    bitmap_set_bit (artificial_uses_at_bottom, PIC_OFFSET_TABLE_REGNUM);
	}
      /* The all-important stack pointer must always be live.  */
      bitmap_set_bit (artificial_uses_at_bottom, STACK_POINTER_REGNUM);
    }

  if (dflow->flags & DF_HARD_REGS)
    {
      bitmap_iterator bi;
      unsigned int regno;

      /* Materialize the collected registers as artificial uses at the
	 bottom of the block, then release the scratch bitmap.  */
      EXECUTE_IF_SET_IN_BITMAP (artificial_uses_at_bottom, 0, regno, bi)
	{
	  df_uses_record (dflow, &regno_reg_rtx[regno],
			  DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
	}

      BITMAP_FREE (artificial_uses_at_bottom);
    }
}
1769 /* Record all the refs in the basic blocks specified by BLOCKS. */
1771 static void
1772 df_refs_record (struct dataflow *dflow, bitmap blocks)
1774 unsigned int bb_index;
1775 bitmap_iterator bi;
1777 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
1779 basic_block bb = BASIC_BLOCK (bb_index);
1780 df_bb_refs_record (dflow, bb);
1783 if (bitmap_bit_p (blocks, EXIT_BLOCK))
1784 df_record_exit_block_uses (dflow);
1786 if (bitmap_bit_p (blocks, ENTRY_BLOCK))
1787 df_record_entry_block_defs (dflow);
1791 /*----------------------------------------------------------------------------
1792 Specialized hard register scanning functions.
1793 ----------------------------------------------------------------------------*/
1795 /* Mark a register in SET. Hard registers in large modes get all
1796 of their component registers set as well. */
1798 static void
1799 df_mark_reg (rtx reg, void *vset)
1801 bitmap set = (bitmap) vset;
1802 int regno = REGNO (reg);
1804 gcc_assert (GET_MODE (reg) != BLKmode);
1806 bitmap_set_bit (set, regno);
1807 if (regno < FIRST_PSEUDO_REGISTER)
1809 int n = hard_regno_nregs[regno][GET_MODE (reg)];
1810 while (--n > 0)
1811 bitmap_set_bit (set, regno + n);
/* Record the (conservative) set of hard registers that are defined on
   entry to the function.
   The set is accumulated in df->entry_block_defs and then each member
   gets an artificial def in the ENTRY block.  */

static void
df_record_entry_block_defs (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  rtx r;
  struct df *df = dflow->df;

  bitmap_clear (df->entry_block_defs);

  /* Entry-block defs are only meaningful for hard-register scanning.  */
  if (!(dflow->flags & DF_HARD_REGS))
    return;

  /* Incoming argument registers are defined on entry; targets that
     remap them (INCOMING_REGNO) get the remapped register instead.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (FUNCTION_ARG_REGNO_P (i))
#ifdef INCOMING_REGNO
	bitmap_set_bit (df->entry_block_defs, INCOMING_REGNO (i));
#else
	bitmap_set_bit (df->entry_block_defs, i);
#endif
    }

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (HAVE_prologue && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
	 pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if ((call_used_regs[i] == 0) && (regs_ever_live[i]))
	  bitmap_set_bit (df->entry_block_defs, i);
    }
  else
    {
      /* The always important stack pointer.  */
      bitmap_set_bit (df->entry_block_defs, STACK_POINTER_REGNUM);

#ifdef INCOMING_RETURN_ADDR_RTX
      if (REG_P (INCOMING_RETURN_ADDR_RTX))
	bitmap_set_bit (df->entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

      /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM
	 only STATIC_CHAIN_REGNUM is defined.  If they are different,
	 we only care about the STATIC_CHAIN_INCOMING_REGNUM.  */
#ifdef STATIC_CHAIN_INCOMING_REGNUM
      bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM);
#else
#ifdef STATIC_CHAIN_REGNUM
      bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_REGNUM);
#endif
#endif

      /* The register holding the address of a returned aggregate, if
	 the target passes it in a register.  */
      r = TARGET_STRUCT_VALUE_RTX (current_function_decl, true);
      if (r && REG_P (r))
	bitmap_set_bit (df->entry_block_defs, REGNO (r));
    }

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (df->entry_block_defs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (df->entry_block_defs, HARD_FRAME_POINTER_REGNUM);
#endif
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
#ifdef EH_USES
      /* The ia-64, the only machine that uses this, does not define these
	 until after reload.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (EH_USES (i))
	  {
	    bitmap_set_bit (df->entry_block_defs, i);
	  }
#endif

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (df->entry_block_defs, ARG_POINTER_REGNUM);
#endif

#ifdef PIC_OFFSET_TABLE_REGNUM
      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	bitmap_set_bit (df->entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
#endif
    }

  /* Let the target add anything else it considers live on entry.  */
  targetm.live_on_entry (df->entry_block_defs);

  /* Turn each collected register into an artificial def in the
     entry block.  */
  EXECUTE_IF_SET_IN_BITMAP (df->entry_block_defs, 0, i, bi)
    {
      df_ref_record (dflow, regno_reg_rtx[i], &regno_reg_rtx[i],
		     ENTRY_BLOCK_PTR, NULL,
		     DF_REF_REG_DEF, DF_REF_ARTIFICIAL , false);
    }
}
/* Record the set of hard registers that are used in the exit block.
   The set is accumulated in df->exit_block_uses and each member then
   gets an artificial use in the EXIT block.  */

static void
df_record_exit_block_uses (struct dataflow *dflow)
{
  unsigned int i;
  bitmap_iterator bi;
  struct df *df = dflow->df;

  bitmap_clear (df->exit_block_uses);

  /* Exit-block uses are only meaningful for hard-register scanning.  */
  if (!(dflow->flags & DF_HARD_REGS))
    return;

  /* If exiting needs the right stack value, consider the stack
     pointer live at the end of the function.  */
  if ((HAVE_epilogue && epilogue_completed)
      || !EXIT_IGNORE_STACK
      || (!FRAME_POINTER_REQUIRED
	  && !current_function_calls_alloca
	  && flag_omit_frame_pointer)
      || current_function_sp_is_unchanging)
    {
      bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM);
    }

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
      /* If they are different, also mark the hard frame pointer as live.  */
      if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM);
#endif
    }

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
    bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
#endif

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (df->exit_block_uses, i);

  if (HAVE_epilogue && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (regs_ever_live[i] && !LOCAL_REGNO (i)
	    && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  bitmap_set_bit (df->exit_block_uses, i);
    }

#ifdef EH_RETURN_DATA_REGNO
  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && current_function_calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO (i);
	if (regno == INVALID_REGNUM)
	  break;
	bitmap_set_bit (df->exit_block_uses, regno);
      }
#endif

#ifdef EH_RETURN_STACKADJ_RTX
  if ((!HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((!HAVE_epilogue || ! epilogue_completed)
      && current_function_calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, df->exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) df->exit_block_uses);

  /* Turn each collected register into an artificial use in the
     exit block.  */
  if (dflow->flags & DF_HARD_REGS)
    EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi)
      df_uses_record (dflow, &regno_reg_rtx[i],
		      DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL,
		      DF_REF_ARTIFICIAL);
}
/* True once the one-time platform tables below have been built.  */
static bool initialized = false;

/* Initialize some platform specific structures.
   Resets regs_ever_live (before reload only), then on the first call
   builds elim_reg_set and df_invalidated_by_call.  */

void
df_hard_reg_init (void)
{
  int i;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* After reload, some ports add certain bits to regs_ever_live so
     this cannot be reset.  */

  if (!reload_completed)
    memset (regs_ever_live, 0, sizeof (regs_ever_live));

  /* Everything below is one-time initialization.  */
  if (initialized)
    return;

  bitmap_obstack_initialize (&persistent_obstack);

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack);

  /* Inconveniently, this is only readily available in hard reg set
     form.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
    if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
      bitmap_set_bit (df_invalidated_by_call, i);

  initialized = true;
}