2015-06-25 Zhouyi Zhou <yizhouzhou@ict.ac.cn>
[official-gcc.git] / gcc / df-scan.c
blob4e53f8844436d7c76155fedd8dc99ad55f9fa727
1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999-2015 Free Software Foundation, Inc.
3 Originally contributed by Michael P. Hayes
4 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
5 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
6 and Kenneth Zadeck (zadeck@naturalbridge.com).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "rtl.h"
29 #include "tm_p.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "hard-reg-set.h"
35 #include "function.h"
36 #include "regs.h"
37 #include "alloc-pool.h"
38 #include "flags.h"
39 #include "predict.h"
40 #include "dominance.h"
41 #include "cfg.h"
42 #include "basic-block.h"
43 #include "sbitmap.h"
44 #include "bitmap.h"
45 #include "dumpfile.h"
46 #include "tree.h"
47 #include "target.h"
48 #include "df.h"
49 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
52 typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
55 #ifndef HAVE_prologue
56 #define HAVE_prologue 0
57 #endif
58 #ifndef HAVE_sibcall_epilogue
59 #define HAVE_sibcall_epilogue 0
60 #endif
62 /* The set of hard registers in eliminables[i].from. */
64 static HARD_REG_SET elim_reg_set;
66 /* Initialize ur_in and ur_out as if all hard registers were partially
67 available. */
69 struct df_collection_rec
71 auto_vec<df_ref, 128> def_vec;
72 auto_vec<df_ref, 32> use_vec;
73 auto_vec<df_ref, 32> eq_use_vec;
74 auto_vec<df_mw_hardreg_ptr, 32> mw_vec;
77 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
78 rtx, rtx *,
79 basic_block, struct df_insn_info *,
80 enum df_ref_type, int ref_flags);
81 static void df_def_record_1 (struct df_collection_rec *, rtx *,
82 basic_block, struct df_insn_info *,
83 int ref_flags);
84 static void df_defs_record (struct df_collection_rec *, rtx,
85 basic_block, struct df_insn_info *,
86 int ref_flags);
87 static void df_uses_record (struct df_collection_rec *,
88 rtx *, enum df_ref_type,
89 basic_block, struct df_insn_info *,
90 int ref_flags);
92 static void df_install_ref_incremental (df_ref);
93 static void df_insn_refs_collect (struct df_collection_rec*,
94 basic_block, struct df_insn_info *);
95 static void df_canonize_collection_rec (struct df_collection_rec *);
97 static void df_get_regular_block_artificial_uses (bitmap);
98 static void df_get_eh_block_artificial_uses (bitmap);
100 static void df_record_entry_block_defs (bitmap);
101 static void df_record_exit_block_uses (bitmap);
102 static void df_get_exit_block_use_set (bitmap);
103 static void df_get_entry_block_def_set (bitmap);
104 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
105 static void df_ref_chain_delete_du_chain (df_ref);
106 static void df_ref_chain_delete (df_ref);
108 static void df_refs_add_to_chains (struct df_collection_rec *,
109 basic_block, rtx_insn *, unsigned int);
111 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block,
112 rtx_insn *, bool);
113 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
114 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
115 static void df_install_ref (df_ref, struct df_reg_info *,
116 struct df_ref_info *, bool);
118 static int df_ref_compare (df_ref, df_ref);
119 static int df_ref_ptr_compare (const void *, const void *);
120 static int df_mw_compare (const df_mw_hardreg *, const df_mw_hardreg *);
121 static int df_mw_ptr_compare (const void *, const void *);
123 static void df_insn_info_delete (unsigned int);
125 /* Indexed by hardware reg number, is true if that register is ever
126 used in the current function.
128 In df-scan.c, this is set up to record the hard regs used
129 explicitly. Reload adds in the hard regs used for holding pseudo
130 regs. Final uses it to generate the code in the function prologue
131 and epilogue to save and restore registers as needed. */
133 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
135 /* Flags used to tell df_refs_add_to_chains() which vectors it should copy. */
136 static const unsigned int copy_defs = 0x1;
137 static const unsigned int copy_uses = 0x2;
138 static const unsigned int copy_eq_uses = 0x4;
139 static const unsigned int copy_mw = 0x8;
140 static const unsigned int copy_all = copy_defs | copy_uses | copy_eq_uses
141 | copy_mw;
143 /*----------------------------------------------------------------------------
144 SCANNING DATAFLOW PROBLEM
146 There are several ways in which scanning looks just like the other
147 dataflow problems. It shares the all the mechanisms for local info
148 as well as basic block info. Where it differs is when and how often
149 it gets run. It also has no need for the iterative solver.
150 ----------------------------------------------------------------------------*/
152 #define SCAN_PROBLEM_DATA_BLOCK_SIZE 512
154 /* Problem data for the scanning dataflow function. */
155 struct df_scan_problem_data
157 pool_allocator<df_base_ref> *ref_base_pool;
158 pool_allocator<df_artificial_ref> *ref_artificial_pool;
159 pool_allocator<df_regular_ref> *ref_regular_pool;
160 pool_allocator<df_insn_info> *insn_pool;
161 pool_allocator<df_reg_info> *reg_pool;
162 pool_allocator<df_mw_hardreg> *mw_reg_pool;
164 bitmap_obstack reg_bitmaps;
165 bitmap_obstack insn_bitmaps;
168 typedef struct df_scan_bb_info *df_scan_bb_info_t;
171 /* Internal function to shut down the scanning problem. */
172 static void
173 df_scan_free_internal (void)
175 struct df_scan_problem_data *problem_data
176 = (struct df_scan_problem_data *) df_scan->problem_data;
178 free (df->def_info.refs);
179 free (df->def_info.begin);
180 free (df->def_info.count);
181 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
183 free (df->use_info.refs);
184 free (df->use_info.begin);
185 free (df->use_info.count);
186 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
188 free (df->def_regs);
189 df->def_regs = NULL;
190 free (df->use_regs);
191 df->use_regs = NULL;
192 free (df->eq_use_regs);
193 df->eq_use_regs = NULL;
194 df->regs_size = 0;
195 DF_REG_SIZE (df) = 0;
197 free (df->insns);
198 df->insns = NULL;
199 DF_INSN_SIZE () = 0;
201 free (df_scan->block_info);
202 df_scan->block_info = NULL;
203 df_scan->block_info_size = 0;
205 bitmap_clear (&df->hardware_regs_used);
206 bitmap_clear (&df->regular_block_artificial_uses);
207 bitmap_clear (&df->eh_block_artificial_uses);
208 BITMAP_FREE (df->entry_block_defs);
209 BITMAP_FREE (df->exit_block_uses);
210 bitmap_clear (&df->insns_to_delete);
211 bitmap_clear (&df->insns_to_rescan);
212 bitmap_clear (&df->insns_to_notes_rescan);
214 delete problem_data->ref_base_pool;
215 delete problem_data->ref_artificial_pool;
216 delete problem_data->ref_regular_pool;
217 delete problem_data->insn_pool;
218 delete problem_data->reg_pool;
219 delete problem_data->mw_reg_pool;
220 bitmap_obstack_release (&problem_data->reg_bitmaps);
221 bitmap_obstack_release (&problem_data->insn_bitmaps);
222 free (df_scan->problem_data);
226 /* Free basic block info. */
228 static void
229 df_scan_free_bb_info (basic_block bb, void *vbb_info)
231 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
232 unsigned int bb_index = bb->index;
233 rtx_insn *insn;
235 FOR_BB_INSNS (bb, insn)
236 if (INSN_P (insn))
237 df_insn_info_delete (INSN_UID (insn));
239 if (bb_index < df_scan->block_info_size)
240 bb_info = df_scan_get_bb_info (bb_index);
242 /* Get rid of any artificial uses or defs. */
243 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
244 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
245 df_ref_chain_delete (bb_info->artificial_defs);
246 df_ref_chain_delete (bb_info->artificial_uses);
247 bb_info->artificial_defs = NULL;
248 bb_info->artificial_uses = NULL;
252 /* Allocate the problem data for the scanning problem. This should be
253 called when the problem is created or when the entire function is to
254 be rescanned. */
255 void
256 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
258 struct df_scan_problem_data *problem_data;
259 unsigned int insn_num = get_max_uid () + 1;
260 basic_block bb;
262 /* Given the number of pools, this is really faster than tearing
263 everything apart. */
264 if (df_scan->problem_data)
265 df_scan_free_internal ();
267 problem_data = XNEW (struct df_scan_problem_data);
268 df_scan->problem_data = problem_data;
269 df_scan->computed = true;
271 problem_data->ref_base_pool = new pool_allocator<df_base_ref>
272 ("df_scan ref base", SCAN_PROBLEM_DATA_BLOCK_SIZE);
273 problem_data->ref_artificial_pool = new pool_allocator<df_artificial_ref>
274 ("df_scan ref artificial", SCAN_PROBLEM_DATA_BLOCK_SIZE);
275 problem_data->ref_regular_pool = new pool_allocator<df_regular_ref>
276 ("df_scan ref regular", SCAN_PROBLEM_DATA_BLOCK_SIZE);
277 problem_data->insn_pool = new pool_allocator<df_insn_info>
278 ("df_scan insn", SCAN_PROBLEM_DATA_BLOCK_SIZE);
279 problem_data->reg_pool = new pool_allocator<df_reg_info>
280 ("df_scan reg", SCAN_PROBLEM_DATA_BLOCK_SIZE);
281 problem_data->mw_reg_pool = new pool_allocator<df_mw_hardreg>
282 ("df_scan mw_reg", SCAN_PROBLEM_DATA_BLOCK_SIZE / 16);
284 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
285 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
287 insn_num += insn_num / 4;
288 df_grow_reg_info ();
290 df_grow_insn_info ();
291 df_grow_bb_info (df_scan);
293 FOR_ALL_BB_FN (bb, cfun)
295 unsigned int bb_index = bb->index;
296 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
297 bb_info->artificial_defs = NULL;
298 bb_info->artificial_uses = NULL;
301 bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
302 bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
303 bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
304 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
305 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
306 bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
307 bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
308 bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
309 df_scan->optional_p = false;
313 /* Free all of the data associated with the scan problem. */
315 static void
316 df_scan_free (void)
318 if (df_scan->problem_data)
319 df_scan_free_internal ();
321 if (df->blocks_to_analyze)
323 BITMAP_FREE (df->blocks_to_analyze);
324 df->blocks_to_analyze = NULL;
327 free (df_scan);
330 /* Dump the preamble for DF_SCAN dump. */
331 static void
332 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
334 int i;
335 int dcount = 0;
336 int ucount = 0;
337 int ecount = 0;
338 int icount = 0;
339 int ccount = 0;
340 basic_block bb;
341 rtx_insn *insn;
343 fprintf (file, ";; invalidated by call \t");
344 df_print_regset (file, regs_invalidated_by_call_regset);
345 fprintf (file, ";; hardware regs used \t");
346 df_print_regset (file, &df->hardware_regs_used);
347 fprintf (file, ";; regular block artificial uses \t");
348 df_print_regset (file, &df->regular_block_artificial_uses);
349 fprintf (file, ";; eh block artificial uses \t");
350 df_print_regset (file, &df->eh_block_artificial_uses);
351 fprintf (file, ";; entry block defs \t");
352 df_print_regset (file, df->entry_block_defs);
353 fprintf (file, ";; exit block uses \t");
354 df_print_regset (file, df->exit_block_uses);
355 fprintf (file, ";; regs ever live \t");
356 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
357 if (df_regs_ever_live_p (i))
358 fprintf (file, " %d [%s]", i, reg_names[i]);
359 fprintf (file, "\n;; ref usage \t");
361 for (i = 0; i < (int)df->regs_inited; i++)
362 if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
364 const char * sep = "";
366 fprintf (file, "r%d={", i);
367 if (DF_REG_DEF_COUNT (i))
369 fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
370 sep = ",";
371 dcount += DF_REG_DEF_COUNT (i);
373 if (DF_REG_USE_COUNT (i))
375 fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
376 sep = ",";
377 ucount += DF_REG_USE_COUNT (i);
379 if (DF_REG_EQ_USE_COUNT (i))
381 fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
382 ecount += DF_REG_EQ_USE_COUNT (i);
384 fprintf (file, "} ");
387 FOR_EACH_BB_FN (bb, cfun)
388 FOR_BB_INSNS (bb, insn)
389 if (INSN_P (insn))
391 if (CALL_P (insn))
392 ccount++;
393 else
394 icount++;
397 fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
398 " in %d{%d regular + %d call} insns.\n",
399 dcount + ucount + ecount, dcount, ucount, ecount,
400 icount + ccount, icount, ccount);
403 /* Dump the bb_info for a given basic block. */
404 static void
405 df_scan_start_block (basic_block bb, FILE *file)
407 struct df_scan_bb_info *bb_info
408 = df_scan_get_bb_info (bb->index);
410 if (bb_info)
412 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
413 df_refs_chain_dump (bb_info->artificial_defs, true, file);
414 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
415 df_refs_chain_dump (bb_info->artificial_uses, true, file);
416 fprintf (file, "\n");
418 #if 0
420 rtx_insn *insn;
421 FOR_BB_INSNS (bb, insn)
422 if (INSN_P (insn))
423 df_insn_debug (insn, false, file);
425 #endif
428 static struct df_problem problem_SCAN =
430 DF_SCAN, /* Problem id. */
431 DF_NONE, /* Direction. */
432 df_scan_alloc, /* Allocate the problem specific data. */
433 NULL, /* Reset global information. */
434 df_scan_free_bb_info, /* Free basic block info. */
435 NULL, /* Local compute function. */
436 NULL, /* Init the solution specific data. */
437 NULL, /* Iterative solver. */
438 NULL, /* Confluence operator 0. */
439 NULL, /* Confluence operator n. */
440 NULL, /* Transfer function. */
441 NULL, /* Finalize function. */
442 df_scan_free, /* Free all of the problem information. */
443 NULL, /* Remove this problem from the stack of dataflow problems. */
444 df_scan_start_dump, /* Debugging. */
445 df_scan_start_block, /* Debugging start block. */
446 NULL, /* Debugging end block. */
447 NULL, /* Debugging start insn. */
448 NULL, /* Debugging end insn. */
449 NULL, /* Incremental solution verify start. */
450 NULL, /* Incremental solution verify end. */
451 NULL, /* Dependent problem. */
452 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
453 TV_DF_SCAN, /* Timing variable. */
454 false /* Reset blocks on dropping out of blocks_to_analyze. */
458 /* Create a new DATAFLOW instance and add it to an existing instance
459 of DF. The returned structure is what is used to get at the
460 solution. */
462 void
463 df_scan_add_problem (void)
465 df_add_problem (&problem_SCAN);
469 /*----------------------------------------------------------------------------
470 Storage Allocation Utilities
471 ----------------------------------------------------------------------------*/
474 /* First, grow the reg_info information. If the current size is less than
475 the number of pseudos, grow to 25% more than the number of
476 pseudos.
478 Second, assure that all of the slots up to max_reg_num have been
479 filled with reg_info structures. */
481 void
482 df_grow_reg_info (void)
484 unsigned int max_reg = max_reg_num ();
485 unsigned int new_size = max_reg;
486 struct df_scan_problem_data *problem_data
487 = (struct df_scan_problem_data *) df_scan->problem_data;
488 unsigned int i;
490 if (df->regs_size < new_size)
492 new_size += new_size / 4;
493 df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
494 df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
495 df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
496 new_size);
497 df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
498 df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
499 df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
500 df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
501 df->regs_size = new_size;
504 for (i = df->regs_inited; i < max_reg; i++)
506 struct df_reg_info *reg_info;
508 // TODO
509 reg_info = problem_data->reg_pool->allocate ();
510 memset (reg_info, 0, sizeof (struct df_reg_info));
511 df->def_regs[i] = reg_info;
512 reg_info = problem_data->reg_pool->allocate ();
513 memset (reg_info, 0, sizeof (struct df_reg_info));
514 df->use_regs[i] = reg_info;
515 reg_info = problem_data->reg_pool->allocate ();
516 memset (reg_info, 0, sizeof (struct df_reg_info));
517 df->eq_use_regs[i] = reg_info;
518 df->def_info.begin[i] = 0;
519 df->def_info.count[i] = 0;
520 df->use_info.begin[i] = 0;
521 df->use_info.count[i] = 0;
524 df->regs_inited = max_reg;
528 /* Grow the ref information. */
530 static void
531 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
533 if (ref_info->refs_size < new_size)
535 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
536 memset (ref_info->refs + ref_info->refs_size, 0,
537 (new_size - ref_info->refs_size) *sizeof (df_ref));
538 ref_info->refs_size = new_size;
543 /* Check and grow the ref information if necessary. This routine
544 guarantees total_size + BITMAP_ADDEND amount of entries in refs
545 array. It updates ref_info->refs_size only and does not change
546 ref_info->total_size. */
548 static void
549 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
550 unsigned bitmap_addend)
552 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
554 int new_size = ref_info->total_size + bitmap_addend;
555 new_size += ref_info->total_size / 4;
556 df_grow_ref_info (ref_info, new_size);
561 /* Grow the ref information. If the current size is less than the
562 number of instructions, grow to 25% more than the number of
563 instructions. */
565 void
566 df_grow_insn_info (void)
568 unsigned int new_size = get_max_uid () + 1;
569 if (DF_INSN_SIZE () < new_size)
571 new_size += new_size / 4;
572 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
573 memset (df->insns + df->insns_size, 0,
574 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
575 DF_INSN_SIZE () = new_size;
582 /*----------------------------------------------------------------------------
583 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
584 ----------------------------------------------------------------------------*/
586 /* Rescan all of the block_to_analyze or all of the blocks in the
587 function if df_set_blocks if blocks_to_analyze is NULL; */
589 void
590 df_scan_blocks (void)
592 basic_block bb;
594 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
595 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
597 df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
598 df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);
600 bitmap_ior_into (&df->eh_block_artificial_uses,
601 &df->regular_block_artificial_uses);
603 /* ENTRY and EXIT blocks have special defs/uses. */
604 df_get_entry_block_def_set (df->entry_block_defs);
605 df_record_entry_block_defs (df->entry_block_defs);
606 df_get_exit_block_use_set (df->exit_block_uses);
607 df_record_exit_block_uses (df->exit_block_uses);
608 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
609 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
611 /* Regular blocks */
612 FOR_EACH_BB_FN (bb, cfun)
614 unsigned int bb_index = bb->index;
615 df_bb_refs_record (bb_index, true);
619 /* Create new refs under address LOC within INSN. This function is
620 only used externally. REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
621 depending on whether LOC is inside PATTERN (INSN) or a note. */
623 void
624 df_uses_create (rtx *loc, rtx_insn *insn, int ref_flags)
626 gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
627 df_uses_record (NULL, loc, DF_REF_REG_USE,
628 BLOCK_FOR_INSN (insn),
629 DF_INSN_INFO_GET (insn),
630 ref_flags);
633 static void
634 df_install_ref_incremental (df_ref ref)
636 struct df_reg_info **reg_info;
637 struct df_ref_info *ref_info;
638 df_ref *ref_ptr;
639 bool add_to_table;
641 rtx_insn *insn = DF_REF_INSN (ref);
642 basic_block bb = BLOCK_FOR_INSN (insn);
644 if (DF_REF_REG_DEF_P (ref))
646 reg_info = df->def_regs;
647 ref_info = &df->def_info;
648 ref_ptr = &DF_INSN_DEFS (insn);
649 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
651 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
653 reg_info = df->eq_use_regs;
654 ref_info = &df->use_info;
655 ref_ptr = &DF_INSN_EQ_USES (insn);
656 switch (ref_info->ref_order)
658 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
659 case DF_REF_ORDER_BY_REG_WITH_NOTES:
660 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
661 add_to_table = true;
662 break;
663 default:
664 add_to_table = false;
665 break;
668 else
670 reg_info = df->use_regs;
671 ref_info = &df->use_info;
672 ref_ptr = &DF_INSN_USES (insn);
673 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
676 /* Do not add if ref is not in the right blocks. */
677 if (add_to_table && df->analyze_subset)
678 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
680 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
682 if (add_to_table)
683 switch (ref_info->ref_order)
685 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
686 case DF_REF_ORDER_BY_REG_WITH_NOTES:
687 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
688 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
689 break;
690 default:
691 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
692 break;
695 while (*ref_ptr && df_ref_compare (*ref_ptr, ref) < 0)
696 ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
698 DF_REF_NEXT_LOC (ref) = *ref_ptr;
699 *ref_ptr = ref;
701 #if 0
702 if (dump_file)
704 fprintf (dump_file, "adding ref ");
705 df_ref_debug (ref, dump_file);
707 #endif
708 /* By adding the ref directly, df_insn_rescan my not find any
709 differences even though the block will have changed. So we need
710 to mark the block dirty ourselves. */
711 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
712 df_set_bb_dirty (bb);
717 /*----------------------------------------------------------------------------
718 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
719 ----------------------------------------------------------------------------*/
721 static void
722 df_free_ref (df_ref ref)
724 struct df_scan_problem_data *problem_data
725 = (struct df_scan_problem_data *) df_scan->problem_data;
727 switch (DF_REF_CLASS (ref))
729 case DF_REF_BASE:
730 problem_data->ref_base_pool->remove ((df_base_ref *) (ref));
731 break;
733 case DF_REF_ARTIFICIAL:
734 problem_data->ref_artificial_pool->remove
735 ((df_artificial_ref *) (ref));
736 break;
738 case DF_REF_REGULAR:
739 problem_data->ref_regular_pool->remove
740 ((df_regular_ref *) (ref));
741 break;
746 /* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
747 Also delete the def-use or use-def chain if it exists. */
749 static void
750 df_reg_chain_unlink (df_ref ref)
752 df_ref next = DF_REF_NEXT_REG (ref);
753 df_ref prev = DF_REF_PREV_REG (ref);
754 int id = DF_REF_ID (ref);
755 struct df_reg_info *reg_info;
756 df_ref *refs = NULL;
758 if (DF_REF_REG_DEF_P (ref))
760 int regno = DF_REF_REGNO (ref);
761 reg_info = DF_REG_DEF_GET (regno);
762 refs = df->def_info.refs;
764 else
766 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
768 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
769 switch (df->use_info.ref_order)
771 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
772 case DF_REF_ORDER_BY_REG_WITH_NOTES:
773 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
774 refs = df->use_info.refs;
775 break;
776 default:
777 break;
780 else
782 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
783 refs = df->use_info.refs;
787 if (refs)
789 if (df->analyze_subset)
791 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
792 refs[id] = NULL;
794 else
795 refs[id] = NULL;
798 /* Delete any def-use or use-def chains that start here. It is
799 possible that there is trash in this field. This happens for
800 insns that have been deleted when rescanning has been deferred
801 and the chain problem has also been deleted. The chain tear down
802 code skips deleted insns. */
803 if (df_chain && DF_REF_CHAIN (ref))
804 df_chain_unlink (ref);
806 reg_info->n_refs--;
807 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
809 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
810 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
813 /* Unlink from the reg chain. If there is no prev, this is the
814 first of the list. If not, just join the next and prev. */
815 if (prev)
816 DF_REF_NEXT_REG (prev) = next;
817 else
819 gcc_assert (reg_info->reg_chain == ref);
820 reg_info->reg_chain = next;
822 if (next)
823 DF_REF_PREV_REG (next) = prev;
825 df_free_ref (ref);
829 /* Create the insn record for INSN. If there was one there, zero it
830 out. */
832 struct df_insn_info *
833 df_insn_create_insn_record (rtx_insn *insn)
835 struct df_scan_problem_data *problem_data
836 = (struct df_scan_problem_data *) df_scan->problem_data;
837 struct df_insn_info *insn_rec;
839 df_grow_insn_info ();
840 insn_rec = DF_INSN_INFO_GET (insn);
841 if (!insn_rec)
843 insn_rec = problem_data->insn_pool->allocate ();
844 DF_INSN_INFO_SET (insn, insn_rec);
846 memset (insn_rec, 0, sizeof (struct df_insn_info));
847 insn_rec->insn = insn;
848 return insn_rec;
852 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
854 static void
855 df_ref_chain_delete_du_chain (df_ref ref)
857 for (; ref; ref = DF_REF_NEXT_LOC (ref))
858 /* CHAIN is allocated by DF_CHAIN. So make sure to
859 pass df_scan instance for the problem. */
860 if (DF_REF_CHAIN (ref))
861 df_chain_unlink (ref);
865 /* Delete all refs in the ref chain. */
867 static void
868 df_ref_chain_delete (df_ref ref)
870 df_ref next;
871 for (; ref; ref = next)
873 next = DF_REF_NEXT_LOC (ref);
874 df_reg_chain_unlink (ref);
879 /* Delete the hardreg chain. */
881 static void
882 df_mw_hardreg_chain_delete (struct df_mw_hardreg *hardregs)
884 struct df_scan_problem_data *problem_data
885 = (struct df_scan_problem_data *) df_scan->problem_data;
886 df_mw_hardreg *next;
888 for (; hardregs; hardregs = next)
890 next = DF_MWS_NEXT (hardregs);
891 problem_data->mw_reg_pool->remove (hardregs);
896 /* Delete all of the refs information from the insn with UID.
897 Internal helper for df_insn_delete, df_insn_rescan, and other
898 df-scan routines that don't have to work in deferred mode
899 and do not have to mark basic blocks for re-processing. */
901 static void
902 df_insn_info_delete (unsigned int uid)
904 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
906 bitmap_clear_bit (&df->insns_to_delete, uid);
907 bitmap_clear_bit (&df->insns_to_rescan, uid);
908 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
909 if (insn_info)
911 struct df_scan_problem_data *problem_data
912 = (struct df_scan_problem_data *) df_scan->problem_data;
914 /* In general, notes do not have the insn_info fields
915 initialized. However, combine deletes insns by changing them
916 to notes. How clever. So we cannot just check if it is a
917 valid insn before short circuiting this code, we need to see
918 if we actually initialized it. */
919 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
921 if (df_chain)
923 df_ref_chain_delete_du_chain (insn_info->defs);
924 df_ref_chain_delete_du_chain (insn_info->uses);
925 df_ref_chain_delete_du_chain (insn_info->eq_uses);
928 df_ref_chain_delete (insn_info->defs);
929 df_ref_chain_delete (insn_info->uses);
930 df_ref_chain_delete (insn_info->eq_uses);
932 problem_data->insn_pool->remove (insn_info);
933 DF_INSN_UID_SET (uid, NULL);
937 /* Delete all of the refs information from INSN, either right now
938 or marked for later in deferred mode. */
940 void
941 df_insn_delete (rtx_insn *insn)
943 unsigned int uid;
944 basic_block bb;
946 gcc_checking_assert (INSN_P (insn));
948 if (!df)
949 return;
951 uid = INSN_UID (insn);
952 bb = BLOCK_FOR_INSN (insn);
954 /* ??? bb can be NULL after pass_free_cfg. At that point, DF should
955 not exist anymore (as mentioned in df-core.c: "The only requirement
956 [for DF] is that there be a correct control flow graph." Clearly
957 that isn't the case after pass_free_cfg. But DF is freed much later
958 because some back-ends want to use DF info even though the CFG is
959 already gone. It's not clear to me whether that is safe, actually.
960 In any case, we expect BB to be non-NULL at least up to register
961 allocation, so disallow a non-NULL BB up to there. Not perfect
962 but better than nothing... */
963 gcc_checking_assert (bb != NULL || reload_completed);
965 df_grow_bb_info (df_scan);
966 df_grow_reg_info ();
968 /* The block must be marked as dirty now, rather than later as in
969 df_insn_rescan and df_notes_rescan because it may not be there at
970 rescanning time and the mark would blow up.
971 DEBUG_INSNs do not make a block's data flow solution dirty (at
972 worst the LUIDs are no longer contiguous). */
973 if (bb != NULL && NONDEBUG_INSN_P (insn))
974 df_set_bb_dirty (bb);
976 /* The client has deferred rescanning. */
977 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
979 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
980 if (insn_info)
982 bitmap_clear_bit (&df->insns_to_rescan, uid);
983 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
984 bitmap_set_bit (&df->insns_to_delete, uid);
986 if (dump_file)
987 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
988 return;
991 if (dump_file)
992 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
994 df_insn_info_delete (uid);
998 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
1000 static void
1001 df_free_collection_rec (struct df_collection_rec *collection_rec)
1003 unsigned int ix;
1004 struct df_scan_problem_data *problem_data
1005 = (struct df_scan_problem_data *) df_scan->problem_data;
1006 df_ref ref;
1007 struct df_mw_hardreg *mw;
1009 FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
1010 df_free_ref (ref);
1011 FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
1012 df_free_ref (ref);
1013 FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
1014 df_free_ref (ref);
1015 FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
1016 problem_data->mw_reg_pool->remove (mw);
1018 collection_rec->def_vec.release ();
1019 collection_rec->use_vec.release ();
1020 collection_rec->eq_use_vec.release ();
1021 collection_rec->mw_vec.release ();
1024 /* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */
1026 bool
1027 df_insn_rescan (rtx_insn *insn)
1029 unsigned int uid = INSN_UID (insn);
1030 struct df_insn_info *insn_info = NULL;
1031 basic_block bb = BLOCK_FOR_INSN (insn);
1032 struct df_collection_rec collection_rec;
/* Without dataflow info, or for a non-insn (note, barrier, ...),
   there is nothing to rescan.  */
1034 if ((!df) || (!INSN_P (insn)))
1035 return false;
/* An insn not yet placed in a basic block cannot be scanned.  */
1037 if (!bb)
1039 if (dump_file)
1040 fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
1041 return false;
1044 /* The client has disabled rescanning and plans to do it itself. */
1045 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1046 return false;
/* Make sure the per-bb and per-reg scan arrays can hold this insn.  */
1048 df_grow_bb_info (df_scan);
1049 df_grow_reg_info ();
1051 insn_info = DF_INSN_UID_SAFE_GET (uid);
1053 /* The client has deferred rescanning. */
1054 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1056 if (!insn_info)
/* Record the insn with empty ref chains; the real scan happens in
   df_process_deferred_rescans.  */
1058 insn_info = df_insn_create_insn_record (insn);
1059 insn_info->defs = 0;
1060 insn_info->uses = 0;
1061 insn_info->eq_uses = 0;
1062 insn_info->mw_hardregs = 0;
1064 if (dump_file)
1065 fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);
/* A deferred rescan supersedes any pending delete or notes-rescan
   queued for this insn.  */
1067 bitmap_clear_bit (&df->insns_to_delete, uid);
1068 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1069 bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
1070 return false;
/* Rescanning immediately: drop any deferred work for this insn.  */
1073 bitmap_clear_bit (&df->insns_to_delete, uid);
1074 bitmap_clear_bit (&df->insns_to_rescan, uid);
1075 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1076 if (insn_info)
1078 int luid;
/* Collect the refs afresh and compare against the existing info.  */
1079 bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
1080 /* If there's no change, return false. */
1081 if (the_same)
1083 df_free_collection_rec (&collection_rec);
1084 if (dump_file)
1085 fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
1086 return false;
1088 if (dump_file)
1089 fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);
1091 /* There's change - we need to delete the existing info.
1092 Since the insn isn't moved, we can salvage its LUID. */
1093 luid = DF_INSN_LUID (insn);
1094 df_insn_info_delete (uid);
1095 df_insn_create_insn_record (insn);
1096 DF_INSN_LUID (insn) = luid;
1098 else
/* First time this insn is seen: scan it from scratch.  */
1100 struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
1101 df_insn_refs_collect (&collection_rec, bb, insn_info);
1102 if (dump_file)
1103 fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
/* Install the collected refs into the insn and per-reg chains.  */
1106 df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
/* Debug insns never affect the dataflow solution, so the block need
   not be recomputed for them.  */
1107 if (!DEBUG_INSN_P (insn))
1108 df_set_bb_dirty (bb);
1110 return true;
1113 /* Same as df_insn_rescan, but don't mark the basic block as
1114 dirty. */
1116 bool
1117 df_insn_rescan_debug_internal (rtx_insn *insn)
1119 unsigned int uid = INSN_UID (insn);
1120 struct df_insn_info *insn_info;
/* Only valid for debug insns whose location has been reset.  */
1122 gcc_assert (DEBUG_INSN_P (insn)
1123 && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));
1125 if (!df)
1126 return false;
/* Nothing recorded for this insn: nothing to clear.  */
1128 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
1129 if (!insn_info)
1130 return false;
1132 if (dump_file)
1133 fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);
/* Cancel any deferred work queued for this insn.  */
1135 bitmap_clear_bit (&df->insns_to_delete, uid);
1136 bitmap_clear_bit (&df->insns_to_rescan, uid);
1137 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
/* Chains already empty: report that nothing changed.  */
1139 if (insn_info->defs == 0
1140 && insn_info->uses == 0
1141 && insn_info->eq_uses == 0
1142 && insn_info->mw_hardregs == 0)
1143 return false;
1145 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
/* When def-use/use-def chains are being kept, unlink them first.  */
1147 if (df_chain)
1149 df_ref_chain_delete_du_chain (insn_info->defs);
1150 df_ref_chain_delete_du_chain (insn_info->uses);
1151 df_ref_chain_delete_du_chain (insn_info->eq_uses);
1154 df_ref_chain_delete (insn_info->defs);
1155 df_ref_chain_delete (insn_info->uses);
1156 df_ref_chain_delete (insn_info->eq_uses);
/* Leave the record in place but empty; TRUE = something changed.  */
1158 insn_info->defs = 0;
1159 insn_info->uses = 0;
1160 insn_info->eq_uses = 0;
1161 insn_info->mw_hardregs = 0;
1163 return true;
1167 /* Rescan all of the insns in the function. Note that the artificial
1168 uses and defs are not touched. This function will destroy def-use
1169 or use-def chains. */
1171 void
1172 df_insn_rescan_all (void)
1174 bool no_insn_rescan = false;
1175 bool defer_insn_rescan = false;
1176 basic_block bb;
1177 bitmap_iterator bi;
1178 unsigned int uid;
1179 bitmap_head tmp;
1181 bitmap_initialize (&tmp, &df_bitmap_obstack);
/* Temporarily force immediate rescanning; the caller's flags are
   restored at the end.  */
1183 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1185 df_clear_flags (DF_NO_INSN_RESCAN);
1186 no_insn_rescan = true;
1189 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1191 df_clear_flags (DF_DEFER_INSN_RESCAN);
1192 defer_insn_rescan = true;
/* First flush any deferred deletions.  Iterate over a copy because
   df_insn_info_delete modifies the underlying bitmaps.  */
1195 bitmap_copy (&tmp, &df->insns_to_delete);
1196 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1198 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1199 if (insn_info)
1200 df_insn_info_delete (uid);
/* All deferred work is now moot: everything gets rescanned below.  */
1203 bitmap_clear (&tmp);
1204 bitmap_clear (&df->insns_to_delete);
1205 bitmap_clear (&df->insns_to_rescan);
1206 bitmap_clear (&df->insns_to_notes_rescan);
1208 FOR_EACH_BB_FN (bb, cfun)
1210 rtx_insn *insn;
1211 FOR_BB_INSNS (bb, insn)
1213 df_insn_rescan (insn);
/* Restore the caller's rescan policy.  */
1217 if (no_insn_rescan)
1218 df_set_flags (DF_NO_INSN_RESCAN);
1219 if (defer_insn_rescan)
1220 df_set_flags (DF_DEFER_INSN_RESCAN);
1224 /* Process all of the deferred rescans or deletions. */
1226 void
1227 df_process_deferred_rescans (void)
1229 bool no_insn_rescan = false;
1230 bool defer_insn_rescan = false;
1231 bitmap_iterator bi;
1232 unsigned int uid;
1233 bitmap_head tmp;
1235 bitmap_initialize (&tmp, &df_bitmap_obstack);
/* Switch to immediate rescanning so the queued work is done now;
   the caller's flags are restored at the end.  */
1237 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1239 df_clear_flags (DF_NO_INSN_RESCAN);
1240 no_insn_rescan = true;
1243 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1245 df_clear_flags (DF_DEFER_INSN_RESCAN);
1246 defer_insn_rescan = true;
1249 if (dump_file)
1250 fprintf (dump_file, "starting the processing of deferred insns\n");
/* Deletions first.  Iterate over a copy since the handlers update
   the underlying bitmaps as they go.  */
1252 bitmap_copy (&tmp, &df->insns_to_delete);
1253 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1255 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1256 if (insn_info)
1257 df_insn_info_delete (uid);
/* Then full rescans.  */
1260 bitmap_copy (&tmp, &df->insns_to_rescan);
1261 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1263 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1264 if (insn_info)
1265 df_insn_rescan (insn_info->insn);
/* Finally the REG_EQUIV/REG_EQUAL note rescans.  */
1268 bitmap_copy (&tmp, &df->insns_to_notes_rescan);
1269 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1271 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1272 if (insn_info)
1273 df_notes_rescan (insn_info->insn);
1276 if (dump_file)
1277 fprintf (dump_file, "ending the processing of deferred insns\n");
1279 bitmap_clear (&tmp);
1280 bitmap_clear (&df->insns_to_delete);
1281 bitmap_clear (&df->insns_to_rescan);
1282 bitmap_clear (&df->insns_to_notes_rescan);
/* Restore the caller's rescan policy.  */
1284 if (no_insn_rescan)
1285 df_set_flags (DF_NO_INSN_RESCAN);
1286 if (defer_insn_rescan)
1287 df_set_flags (DF_DEFER_INSN_RESCAN);
1289 /* If someone changed regs_ever_live during this pass, fix up the
1290 entry and exit blocks. */
1291 if (df->redo_entry_and_exit)
1293 df_update_entry_exit_and_calls ();
1294 df->redo_entry_and_exit = false;
1299 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1300 the uses if INCLUDE_USES. Include the eq_uses if
1301 INCLUDE_EQ_USES. */
1303 static unsigned int
1304 df_count_refs (bool include_defs, bool include_uses,
1305 bool include_eq_uses)
1307 unsigned int regno;
1308 int size = 0;
1309 unsigned int m = df->regs_inited;
1311 for (regno = 0; regno < m; regno++)
1313 if (include_defs)
1314 size += DF_REG_DEF_COUNT (regno);
1315 if (include_uses)
1316 size += DF_REG_USE_COUNT (regno);
1317 if (include_eq_uses)
1318 size += DF_REG_EQ_USE_COUNT (regno);
1320 return size;
1324 /* Build the ref table for either the uses or defs from the reg-use
1325 or reg-def chains. This version processes the refs in reg order
1326 which is likely to be best if processing the whole function. */
1328 static void
1329 df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
1330 bool include_defs,
1331 bool include_uses,
1332 bool include_eq_uses)
1334 unsigned int m = df->regs_inited;
1335 unsigned int regno;
1336 unsigned int offset = 0;
1337 unsigned int start;
/* With DF_NO_HARD_REGS the hard registers are skipped; clear their
   begin/count slots so stale data is not read later.  */
1339 if (df->changeable_flags & DF_NO_HARD_REGS)
1341 start = FIRST_PSEUDO_REGISTER;
1342 memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1343 memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
1345 else
1346 start = 0;
1348 ref_info->total_size
1349 = df_count_refs (include_defs, include_uses, include_eq_uses);
1351 df_check_and_grow_ref_info (ref_info, 1);
/* Lay the refs out register by register, walking each requested
   chain and assigning consecutive table ids.  */
1353 for (regno = start; regno < m; regno++)
1355 int count = 0;
1356 ref_info->begin[regno] = offset;
1357 if (include_defs)
1359 df_ref ref = DF_REG_DEF_CHAIN (regno);
1360 while (ref)
1362 ref_info->refs[offset] = ref;
1363 DF_REF_ID (ref) = offset++;
1364 count++;
1365 ref = DF_REF_NEXT_REG (ref);
1366 gcc_checking_assert (offset < ref_info->refs_size);
1369 if (include_uses)
1371 df_ref ref = DF_REG_USE_CHAIN (regno);
1372 while (ref)
1374 ref_info->refs[offset] = ref;
1375 DF_REF_ID (ref) = offset++;
1376 count++;
1377 ref = DF_REF_NEXT_REG (ref);
1378 gcc_checking_assert (offset < ref_info->refs_size);
1381 if (include_eq_uses)
1383 df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
1384 while (ref)
1386 ref_info->refs[offset] = ref;
1387 DF_REF_ID (ref) = offset++;
1388 count++;
1389 ref = DF_REF_NEXT_REG (ref);
1390 gcc_checking_assert (offset < ref_info->refs_size);
1393 ref_info->count[regno] = count;
1396 /* The bitmap size is not decremented when refs are deleted. So
1397 reset it now that we have squished out all of the empty
1398 slots. */
1399 ref_info->table_size = offset;
1403 /* Take build ref table for either the uses or defs from the reg-use
1404 or reg-def chains. This version processes the refs in insn order
1405 which is likely to be best if processing some segment of the
1406 function. */
1408 static void
1409 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1410 bool include_defs,
1411 bool include_uses,
1412 bool include_eq_uses)
1414 bitmap_iterator bi;
1415 unsigned int bb_index;
1416 unsigned int m = df->regs_inited;
1417 unsigned int offset = 0;
1418 unsigned int r;
1419 unsigned int start
1420 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1422 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1423 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1425 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1426 df_check_and_grow_ref_info (ref_info, 1);
1428 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1430 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1431 rtx_insn *insn;
1432 df_ref def, use;
1434 if (include_defs)
1435 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1437 unsigned int regno = DF_REF_REGNO (def);
1438 ref_info->count[regno]++;
1440 if (include_uses)
1441 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1443 unsigned int regno = DF_REF_REGNO (use);
1444 ref_info->count[regno]++;
1447 FOR_BB_INSNS (bb, insn)
1449 if (INSN_P (insn))
1451 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1453 if (include_defs)
1454 FOR_EACH_INSN_INFO_DEF (def, insn_info)
1456 unsigned int regno = DF_REF_REGNO (def);
1457 ref_info->count[regno]++;
1459 if (include_uses)
1460 FOR_EACH_INSN_INFO_USE (use, insn_info)
1462 unsigned int regno = DF_REF_REGNO (use);
1463 ref_info->count[regno]++;
1465 if (include_eq_uses)
1466 FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1468 unsigned int regno = DF_REF_REGNO (use);
1469 ref_info->count[regno]++;
1475 for (r = start; r < m; r++)
1477 ref_info->begin[r] = offset;
1478 offset += ref_info->count[r];
1479 ref_info->count[r] = 0;
1482 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1484 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1485 rtx_insn *insn;
1486 df_ref def, use;
1488 if (include_defs)
1489 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1491 unsigned int regno = DF_REF_REGNO (def);
1492 if (regno >= start)
1494 unsigned int id
1495 = ref_info->begin[regno] + ref_info->count[regno]++;
1496 DF_REF_ID (def) = id;
1497 ref_info->refs[id] = def;
1500 if (include_uses)
1501 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1503 unsigned int regno = DF_REF_REGNO (def);
1504 if (regno >= start)
1506 unsigned int id
1507 = ref_info->begin[regno] + ref_info->count[regno]++;
1508 DF_REF_ID (use) = id;
1509 ref_info->refs[id] = use;
1513 FOR_BB_INSNS (bb, insn)
1515 if (INSN_P (insn))
1517 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1519 if (include_defs)
1520 FOR_EACH_INSN_INFO_DEF (def, insn_info)
1522 unsigned int regno = DF_REF_REGNO (def);
1523 if (regno >= start)
1525 unsigned int id
1526 = ref_info->begin[regno] + ref_info->count[regno]++;
1527 DF_REF_ID (def) = id;
1528 ref_info->refs[id] = def;
1531 if (include_uses)
1532 FOR_EACH_INSN_INFO_USE (use, insn_info)
1534 unsigned int regno = DF_REF_REGNO (use);
1535 if (regno >= start)
1537 unsigned int id
1538 = ref_info->begin[regno] + ref_info->count[regno]++;
1539 DF_REF_ID (use) = id;
1540 ref_info->refs[id] = use;
1543 if (include_eq_uses)
1544 FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1546 unsigned int regno = DF_REF_REGNO (use);
1547 if (regno >= start)
1549 unsigned int id
1550 = ref_info->begin[regno] + ref_info->count[regno]++;
1551 DF_REF_ID (use) = id;
1552 ref_info->refs[id] = use;
1559 /* The bitmap size is not decremented when refs are deleted. So
1560 reset it now that we have squished out all of the empty
1561 slots. */
1563 ref_info->table_size = offset;
1566 /* Take build ref table for either the uses or defs from the reg-use
1567 or reg-def chains. */
1569 static void
1570 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1571 bool include_defs,
1572 bool include_uses,
1573 bool include_eq_uses)
1575 if (df->analyze_subset)
1576 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1577 include_uses, include_eq_uses);
1578 else
1579 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1580 include_uses, include_eq_uses);
1584 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1585 static unsigned int
1586 df_add_refs_to_table (unsigned int offset,
1587 struct df_ref_info *ref_info,
1588 df_ref ref)
1590 for (; ref; ref = DF_REF_NEXT_LOC (ref))
1591 if (!(df->changeable_flags & DF_NO_HARD_REGS)
1592 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1594 ref_info->refs[offset] = ref;
1595 DF_REF_ID (ref) = offset++;
1597 return offset;
1601 /* Count the number of refs in all of the insns of BB. Include the
1602 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1603 eq_uses if INCLUDE_EQ_USES. */
1605 static unsigned int
1606 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1607 struct df_ref_info *ref_info,
1608 bool include_defs, bool include_uses,
1609 bool include_eq_uses)
1611 rtx_insn *insn;
1613 if (include_defs)
1614 offset = df_add_refs_to_table (offset, ref_info,
1615 df_get_artificial_defs (bb->index));
1616 if (include_uses)
1617 offset = df_add_refs_to_table (offset, ref_info,
1618 df_get_artificial_uses (bb->index));
1620 FOR_BB_INSNS (bb, insn)
1621 if (INSN_P (insn))
1623 unsigned int uid = INSN_UID (insn);
1624 if (include_defs)
1625 offset = df_add_refs_to_table (offset, ref_info,
1626 DF_INSN_UID_DEFS (uid));
1627 if (include_uses)
1628 offset = df_add_refs_to_table (offset, ref_info,
1629 DF_INSN_UID_USES (uid));
1630 if (include_eq_uses)
1631 offset = df_add_refs_to_table (offset, ref_info,
1632 DF_INSN_UID_EQ_USES (uid));
1634 return offset;
1638 /* Organize the refs by insn into the table in REF_INFO. If
1639 blocks_to_analyze is defined, use that set, otherwise the entire
1640 program. Include the defs if INCLUDE_DEFS. Include the uses if
1641 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1643 static void
1644 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1645 bool include_defs, bool include_uses,
1646 bool include_eq_uses)
1648 basic_block bb;
1649 unsigned int offset = 0;
1651 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1652 df_check_and_grow_ref_info (ref_info, 1);
/* Restrict to the analyzed subset of blocks when one is set.  */
1653 if (df->blocks_to_analyze)
1655 bitmap_iterator bi;
1656 unsigned int index;
1658 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1660 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK_FOR_FN (cfun,
1661 index),
1662 offset, ref_info,
1663 include_defs, include_uses,
1664 include_eq_uses);
1667 ref_info->table_size = offset;
1669 else
/* Otherwise walk every block, including entry and exit.  */
1671 FOR_ALL_BB_FN (bb, cfun)
1672 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1673 include_defs, include_uses,
1674 include_eq_uses);
1675 ref_info->table_size = offset;
1680 /* If the use refs in DF are not organized, reorganize them. */
1682 void
1683 df_maybe_reorganize_use_refs (enum df_ref_order order)
1685 if (order == df->use_info.ref_order)
1686 return;
1688 switch (order)
1690 case DF_REF_ORDER_BY_REG:
1691 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1692 break;
1694 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1695 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1696 break;
1698 case DF_REF_ORDER_BY_INSN:
1699 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1700 break;
1702 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1703 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1704 break;
1706 case DF_REF_ORDER_NO_TABLE:
1707 free (df->use_info.refs);
1708 df->use_info.refs = NULL;
1709 df->use_info.refs_size = 0;
1710 break;
1712 case DF_REF_ORDER_UNORDERED:
1713 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1714 gcc_unreachable ();
1715 break;
1718 df->use_info.ref_order = order;
1722 /* If the def refs in DF are not organized, reorganize them. */
1724 void
1725 df_maybe_reorganize_def_refs (enum df_ref_order order)
1727 if (order == df->def_info.ref_order)
1728 return;
1730 switch (order)
1732 case DF_REF_ORDER_BY_REG:
1733 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1734 break;
1736 case DF_REF_ORDER_BY_INSN:
1737 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1738 break;
1740 case DF_REF_ORDER_NO_TABLE:
1741 free (df->def_info.refs);
1742 df->def_info.refs = NULL;
1743 df->def_info.refs_size = 0;
1744 break;
1746 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1747 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1748 case DF_REF_ORDER_UNORDERED:
1749 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1750 gcc_unreachable ();
1751 break;
1754 df->def_info.ref_order = order;
1758 /* Change all of the basic block references in INSN to use the insn's
1759 current basic block. This function is called from routines that move
1760 instructions from one block to another. */
1762 void
1763 df_insn_change_bb (rtx_insn *insn, basic_block new_bb)
1765 basic_block old_bb = BLOCK_FOR_INSN (insn);
1766 struct df_insn_info *insn_info;
1767 unsigned int uid = INSN_UID (insn);
1769 if (old_bb == new_bb)
1770 return;
/* Point the insn at its new block even when dataflow is off.  */
1772 set_block_for_insn (insn, new_bb);
1774 if (!df)
1775 return;
1777 if (dump_file)
1778 fprintf (dump_file, "changing bb of uid %d\n", uid);
1780 insn_info = DF_INSN_UID_SAFE_GET (uid);
1781 if (insn_info == NULL)
/* Never scanned before: a full rescan records it in NEW_BB.  */
1783 if (dump_file)
1784 fprintf (dump_file, " unscanned insn\n");
1785 df_insn_rescan (insn);
1786 return;
1789 if (!INSN_P (insn))
1790 return;
/* Both the source and destination blocks need their dataflow
   recomputed.  */
1792 df_set_bb_dirty (new_bb);
1793 if (old_bb)
1795 if (dump_file)
1796 fprintf (dump_file, " from %d to %d\n",
1797 old_bb->index, new_bb->index);
1798 df_set_bb_dirty (old_bb);
1800 else
1801 if (dump_file)
1802 fprintf (dump_file, " to %d\n", new_bb->index);
1806 /* Helper function for df_ref_change_reg_with_loc. */
/* Move every ref on OLD_DF's chain whose *DF_REF_LOC is LOC over to
   NEW_DF / NEW_REGNO.  Because the regno is a sort key of the insn's
   ref lists, a moved ref may also have to migrate within its list.  */
1808 static void
1809 df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
1810 struct df_reg_info *new_df,
1811 unsigned int new_regno, rtx loc)
1813 df_ref the_ref = old_df->reg_chain;
1815 while (the_ref)
/* Artificial refs have no loc and are therefore never moved.  */
1817 if ((!DF_REF_IS_ARTIFICIAL (the_ref))
1818 && DF_REF_LOC (the_ref)
1819 && (*DF_REF_LOC (the_ref) == loc))
1821 df_ref next_ref = DF_REF_NEXT_REG (the_ref);
1822 df_ref prev_ref = DF_REF_PREV_REG (the_ref);
1823 df_ref *ref_ptr;
1824 struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
1826 DF_REF_REGNO (the_ref) = new_regno;
1827 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
1829 /* Pull the_ref out of the old regno chain. */
1830 if (prev_ref)
1831 DF_REF_NEXT_REG (prev_ref) = next_ref;
1832 else
1833 old_df->reg_chain = next_ref;
1834 if (next_ref)
1835 DF_REF_PREV_REG (next_ref) = prev_ref;
1836 old_df->n_refs--;
1838 /* Put the ref into the new regno chain. */
1839 DF_REF_PREV_REG (the_ref) = NULL;
1840 DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
1841 if (new_df->reg_chain)
1842 DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
1843 new_df->reg_chain = the_ref;
1844 new_df->n_refs++;
1845 if (DF_REF_BB (the_ref))
1846 df_set_bb_dirty (DF_REF_BB (the_ref));
1848 /* Need to sort the record again that the ref was in because
1849 the regno is a sorting key. First, find the right
1850 record. */
1851 if (DF_REF_REG_DEF_P (the_ref))
1852 ref_ptr = &insn_info->defs;
1853 else if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
1854 ref_ptr = &insn_info->eq_uses;
1855 else
1856 ref_ptr = &insn_info->uses;
1857 if (dump_file)
1858 fprintf (dump_file, "changing reg in insn %d\n",
1859 DF_REF_INSN_UID (the_ref));
1861 /* Stop if we find the current reference or where the reference
1862 needs to be. */
1863 while (*ref_ptr != the_ref && df_ref_compare (*ref_ptr, the_ref) < 0)
1864 ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
1865 if (*ref_ptr != the_ref)
1867 /* The reference needs to be promoted up the list. */
1868 df_ref next = DF_REF_NEXT_LOC (the_ref);
1869 DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
1870 *ref_ptr = the_ref;
/* Walk forward to the ref's old position and splice it out.  */
1872 ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
1873 while (*ref_ptr != the_ref);
1874 *ref_ptr = next;
1876 else if (DF_REF_NEXT_LOC (the_ref)
1877 && df_ref_compare (the_ref, DF_REF_NEXT_LOC (the_ref)) > 0)
1879 /* The reference needs to be demoted down the list. */
1880 *ref_ptr = DF_REF_NEXT_LOC (the_ref);
/* Find the first successor that is not smaller, then reinsert
   the ref in front of it.  */
1882 ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
1883 while (*ref_ptr && df_ref_compare (the_ref, *ref_ptr) > 0);
1884 DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
1885 *ref_ptr = the_ref;
1888 the_ref = next_ref;
1890 else
1891 the_ref = DF_REF_NEXT_REG (the_ref);
1896 /* Change the regno of register LOC to NEW_REGNO and update the df
1897 information accordingly. Refs that do not match LOC are not changed
1898 which means that artificial refs are not changed since they have no loc.
1899 This call is to support the SET_REGNO macro. */
1901 void
1902 df_ref_change_reg_with_loc (rtx loc, unsigned int new_regno)
1904 unsigned int old_regno = REGNO (loc);
1905 if (old_regno == new_regno)
1906 return;
1908 if (df)
1910 df_grow_reg_info ();
1912 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
1913 DF_REG_DEF_GET (new_regno),
1914 new_regno, loc);
1915 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
1916 DF_REG_USE_GET (new_regno),
1917 new_regno, loc);
1918 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
1919 DF_REG_EQ_USE_GET (new_regno),
1920 new_regno, loc);
1922 set_mode_and_regno (loc, GET_MODE (loc), new_regno);
1926 /* Delete the mw_hardregs that point into the eq_notes. */
1928 static void
1929 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1931 struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs;
1932 struct df_scan_problem_data *problem_data
1933 = (struct df_scan_problem_data *) df_scan->problem_data;
/* Walk the chain through a pointer-to-pointer so note entries can be
   unlinked in place without tracking a prev node.  */
1935 while (*mw_ptr)
1937 df_mw_hardreg *mw = *mw_ptr;
1938 if (mw->flags & DF_REF_IN_NOTE)
1940 *mw_ptr = DF_MWS_NEXT (mw);
1941 problem_data->mw_reg_pool->remove (mw);
1943 else
1944 mw_ptr = &DF_MWS_NEXT (mw);
1949 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
1951 void
1952 df_notes_rescan (rtx_insn *insn)
1954 struct df_insn_info *insn_info;
1955 unsigned int uid = INSN_UID (insn);
1957 if (!df)
1958 return;
1960 /* The client has disabled rescanning and plans to do it itself. */
1961 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1962 return;
1964 /* Do nothing if the insn hasn't been emitted yet. */
1965 if (!BLOCK_FOR_INSN (insn))
1966 return;
1968 df_grow_bb_info (df_scan);
1969 df_grow_reg_info ();
1971 insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
1973 /* The client has deferred rescanning. */
1974 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1976 if (!insn_info)
/* Record the insn with empty chains; the real work happens later
   in df_process_deferred_rescans.  */
1978 insn_info = df_insn_create_insn_record (insn);
1979 insn_info->defs = 0;
1980 insn_info->uses = 0;
1981 insn_info->eq_uses = 0;
1982 insn_info->mw_hardregs = 0;
1985 bitmap_clear_bit (&df->insns_to_delete, uid);
1986 /* If the insn is set to be rescanned, it does not need to also
1987 be notes rescanned. */
1988 if (!bitmap_bit_p (&df->insns_to_rescan, uid))
1989 bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
1990 return;
/* Rescanning the notes now: cancel deferred work for them.  */
1993 bitmap_clear_bit (&df->insns_to_delete, uid);
1994 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
1996 if (insn_info)
1998 basic_block bb = BLOCK_FOR_INSN (insn);
1999 rtx note;
2000 struct df_collection_rec collection_rec;
2001 unsigned int i;
/* Throw away the existing note-derived eq_uses and mw_hardregs.  */
2003 df_mw_hardreg_chain_delete_eq_uses (insn_info);
2004 df_ref_chain_delete (insn_info->eq_uses);
2005 insn_info->eq_uses = NULL;
2007 /* Process REG_EQUIV/REG_EQUAL notes */
2008 for (note = REG_NOTES (insn); note;
2009 note = XEXP (note, 1))
2011 switch (REG_NOTE_KIND (note))
2013 case REG_EQUIV:
2014 case REG_EQUAL:
2015 df_uses_record (&collection_rec,
2016 &XEXP (note, 0), DF_REF_REG_USE,
2017 bb, insn_info, DF_REF_IN_NOTE);
/* Fallthrough: other note kinds are ignored.  */
2018 default:
2019 break;
2023 /* Find some place to put any new mw_hardregs. */
2024 df_canonize_collection_rec (&collection_rec);
/* Splice each new mw_hardreg into the insn's sorted mw chain.  */
2025 struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs, *mw;
2026 FOR_EACH_VEC_ELT (collection_rec.mw_vec, i, mw)
2028 while (*mw_ptr && df_mw_compare (*mw_ptr, mw) < 0)
2029 mw_ptr = &DF_MWS_NEXT (*mw_ptr);
2030 DF_MWS_NEXT (mw) = *mw_ptr;
2031 *mw_ptr = mw;
2032 mw_ptr = &DF_MWS_NEXT (mw);
2034 df_refs_add_to_chains (&collection_rec, bb, insn, copy_eq_uses);
2036 else
/* Not scanned yet: a full rescan covers the notes too.  */
2037 df_insn_rescan (insn);
2042 /*----------------------------------------------------------------------------
2043 Hard core instruction scanning code. No external interfaces here,
2044 just a lot of routines that look inside insns.
2045 ----------------------------------------------------------------------------*/
2048 /* Return true if the contents of two df_ref's are identical.
2049 It ignores DF_REF_MARKER. */
2051 static bool
2052 df_ref_equal_p (df_ref ref1, df_ref ref2)
2054 if (!ref2)
2055 return false;
2057 if (ref1 == ref2)
2058 return true;
2060 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2061 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2062 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2063 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2064 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2065 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2066 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2067 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2068 return false;
2070 switch (DF_REF_CLASS (ref1))
2072 case DF_REF_ARTIFICIAL:
2073 case DF_REF_BASE:
2074 return true;
2076 case DF_REF_REGULAR:
2077 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2079 default:
2080 gcc_unreachable ();
2082 return false;
2086 /* Compare REF1 and REF2 for sorting. This is only called from places
2087 where all of the refs are of the same type, in the same insn, and
2088 have the same bb. So these fields are not checked. */
2090 static int
2091 df_ref_compare (df_ref ref1, df_ref ref2)
2093 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
2094 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
2096 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2097 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2099 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2100 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
/* Different register rtxes with equal regno: fall back on creation
   order to keep the sort deterministic.  */
2102 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
2103 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2105 /* Cannot look at the LOC field on artificial refs. */
2106 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
2107 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
2108 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2110 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2112 /* If two refs are identical except that one of them is from
2113 a mw and one is not, we need to have the one with the mw
2114 first. */
2115 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2116 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2117 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2118 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2119 return -1;
2120 else
2121 return 1;
/* Fully tied on all keys: creation order decides.  */
2124 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2127 /* Like df_ref_compare, but compare two df_ref* pointers R1 and R2. */
2129 static int
2130 df_ref_ptr_compare (const void *r1, const void *r2)
2132 return df_ref_compare (*(const df_ref *) r1, *(const df_ref *) r2);
2135 /* Sort and compress a set of refs. */
/* Sorts REF_VEC by df_ref_compare and removes duplicate refs (freeing
   them), truncating the vector to the unique entries.  */
2137 static void
2138 df_sort_and_compress_refs (vec<df_ref, va_heap> *ref_vec)
2140 unsigned int count;
2141 unsigned int i;
2142 unsigned int dist = 0;
2144 count = ref_vec->length ();
2146 /* If there are 1 or 0 elements, there is nothing to do. */
2147 if (count < 2)
2148 return;
2149 else if (count == 2)
/* Two elements: a single compare-and-swap sorts them.  */
2151 df_ref r0 = (*ref_vec)[0];
2152 df_ref r1 = (*ref_vec)[1];
2153 if (df_ref_compare (r0, r1) > 0)
2154 std::swap ((*ref_vec)[0], (*ref_vec)[1]);
2156 else
/* Probe whether the vector is already strictly sorted.  */
2158 for (i = 0; i < count - 1; i++)
2160 df_ref r0 = (*ref_vec)[i];
2161 df_ref r1 = (*ref_vec)[i + 1];
2162 if (df_ref_compare (r0, r1) >= 0)
2163 break;
2165 /* If the array is already strictly ordered,
2166 which is the most common case for large COUNT case
2167 (which happens for CALL INSNs),
2168 no need to sort and filter out duplicate.
2169 Simply return the count.
2170 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2171 of DF_REF_COMPARE. */
2172 if (i == count - 1)
2173 return;
2174 ref_vec->qsort (df_ref_ptr_compare);
/* Compact out duplicates; DIST counts how far survivors shift left.  */
2177 for (i=0; i<count-dist; i++)
2179 /* Find the next ref that is not equal to the current ref. */
2180 while (i + dist + 1 < count
2181 && df_ref_equal_p ((*ref_vec)[i],
2182 (*ref_vec)[i + dist + 1]))
2184 df_free_ref ((*ref_vec)[i + dist + 1]);
2185 dist++;
2187 /* Copy it down to the next position. */
2188 if (dist && i + dist + 1 < count)
2189 (*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
2192 count -= dist;
2193 ref_vec->truncate (count);
2197 /* Return true if the contents of two df_ref's are identical.
2198 It ignores DF_REF_MARKER. */
2200 static bool
2201 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2203 if (!mw2)
2204 return false;
2205 return (mw1 == mw2) ||
2206 (mw1->mw_reg == mw2->mw_reg
2207 && mw1->type == mw2->type
2208 && mw1->flags == mw2->flags
2209 && mw1->start_regno == mw2->start_regno
2210 && mw1->end_regno == mw2->end_regno);
2214 /* Compare MW1 and MW2 for sorting. */
2216 static int
2217 df_mw_compare (const df_mw_hardreg *mw1, const df_mw_hardreg *mw2)
2219 if (mw1->type != mw2->type)
2220 return mw1->type - mw2->type;
2222 if (mw1->flags != mw2->flags)
2223 return mw1->flags - mw2->flags;
2225 if (mw1->start_regno != mw2->start_regno)
2226 return mw1->start_regno - mw2->start_regno;
2228 if (mw1->end_regno != mw2->end_regno)
2229 return mw1->end_regno - mw2->end_regno;
2231 if (mw1->mw_reg != mw2->mw_reg)
2232 return mw1->mw_order - mw2->mw_order;
2234 return 0;
2237 /* Like df_mw_compare, but compare two df_mw_hardreg** pointers R1 and R2. */
2239 static int
2240 df_mw_ptr_compare (const void *m1, const void *m2)
2242 return df_mw_compare (*(const df_mw_hardreg *const *) m1,
2243 *(const df_mw_hardreg *const *) m2);
2246 /* Sort and compress a set of refs. */
/* Like df_sort_and_compress_refs, but for mw_hardregs: sort MW_VEC by
   df_mw_compare and return duplicates to the allocation pool.  */
2248 static void
2249 df_sort_and_compress_mws (vec<df_mw_hardreg_ptr, va_heap> *mw_vec)
2251 unsigned int count;
2252 struct df_scan_problem_data *problem_data
2253 = (struct df_scan_problem_data *) df_scan->problem_data;
2254 unsigned int i;
2255 unsigned int dist = 0;
2257 count = mw_vec->length ();
2258 if (count < 2)
2259 return;
2260 else if (count == 2)
/* Two elements: a single compare-and-swap sorts them.  */
2262 struct df_mw_hardreg *m0 = (*mw_vec)[0];
2263 struct df_mw_hardreg *m1 = (*mw_vec)[1];
2264 if (df_mw_compare (m0, m1) > 0)
2266 struct df_mw_hardreg *tmp = (*mw_vec)[0];
2267 (*mw_vec)[0] = (*mw_vec)[1];
2268 (*mw_vec)[1] = tmp;
2271 else
2272 mw_vec->qsort (df_mw_ptr_compare);
/* Compact out duplicates; DIST counts how far survivors shift left.  */
2274 for (i=0; i<count-dist; i++)
2276 /* Find the next ref that is not equal to the current ref. */
2277 while (i + dist + 1 < count
2278 && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
2280 problem_data->mw_reg_pool->remove ((*mw_vec)[i + dist + 1]);
2281 dist++;
2283 /* Copy it down to the next position. */
2284 if (dist && i + dist + 1 < count)
2285 (*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
2288 count -= dist;
2289 mw_vec->truncate (count);
/* Sort and remove duplicates from every vector in COLLECTION_REC
   (defs, uses, eq-uses and multiword hardregs), putting the record
   into canonical form.  */

static void
df_canonize_collection_rec (struct df_collection_rec *collection_rec)
{
  df_sort_and_compress_refs (&collection_rec->def_vec);
  df_sort_and_compress_refs (&collection_rec->use_vec);
  df_sort_and_compress_refs (&collection_rec->eq_use_vec);
  df_sort_and_compress_mws (&collection_rec->mw_vec);
}
/* Add the new df_ref THIS_REF to the appropriate reg_info/ref_info
   chains: push it on the front of REG_INFO's per-register chain, and,
   if ADD_TO_TABLE, append it to REF_INFO's flat table of refs.  Also
   maintains the hard-regs-live counters for DF_HARD_REG_LIVE refs.  */

static void
df_install_ref (df_ref this_ref,
                struct df_reg_info *reg_info,
                struct df_ref_info *ref_info,
                bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      /* Only hard regs carry this flag; count the live occurrence.  */
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
                       && DF_REF_PREV_REG (this_ref) == NULL);

  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    DF_REF_ID (this_ref) = -1;

  ref_info->total_size++;
}
/* This function takes one of the groups of refs (defs, uses or
   eq_uses) in OLD_VEC and installs the entire group into the insn.
   It also adds each of these refs into the appropriate chains.
   Returns the first ref of the group (the chain head), or 0 when
   OLD_VEC is empty.  IS_NOTES marks eq-use (REG_EQUAL/EQUIV) refs,
   which are excluded from the plain-ordered ref table.  */

static df_ref
df_install_refs (basic_block bb,
                 const vec<df_ref, va_heap> *old_vec,
                 struct df_reg_info **reg_info,
                 struct df_ref_info *ref_info,
                 bool is_notes)
{
  unsigned int count = old_vec->length ();
  if (count)
    {
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      /* Installing refs invalidates any by-reg/by-insn ordering of the
         table, so downgrade the recorded order to UNORDERED.  */
      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
          add_to_table = true;
          break;
        case DF_REF_ORDER_UNORDERED:
        case DF_REF_ORDER_BY_REG:
        case DF_REF_ORDER_BY_INSN:
          ref_info->ref_order = DF_REF_ORDER_UNORDERED;
          add_to_table = !is_notes;
          break;
        default:
          add_to_table = false;
          break;
        }

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
        add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      FOR_EACH_VEC_ELT (*old_vec, ix, this_ref)
        {
          /* Thread the refs together via NEXT_LOC as we install them.  */
          DF_REF_NEXT_LOC (this_ref) = (ix + 1 < old_vec->length ()
                                        ? (*old_vec)[ix + 1]
                                        : NULL);
          df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
                          ref_info, add_to_table);
        }
      return (*old_vec)[0];
    }
  else
    return 0;
}
2409 /* This function takes the mws installs the entire group into the
2410 insn. */
2412 static struct df_mw_hardreg *
2413 df_install_mws (const vec<df_mw_hardreg_ptr, va_heap> *old_vec)
2415 unsigned int count = old_vec->length ();
2416 if (count)
2418 for (unsigned int i = 0; i < count - 1; i++)
2419 DF_MWS_NEXT ((*old_vec)[i]) = (*old_vec)[i + 1];
2420 DF_MWS_NEXT ((*old_vec)[count - 1]) = 0;
2421 return (*old_vec)[0];
2423 else
2424 return 0;
/* Add the chains of df_refs accumulated in COLLECTION_REC to the
   appropriate ref chain / reg_info / ref_info structures and update
   other necessary information.  When INSN is non-null the refs are
   attached to that insn's record, gated per-vector by the copy_* bits
   in FLAGS; otherwise they become BB's artificial defs/uses.  */

static void
df_refs_add_to_chains (struct df_collection_rec *collection_rec,
                       basic_block bb, rtx_insn *insn, unsigned int flags)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
         insn.  A null rec is a signal that the caller will handle the
         chain specially.  */
      if (flags & copy_defs)
        {
          gcc_checking_assert (!insn_rec->defs);
          insn_rec->defs
            = df_install_refs (bb, &collection_rec->def_vec,
                               df->def_regs,
                               &df->def_info, false);
        }
      if (flags & copy_uses)
        {
          gcc_checking_assert (!insn_rec->uses);
          insn_rec->uses
            = df_install_refs (bb, &collection_rec->use_vec,
                               df->use_regs,
                               &df->use_info, false);
        }
      if (flags & copy_eq_uses)
        {
          gcc_checking_assert (!insn_rec->eq_uses);
          insn_rec->eq_uses
            = df_install_refs (bb, &collection_rec->eq_use_vec,
                               df->eq_use_regs,
                               &df->use_info, true);
        }
      if (flags & copy_mw)
        {
          gcc_checking_assert (!insn_rec->mw_hardregs);
          insn_rec->mw_hardregs
            = df_install_mws (&collection_rec->mw_vec);
        }
    }
  else
    {
      /* No insn: these are block-level artificial refs.  */
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      gcc_checking_assert (!bb_info->artificial_defs);
      bb_info->artificial_defs
        = df_install_refs (bb, &collection_rec->def_vec,
                           df->def_regs,
                           &df->def_info, false);
      gcc_checking_assert (!bb_info->artificial_uses);
      bb_info->artificial_uses
        = df_install_refs (bb, &collection_rec->use_vec,
                           df->use_regs,
                           &df->use_info, false);
    }
}
/* Allocate a ref of class CL from the matching pool and initialize its
   fields for register REG (or SUBREG of a reg) at location LOC within
   the insn described by INFO.  The ref is pushed onto COLLECTION_REC's
   def/use/eq-use vector, or installed incrementally when
   COLLECTION_REC is null.  Returns the new ref.  */

static df_ref
df_ref_create_structure (enum df_ref_class cl,
                         struct df_collection_rec *collection_rec,
                         rtx reg, rtx *loc,
                         basic_block bb, struct df_insn_info *info,
                         enum df_ref_type ref_type,
                         int ref_flags)
{
  df_ref this_ref = NULL;
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  /* Each ref class has its own pool and its own extra fields.  */
  switch (cl)
    {
    case DF_REF_BASE:
      this_ref = (df_ref) (problem_data->ref_base_pool->allocate ());
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_ARTIFICIAL:
      this_ref = (df_ref) (problem_data->ref_artificial_pool->allocate ());
      this_ref->artificial_ref.bb = bb;
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_REGULAR:
      this_ref = (df_ref) (problem_data->ref_regular_pool->allocate ());
      this_ref->regular_ref.loc = loc;
      gcc_checking_assert (loc);
      break;
    }

  DF_REF_CLASS (this_ref) = cl;
  DF_REF_ID (this_ref) = -1;
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_INSN_INFO (this_ref) = info;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_NEXT_REG (this_ref) = NULL;
  DF_REF_PREV_REG (this_ref) = NULL;
  DF_REF_ORDER (this_ref) = df->ref_order++;

  /* We need to clear this bit because fwprop, and in the future
     possibly other optimizations sometimes create new refs using old
     refs as the model.  */
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);

  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
  if (regno < FIRST_PSEUDO_REGISTER
      && !DF_REF_IS_ARTIFICIAL (this_ref)
      && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
    {
      if (DF_REF_REG_DEF_P (this_ref))
        {
          if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
            DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
        }
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
                 && (regno == FRAME_POINTER_REGNUM
                     || regno == ARG_POINTER_REGNUM)))
        DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
    }

  if (collection_rec)
    {
      /* Route the ref to the vector matching its kind.  */
      if (DF_REF_REG_DEF_P (this_ref))
        collection_rec->def_vec.safe_push (this_ref);
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
        collection_rec->eq_use_vec.safe_push (this_ref);
      else
        collection_rec->use_vec.safe_push (this_ref);
    }
  else
    df_install_ref_incremental (this_ref);

  return this_ref;
}
/* Create new references of type REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  A hard register spanning several
   machine registers gets one ref per covered regno, plus an auxiliary
   df_mw_hardreg record used later to build REG_DEAD/REG_UNUSED notes;
   a pseudo gets a single ref.  */

static void
df_ref_record (enum df_ref_class cl,
               struct df_collection_rec *collection_rec,
               rtx reg, rtx *loc,
               basic_block bb, struct df_insn_info *insn_info,
               enum df_ref_type ref_type,
               int ref_flags)
{
  unsigned int regno;

  gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;
      unsigned int i;
      unsigned int endregno;
      df_ref ref;

      if (GET_CODE (reg) == SUBREG)
        {
          /* Narrow [regno, endregno) to the registers the SUBREG
             actually touches.  */
          regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
                                        SUBREG_BYTE (reg), GET_MODE (reg));
          endregno = regno + subreg_nregs (reg);
        }
      else
        endregno = END_REGNO (reg);

      /* If this is a multiword hardreg, we create some extra
         datastructures that will enable us to easily build REG_DEAD
         and REG_UNUSED notes.  */
      if (collection_rec
          && (endregno != regno + 1) && insn_info)
        {
          /* Sets to a subreg of a multiword register are partial.
             Sets to a non-subreg of a multiword register are not.  */
          if (GET_CODE (reg) == SUBREG)
            ref_flags |= DF_REF_PARTIAL;
          ref_flags |= DF_REF_MW_HARDREG;

          hardreg = problem_data->mw_reg_pool->allocate ();
          hardreg->type = ref_type;
          hardreg->flags = ref_flags;
          hardreg->mw_reg = reg;
          hardreg->start_regno = regno;
          hardreg->end_regno = endregno - 1;
          hardreg->mw_order = df->ref_order++;
          collection_rec->mw_vec.safe_push (hardreg);
        }

      /* One ref per machine register covered.  */
      for (i = regno; i < endregno; i++)
        {
          ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
                                         bb, insn_info, ref_type, ref_flags);

          gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
        }
    }
  else
    {
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
                               ref_type, ref_flags);
    }
}
2647 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2648 covered by the outer mode is smaller than that covered by the inner mode,
2649 is a read-modify-write operation.
2650 This function returns true iff the SUBREG X is such a SUBREG. */
2652 bool
2653 df_read_modify_subreg_p (rtx x)
2655 unsigned int isize, osize;
2656 if (GET_CODE (x) != SUBREG)
2657 return false;
2658 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2659 osize = GET_MODE_SIZE (GET_MODE (x));
2660 return isize > osize
2661 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
/* Process all the registers defined in the rtx pointed by LOC.
   Autoincrement/decrement definitions will be picked up by df_uses_record.
   Any change here has to be matched in df_find_hard_reg_defs_1.  */

static void
df_def_record_1 (struct df_collection_rec *collection_rec,
                 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
                 int flags)
{
  rtx dst = *loc;

  /* It is legal to have a set destination be a parallel. */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
        {
          rtx temp = XVECEXP (dst, 0, i);
          gcc_assert (GET_CODE (temp) == EXPR_LIST);
          df_def_record_1 (collection_rec, &XEXP (temp, 0),
                           bb, insn_info, flags);
        }
      return;
    }

  /* Strip STRICT_LOW_PART / ZERO_EXTRACT wrappers, folding their
     partial-write semantics into FLAGS.  */
  if (GET_CODE (dst) == STRICT_LOW_PART)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  if (GET_CODE (dst) == ZERO_EXTRACT)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst))
    {
      df_ref_record (DF_REF_REGULAR, collection_rec,
                     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);

      /* We want to keep sp alive everywhere - by making all
         writes to sp also use of sp. */
      if (REGNO (dst) == STACK_POINTER_REGNUM)
        df_ref_record (DF_REF_BASE, collection_rec,
                       dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
    }
  else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
    {
      if (df_read_modify_subreg_p (dst))
        flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;

      flags |= DF_REF_SUBREG;

      df_ref_record (DF_REF_REGULAR, collection_rec,
                     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
    }
}
/* Process all the registers defined in the pattern rtx, X.  Any change
   here has to be matched in df_find_hard_reg_defs.  Recurses through
   COND_EXEC and PARALLEL wrappers; SET/CLOBBER destinations are handed
   to df_def_record_1.  */

static void
df_defs_record (struct df_collection_rec *collection_rec,
                rtx x, basic_block bb, struct df_insn_info *insn_info,
                int flags)
{
  RTX_CODE code = GET_CODE (x);
  int i;

  switch (code)
    {
    case SET:
      df_defs_record_1_or_set: ;
      df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
      break;

    case CLOBBER:
      flags |= DF_REF_MUST_CLOBBER;
      df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
      break;

    case COND_EXEC:
      /* Defs under COND_EXEC only maybe-happen: mark them conditional.  */
      df_defs_record (collection_rec, COND_EXEC_CODE (x),
                      bb, insn_info, DF_REF_CONDITIONAL);
      break;

    case PARALLEL:
      for (i = 0; i < XVECLEN (x, 0); i++)
        df_defs_record (collection_rec, XVECEXP (x, 0, i),
                        bb, insn_info, flags);
      break;
    default:
      /* No DEFs to record in other cases */
      break;
    }
}
2769 /* Set bits in *DEFS for hard registers found in the rtx DST, which is the
2770 destination of a set or clobber. This has to match the logic in
2771 df_defs_record_1. */
2773 static void
2774 df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
2776 /* It is legal to have a set destination be a parallel. */
2777 if (GET_CODE (dst) == PARALLEL)
2779 int i;
2780 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2782 rtx temp = XVECEXP (dst, 0, i);
2783 gcc_assert (GET_CODE (temp) == EXPR_LIST);
2784 df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
2786 return;
2789 if (GET_CODE (dst) == STRICT_LOW_PART)
2790 dst = XEXP (dst, 0);
2792 if (GET_CODE (dst) == ZERO_EXTRACT)
2793 dst = XEXP (dst, 0);
2795 /* At this point if we do not have a reg or a subreg, just return. */
2796 if (REG_P (dst) && HARD_REGISTER_P (dst))
2797 SET_HARD_REG_BIT (*defs, REGNO (dst));
2798 else if (GET_CODE (dst) == SUBREG
2799 && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
2800 SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
/* Set bits in *DEFS for hard registers defined in the pattern X.  This
   has to match the logic in df_defs_record: recurse through COND_EXEC
   and PARALLEL, delegate SET/CLOBBER destinations to
   df_find_hard_reg_defs_1.  */

static void
df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
{
  RTX_CODE code = GET_CODE (x);
  int i;

  switch (code)
    {
    case SET:
      df_find_hard_reg_defs_1 (SET_DEST (x), defs);
      break;

    case CLOBBER:
      df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
      break;

    case COND_EXEC:
      df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
      break;

    case PARALLEL:
      for (i = 0; i < XVECLEN (x, 0); i++)
        df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
      break;
    default:
      /* No DEFs to record in other cases */
      break;
    }
}
/* Process all the registers used in the rtx at address LOC, recording
   them as refs of type REF_TYPE with flags FLAGS.  Recurses over the
   whole expression; special-cases every rtx code whose operands are
   not plain uses (MEM addresses, SET destinations, extracts,
   auto-modify addresses, asms, ...).  The trailing generic walk
   handles every code not returned from in the switch.  */

static void
df_uses_record (struct df_collection_rec *collection_rec,
                rtx *loc, enum df_ref_type ref_type,
                basic_block bb, struct df_insn_info *insn_info,
                int flags)
{
  RTX_CODE code;
  rtx x;

 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Leaf codes with no register uses.  */
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
         as being used.  */
      if (MEM_P (XEXP (x, 0)))
        df_uses_record (collection_rec,
                        &XEXP (XEXP (x, 0), 0),
                        DF_REF_REG_MEM_STORE,
                        bb, insn_info,
                        flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* Only DF_REF_IN_NOTE survives into the address walk.  */
      df_uses_record (collection_rec,
                      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
                      bb, insn_info, flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
        {
          loc = &SUBREG_REG (x);
          df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
          return;
        }
      /* ... Fall through ... */

    case REG:
      df_ref_record (DF_REF_REGULAR, collection_rec,
                     x, loc, bb, insn_info,
                     ref_type, flags);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
        df_uses_record (collection_rec,
                        &XEXP (x, 1), ref_type, bb, insn_info, flags);
        df_uses_record (collection_rec,
                        &XEXP (x, 2), ref_type, bb, insn_info, flags);

        /* If the parameters to the zero or sign extract are
           constants, strip them off and recurse, otherwise there is
           no information that we can gain from this operation.  */
        if (code == ZERO_EXTRACT)
          flags |= DF_REF_ZERO_EXTRACT;
        else
          flags |= DF_REF_SIGN_EXTRACT;

        df_uses_record (collection_rec,
                        &XEXP (x, 0), ref_type, bb, insn_info, flags);
        return;
      }
      break;

    case SET:
      {
        rtx dst = SET_DEST (x);
        gcc_assert (!(flags & DF_REF_IN_NOTE));
        df_uses_record (collection_rec,
                        &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);

        /* The destination is only a use for certain rtx shapes.  */
        switch (GET_CODE (dst))
          {
          case SUBREG:
            if (df_read_modify_subreg_p (dst))
              {
                /* Read-modify-write subreg: inner reg is also read.  */
                df_uses_record (collection_rec, &SUBREG_REG (dst),
                                DF_REF_REG_USE, bb, insn_info,
                                flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
                break;
              }
            /* Fall through.  */
          case REG:
          case PARALLEL:
          case SCRATCH:
          case PC:
          case CC0:
            break;
          case MEM:
            df_uses_record (collection_rec, &XEXP (dst, 0),
                            DF_REF_REG_MEM_STORE, bb, insn_info, flags);
            break;
          case STRICT_LOW_PART:
            {
              rtx *temp = &XEXP (dst, 0);
              /* A strict_low_part uses the whole REG and not just the
                 SUBREG.  */
              dst = XEXP (dst, 0);
              df_uses_record (collection_rec,
                              (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
                              DF_REF_REG_USE, bb, insn_info,
                              DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
            }
            break;
          case ZERO_EXTRACT:
            {
              df_uses_record (collection_rec, &XEXP (dst, 1),
                              DF_REF_REG_USE, bb, insn_info, flags);
              df_uses_record (collection_rec, &XEXP (dst, 2),
                              DF_REF_REG_USE, bb, insn_info, flags);
              if (GET_CODE (XEXP (dst,0)) == MEM)
                df_uses_record (collection_rec, &XEXP (dst, 0),
                                DF_REF_REG_USE, bb, insn_info,
                                flags);
              else
                df_uses_record (collection_rec, &XEXP (dst, 0),
                                DF_REF_REG_USE, bb, insn_info,
                                DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
            }
            break;

          default:
            gcc_unreachable ();
          }
        return;
      }

    case RETURN:
    case SIMPLE_RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
        /* Traditional and volatile asm instructions must be
           considered to use and clobber all hard registers, all
           pseudo-registers and all of memory.  So must TRAP_IF and
           UNSPEC_VOLATILE operations.

           Consider for instance a volatile asm that changes the fpu
           rounding mode.  An insn should not be moved across this
           even if it only uses pseudo-regs because it might give an
           incorrectly rounded result.

           However, flow.c's liveness computation did *not* do this,
           giving the reasoning as " ?!? Unfortunately, marking all
           hard registers as live causes massive problems for the
           register allocator and marking all pseudos as live creates
           mountains of uninitialized variable warnings."

           In order to maintain the status quo with regard to liveness
           and uses, we do what flow.c did and just mark any regs we
           can find in ASM_OPERANDS as used.  In global asm insns are
           scanned and regs_asm_clobbered is filled out.

           For all ASM_OPERANDS, we must traverse the vector of input
           operands.  We can not just fall through here since then we
           would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
           which do not indicate traditional asms unlike their normal
           usage.  */
        if (code == ASM_OPERANDS)
          {
            int j;

            for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
              df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
                              DF_REF_REG_USE, bb, insn_info, flags);
            return;
          }
        break;
      }

    case VAR_LOCATION:
      df_uses_record (collection_rec,
                      &PAT_VAR_LOCATION_LOC (x),
                      DF_REF_REG_USE, bb, insn_info, flags);
      return;

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
      /* Catch the def of the register being modified.  */
      df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
                     bb, insn_info,
                     DF_REF_REG_DEF,
                     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);

      /* ... Fall through to handle uses ... */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            /* Tail recursive case: save a function call level.  */
            if (i == 0)
              {
                loc = &XEXP (x, 0);
                goto retry;
              }
            df_uses_record (collection_rec, &XEXP (x, i), ref_type,
                            bb, insn_info, flags);
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              df_uses_record (collection_rec,
                              &XVECEXP (x, i, j), ref_type,
                              bb, insn_info, flags);
          }
      }
  }

  return;
}
/* For all DF_REF_CONDITIONAL defs in COLLECTION_REC, add a
   corresponding use: a conditional def may leave the old value in
   place, so the register is also (potentially) read.  */

static void
df_get_conditional_uses (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  df_ref ref;

  FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
    {
      if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
        {
          df_ref use;

          /* Clone the def as a use, dropping the CONDITIONAL flag.  */
          use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
                                         DF_REF_LOC (ref), DF_REF_BB (ref),
                                         DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
                                         DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
          DF_REF_REGNO (use) = DF_REF_REGNO (ref);
        }
    }
}
/* Get call's extra defs and uses (track caller-saved registers):
   honorary stack-pointer use, global-register uses/defs depending on
   const/pure-ness, may-clobber defs for registers the callee can set,
   and the USE/CLOBBER entries of CALL_INSN_FUNCTION_USAGE.  */

static void
df_get_call_refs (struct df_collection_rec *collection_rec,
                  basic_block bb,
                  struct df_insn_info *insn_info,
                  int flags)
{
  rtx note;
  bool is_sibling_call;
  unsigned int i;
  HARD_REG_SET defs_generated;
  HARD_REG_SET fn_reg_set_usage;

  /* Registers already defined by the call pattern itself must not get
     an extra may-clobber def.  */
  CLEAR_HARD_REG_SET (defs_generated);
  df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);
  get_call_reg_set_usage (insn_info->insn, &fn_reg_set_usage,
                          regs_invalidated_by_call);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i == STACK_POINTER_REGNUM)
        /* The stack ptr is used (honorarily) by a CALL insn.  */
        df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
                       NULL, bb, insn_info, DF_REF_REG_USE,
                       DF_REF_CALL_STACK_USAGE | flags);
      else if (global_regs[i])
        {
          /* Calls to const functions cannot access any global registers and
             calls to pure functions cannot set them.  All other calls may
             reference any of the global registers, so they are recorded as
             used. */
          if (!RTL_CONST_CALL_P (insn_info->insn))
            {
              df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
                             NULL, bb, insn_info, DF_REF_REG_USE, flags);
              if (!RTL_PURE_CALL_P (insn_info->insn))
                df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
                               NULL, bb, insn_info, DF_REF_REG_DEF, flags);
            }
        }
      else if (TEST_HARD_REG_BIT (fn_reg_set_usage, i)
               /* no clobbers for regs that are the result of the call */
               && !TEST_HARD_REG_BIT (defs_generated, i)
               && (!is_sibling_call
                   || !bitmap_bit_p (df->exit_block_uses, i)
                   || refers_to_regno_p (i, crtl->return_rtx)))
        df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
                       NULL, bb, insn_info, DF_REF_REG_DEF,
                       DF_REF_MAY_CLOBBER | flags);
    }

  /* Record the registers used to pass arguments, and explicitly
     noted as clobbered.  */
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      if (GET_CODE (XEXP (note, 0)) == USE)
        df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
                        DF_REF_REG_USE, bb, insn_info, flags);
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
        {
          if (REG_P (XEXP (XEXP (note, 0), 0)))
            {
              unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
              if (!TEST_HARD_REG_BIT (defs_generated, regno))
                df_defs_record (collection_rec, XEXP (note, 0), bb,
                                insn_info, flags);
            }
          else
            df_uses_record (collection_rec, &XEXP (note, 0),
                            DF_REF_REG_USE, bb, insn_info, flags);
        }
    }

  return;
}
/* Collect all refs in the INSN.  This function is free of any
   side-effect - it will create and return a lists of df_ref's in the
   COLLECTION_REC without putting those refs into existing ref chains
   and reg chains.  */

static void
df_insn_refs_collect (struct df_collection_rec *collection_rec,
                      basic_block bb, struct df_insn_info *insn_info)
{
  rtx note;
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);

  /* Clear out the collection record.  */
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  /* Process REG_EQUIV/REG_EQUAL notes.  */
  for (note = REG_NOTES (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EQUIV:
        case REG_EQUAL:
          df_uses_record (collection_rec,
                          &XEXP (note, 0), DF_REF_REG_USE,
                          bb, insn_info, DF_REF_IN_NOTE);
          break;
        case REG_NON_LOCAL_GOTO:
          /* The frame ptr is used by a non-local goto.  */
          df_ref_record (DF_REF_BASE, collection_rec,
                         regno_reg_rtx[FRAME_POINTER_REGNUM],
                         NULL, bb, insn_info,
                         DF_REF_REG_USE, 0);
          if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
            df_ref_record (DF_REF_BASE, collection_rec,
                           regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
                           NULL, bb, insn_info,
                           DF_REF_REG_USE, 0);
          break;
        default:
          break;
        }
    }

  /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
     uses from CALL_INSN_FUNCTION_USAGE.  */
  if (CALL_P (insn_info->insn))
    df_get_call_refs (collection_rec, bb, insn_info,
                      (is_cond_exec) ? DF_REF_CONDITIONAL : 0);

  /* Record other defs.  These should be mostly for DF_REF_REGULAR, so
     that a qsort on the defs is unnecessary in most cases.  */
  df_defs_record (collection_rec,
                  PATTERN (insn_info->insn), bb, insn_info, 0);

  /* Record the register uses.  */
  df_uses_record (collection_rec,
                  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);

  /* DF_REF_CONDITIONAL needs corresponding USES. */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  df_canonize_collection_rec (collection_rec);
}
3264 /* Recompute the luids for the insns in BB. */
3266 void
3267 df_recompute_luids (basic_block bb)
3269 rtx_insn *insn;
3270 int luid = 0;
3272 df_grow_insn_info ();
3274 /* Scan the block an insn at a time from beginning to end. */
3275 FOR_BB_INSNS (bb, insn)
3277 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3278 /* Inserting labels does not always trigger the incremental
3279 rescanning. */
3280 if (!insn_info)
3282 gcc_assert (!INSN_P (insn));
3283 insn_info = df_insn_create_insn_record (insn);
3286 DF_INSN_INFO_LUID (insn_info) = luid;
3287 if (INSN_P (insn))
3288 luid++;
/* Collect all artificial refs at the block level for BB and add them
   to COLLECTION_REC: entry-block defs, exit-block uses, EH-handler
   data registers, the hard frame pointer for non-local goto targets,
   and the per-block artificial uses.  */

static void
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  if (bb->index == ENTRY_BLOCK)
    {
      df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
      return;
    }
  else if (bb->index == EXIT_BLOCK)
    {
      df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
      return;
    }

  if (bb_has_eh_pred (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
        {
          unsigned regno = EH_RETURN_DATA_REGNO (i);
          if (regno == INVALID_REGNUM)
            break;
          df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
                         bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
        }
    }

  /* Add the hard_frame_pointer if this block is the target of a
     non-local goto.  */
  if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
                   bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);

  /* Add the artificial uses.  */
  if (bb->index >= NUM_FIXED_BLOCKS)
    {
      bitmap_iterator bi;
      unsigned int regno;
      /* EH blocks get the EH-specific artificial-use set.  */
      bitmap au = bb_has_eh_pred (bb)
        ? &df->eh_block_artificial_uses
        : &df->regular_block_artificial_uses;

      EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
        {
          df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
                         bb, NULL, DF_REF_REG_USE, 0);
        }
    }

  df_canonize_collection_rec (collection_rec);
}
/* Record all the refs within the basic block BB_INDEX and scan the
   instructions if SCAN_INSNS.  Each insn gets a fresh insn record and
   luid; block-level artificial refs are added afterwards, and the
   block is marked dirty so LR/LIVE reprocess it.  */

void
df_bb_refs_record (int bb_index, bool scan_insns)
{
  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  rtx_insn *insn;
  int luid = 0;

  if (!df)
    return;

  df_collection_rec collection_rec;
  df_grow_bb_info (df_scan);
  if (scan_insns)
    /* Scan the block an insn at a time from beginning to end.  */
    FOR_BB_INSNS (bb, insn)
      {
        struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
        gcc_assert (!insn_info);

        insn_info = df_insn_create_insn_record (insn);
        if (INSN_P (insn))
          {
            /* Record refs within INSN.  */
            DF_INSN_INFO_LUID (insn_info) = luid++;
            df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
            df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
          }
        /* Non-insns (notes, labels) share the luid of the next insn.  */
        DF_INSN_INFO_LUID (insn_info) = luid;
      }

  /* Other block level artificial refs */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_add_to_chains (&collection_rec, bb, NULL, copy_all);

  /* Now that the block has been processed, set the block as dirty so
     LR and LIVE will get it processed.  */
  df_set_bb_dirty (bb);
}
/* Get the artificial use set for a regular (i.e. non-exit/non-entry)
   block: registers that must be considered live in every ordinary
   block, such as the stack pointer and (before reload) the frame,
   argument and PIC registers.  */

static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
#ifdef EH_USES
  unsigned int i;
#endif

  bitmap_clear (regular_block_artificial_uses);

  if (reload_completed)
    {
      if (frame_pointer_needed)
        bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
    }
  else
    /* Before reload, there are a few registers that must be forced
       live everywhere -- which might not already be the case for
       blocks within infinite loops.  */
    {
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

      /* Any reference to any pseudo before reload is a potential
         reference of the frame pointer.  */
      bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
        bitmap_set_bit (regular_block_artificial_uses,
                        HARD_FRAME_POINTER_REGNUM);

      /* Pseudos with argument area equivalences may require
         reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          && fixed_regs[ARG_POINTER_REGNUM])
        bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
         require reloading from memory using the pic register.  */
      if (picreg != INVALID_REGNUM
          && fixed_regs[picreg])
        bitmap_set_bit (regular_block_artificial_uses, picreg);
    }
  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);

#ifdef EH_USES
  /* EH_USES registers are used:
     1) at all insns that might throw (calls or with -fnon-call-exceptions
	trapping insns)
     2) in all EH edges
     3) to support backtraces and/or debugging, anywhere between their
	initialization and where the saved registers are restored
	from them, including the cases where we don't reach the epilogue
	(noreturn call or infinite loop).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EH_USES (i))
      bitmap_set_bit (regular_block_artificial_uses, i);
#endif
}
3460 /* Get the artificial use set for an eh block. */
3462 static void
3463 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3465 bitmap_clear (eh_block_artificial_uses);
3467 /* The following code (down through the arg_pointer setting APPEARS
3468 to be necessary because there is nothing that actually
3469 describes what the exception handling code may actually need
3470 to keep alive. */
3471 if (reload_completed)
3473 if (frame_pointer_needed)
3475 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3476 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3477 bitmap_set_bit (eh_block_artificial_uses,
3478 HARD_FRAME_POINTER_REGNUM);
3480 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3481 && fixed_regs[ARG_POINTER_REGNUM])
3482 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3488 /*----------------------------------------------------------------------------
3489 Specialized hard register scanning functions.
3490 ----------------------------------------------------------------------------*/
3493 /* Mark a register in SET. Hard registers in large modes get all
3494 of their component registers set as well. */
3496 static void
3497 df_mark_reg (rtx reg, void *vset)
3499 bitmap_set_range ((bitmap) vset, REGNO (reg), REG_NREGS (reg));
3503 /* Set the bit for regs that are considered being defined at the entry. */
3505 static void
3506 df_get_entry_block_def_set (bitmap entry_block_defs)
3508 rtx r;
3509 int i;
3511 bitmap_clear (entry_block_defs);
3513 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3515 if (global_regs[i])
3516 bitmap_set_bit (entry_block_defs, i);
3517 if (FUNCTION_ARG_REGNO_P (i))
3518 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3521 /* The always important stack pointer. */
3522 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3524 /* Once the prologue has been generated, all of these registers
3525 should just show up in the first regular block. */
3526 if (HAVE_prologue && epilogue_completed)
3528 /* Defs for the callee saved registers are inserted so that the
3529 pushes have some defining location. */
3530 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3531 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3532 bitmap_set_bit (entry_block_defs, i);
3535 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3536 if (r && REG_P (r))
3537 bitmap_set_bit (entry_block_defs, REGNO (r));
3539 /* If the function has an incoming STATIC_CHAIN, it has to show up
3540 in the entry def set. */
3541 r = targetm.calls.static_chain (current_function_decl, true);
3542 if (r && REG_P (r))
3543 bitmap_set_bit (entry_block_defs, REGNO (r));
3545 if ((!reload_completed) || frame_pointer_needed)
3547 /* Any reference to any pseudo before reload is a potential
3548 reference of the frame pointer. */
3549 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3551 /* If they are different, also mark the hard frame pointer as live. */
3552 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3553 && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3554 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3557 /* These registers are live everywhere. */
3558 if (!reload_completed)
3560 /* Pseudos with argument area equivalences may require
3561 reloading via the argument pointer. */
3562 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3563 && fixed_regs[ARG_POINTER_REGNUM])
3564 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3566 /* Any constant, or pseudo with constant equivalences, may
3567 require reloading from memory using the pic register. */
3568 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3569 if (picreg != INVALID_REGNUM
3570 && fixed_regs[picreg])
3571 bitmap_set_bit (entry_block_defs, picreg);
3574 #ifdef INCOMING_RETURN_ADDR_RTX
3575 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3576 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3577 #endif
3579 targetm.extra_live_on_entry (entry_block_defs);
3583 /* Return the (conservative) set of hard registers that are defined on
3584 entry to the function.
3585 It uses df->entry_block_defs to determine which register
3586 reference to include. */
3588 static void
3589 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3590 bitmap entry_block_defs)
3592 unsigned int i;
3593 bitmap_iterator bi;
3595 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3597 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3598 ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
3601 df_canonize_collection_rec (collection_rec);
3605 /* Record the (conservative) set of hard registers that are defined on
3606 entry to the function. */
3608 static void
3609 df_record_entry_block_defs (bitmap entry_block_defs)
3611 struct df_collection_rec collection_rec;
3612 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3614 /* Process bb_refs chain */
3615 df_refs_add_to_chains (&collection_rec,
3616 BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK),
3617 NULL,
3618 copy_defs);
3622 /* Update the defs in the entry block. */
3624 void
3625 df_update_entry_block_defs (void)
3627 bitmap_head refs;
3628 bool changed = false;
3630 bitmap_initialize (&refs, &df_bitmap_obstack);
3631 df_get_entry_block_def_set (&refs);
3632 if (df->entry_block_defs)
3634 if (!bitmap_equal_p (df->entry_block_defs, &refs))
3636 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3637 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3638 df_ref_chain_delete (bb_info->artificial_defs);
3639 bb_info->artificial_defs = NULL;
3640 changed = true;
3643 else
3645 struct df_scan_problem_data *problem_data
3646 = (struct df_scan_problem_data *) df_scan->problem_data;
3647 gcc_unreachable ();
3648 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3649 changed = true;
3652 if (changed)
3654 df_record_entry_block_defs (&refs);
3655 bitmap_copy (df->entry_block_defs, &refs);
3656 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
3658 bitmap_clear (&refs);
3662 /* Set the bit for regs that are considered being used at the exit. */
3664 static void
3665 df_get_exit_block_use_set (bitmap exit_block_uses)
3667 unsigned int i;
3668 unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
3670 bitmap_clear (exit_block_uses);
3672 /* Stack pointer is always live at the exit. */
3673 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3675 /* Mark the frame pointer if needed at the end of the function.
3676 If we end up eliminating it, it will be removed from the live
3677 list of each basic block by reload. */
3679 if ((!reload_completed) || frame_pointer_needed)
3681 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3683 /* If they are different, also mark the hard frame pointer as live. */
3684 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
3685 && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3686 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3689 /* Many architectures have a GP register even without flag_pic.
3690 Assume the pic register is not in use, or will be handled by
3691 other means, if it is not fixed. */
3692 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3693 && picreg != INVALID_REGNUM
3694 && fixed_regs[picreg])
3695 bitmap_set_bit (exit_block_uses, picreg);
3697 /* Mark all global registers, and all registers used by the
3698 epilogue as being live at the end of the function since they
3699 may be referenced by our caller. */
3700 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3701 if (global_regs[i] || EPILOGUE_USES (i))
3702 bitmap_set_bit (exit_block_uses, i);
3704 if (HAVE_epilogue && epilogue_completed)
3706 /* Mark all call-saved registers that we actually used. */
3707 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3708 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3709 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3710 bitmap_set_bit (exit_block_uses, i);
3713 /* Mark the registers that will contain data for the handler. */
3714 if (reload_completed && crtl->calls_eh_return)
3715 for (i = 0; ; ++i)
3717 unsigned regno = EH_RETURN_DATA_REGNO (i);
3718 if (regno == INVALID_REGNUM)
3719 break;
3720 bitmap_set_bit (exit_block_uses, regno);
3723 #ifdef EH_RETURN_STACKADJ_RTX
3724 if ((!HAVE_epilogue || ! epilogue_completed)
3725 && crtl->calls_eh_return)
3727 rtx tmp = EH_RETURN_STACKADJ_RTX;
3728 if (tmp && REG_P (tmp))
3729 df_mark_reg (tmp, exit_block_uses);
3731 #endif
3733 #ifdef EH_RETURN_HANDLER_RTX
3734 if ((!HAVE_epilogue || ! epilogue_completed)
3735 && crtl->calls_eh_return)
3737 rtx tmp = EH_RETURN_HANDLER_RTX;
3738 if (tmp && REG_P (tmp))
3739 df_mark_reg (tmp, exit_block_uses);
3741 #endif
3743 /* Mark function return value. */
3744 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3748 /* Return the refs of hard registers that are used in the exit block.
3749 It uses df->exit_block_uses to determine register to include. */
3751 static void
3752 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3754 unsigned int i;
3755 bitmap_iterator bi;
3757 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3758 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3759 EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
3761 /* It is deliberate that this is not put in the exit block uses but
3762 I do not know why. */
3763 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3764 && reload_completed
3765 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3766 && bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
3767 && fixed_regs[ARG_POINTER_REGNUM])
3768 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3769 EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
3771 df_canonize_collection_rec (collection_rec);
3775 /* Record the set of hard registers that are used in the exit block.
3776 It uses df->exit_block_uses to determine which bit to include. */
3778 static void
3779 df_record_exit_block_uses (bitmap exit_block_uses)
3781 struct df_collection_rec collection_rec;
3782 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3784 /* Process bb_refs chain */
3785 df_refs_add_to_chains (&collection_rec,
3786 BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK),
3787 NULL,
3788 copy_uses);
3792 /* Update the uses in the exit block. */
3794 void
3795 df_update_exit_block_uses (void)
3797 bitmap_head refs;
3798 bool changed = false;
3800 bitmap_initialize (&refs, &df_bitmap_obstack);
3801 df_get_exit_block_use_set (&refs);
3802 if (df->exit_block_uses)
3804 if (!bitmap_equal_p (df->exit_block_uses, &refs))
3806 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3807 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3808 df_ref_chain_delete (bb_info->artificial_uses);
3809 bb_info->artificial_uses = NULL;
3810 changed = true;
3813 else
3815 struct df_scan_problem_data *problem_data
3816 = (struct df_scan_problem_data *) df_scan->problem_data;
3817 gcc_unreachable ();
3818 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3819 changed = true;
3822 if (changed)
3824 df_record_exit_block_uses (&refs);
3825 bitmap_copy (df->exit_block_uses,& refs);
3826 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
3828 bitmap_clear (&refs);
3831 static bool initialized = false;
3834 /* Initialize some platform specific structures. */
3836 void
3837 df_hard_reg_init (void)
3839 #ifdef ELIMINABLE_REGS
3840 int i;
3841 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3842 #endif
3843 if (initialized)
3844 return;
3846 /* Record which registers will be eliminated. We use this in
3847 mark_used_regs. */
3848 CLEAR_HARD_REG_SET (elim_reg_set);
3850 #ifdef ELIMINABLE_REGS
3851 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3852 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3853 #else
3854 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
3855 #endif
3857 initialized = true;
3861 /* Recompute the parts of scanning that are based on regs_ever_live
3862 because something changed in that array. */
3864 void
3865 df_update_entry_exit_and_calls (void)
3867 basic_block bb;
3869 df_update_entry_block_defs ();
3870 df_update_exit_block_uses ();
3872 /* The call insns need to be rescanned because there may be changes
3873 in the set of registers clobbered across the call. */
3874 FOR_EACH_BB_FN (bb, cfun)
3876 rtx_insn *insn;
3877 FOR_BB_INSNS (bb, insn)
3879 if (INSN_P (insn) && CALL_P (insn))
3880 df_insn_rescan (insn);
3886 /* Return true if hard REG is actually used in the some instruction.
3887 There are a fair number of conditions that affect the setting of
3888 this array. See the comment in df.h for df->hard_regs_live_count
3889 for the conditions that this array is set. */
3891 bool
3892 df_hard_reg_used_p (unsigned int reg)
3894 return df->hard_regs_live_count[reg] != 0;
3898 /* A count of the number of times REG is actually used in the some
3899 instruction. There are a fair number of conditions that affect the
3900 setting of this array. See the comment in df.h for
3901 df->hard_regs_live_count for the conditions that this array is
3902 set. */
3905 unsigned int
3906 df_hard_reg_used_count (unsigned int reg)
3908 return df->hard_regs_live_count[reg];
3912 /* Get the value of regs_ever_live[REGNO]. */
3914 bool
3915 df_regs_ever_live_p (unsigned int regno)
3917 return regs_ever_live[regno];
3921 /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
3922 to change, schedule that change for the next update. */
3924 void
3925 df_set_regs_ever_live (unsigned int regno, bool value)
3927 if (regs_ever_live[regno] == value)
3928 return;
3930 regs_ever_live[regno] = value;
3931 if (df)
3932 df->redo_entry_and_exit = true;
3936 /* Compute "regs_ever_live" information from the underlying df
3937 information. Set the vector to all false if RESET. */
3939 void
3940 df_compute_regs_ever_live (bool reset)
3942 unsigned int i;
3943 bool changed = df->redo_entry_and_exit;
3945 if (reset)
3946 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3948 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3949 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3951 regs_ever_live[i] = true;
3952 changed = true;
3954 if (changed)
3955 df_update_entry_exit_and_calls ();
3956 df->redo_entry_and_exit = false;
3960 /*----------------------------------------------------------------------------
3961 Dataflow ref information verification functions.
3963 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
3964 df_reg_chain_verify_unmarked (refs)
3965 df_refs_verify (vec<stack, va_df_ref>, ref*, bool)
3966 df_mws_verify (mw*, mw*, bool)
3967 df_insn_refs_verify (collection_rec, bb, insn, bool)
3968 df_bb_refs_verify (bb, refs, bool)
3969 df_bb_verify (bb)
3970 df_exit_block_bitmap_verify (bool)
3971 df_entry_block_bitmap_verify (bool)
3972 df_scan_verify ()
3973 ----------------------------------------------------------------------------*/
3976 /* Mark all refs in the reg chain. Verify that all of the registers
3977 are in the correct chain. */
3979 static unsigned int
3980 df_reg_chain_mark (df_ref refs, unsigned int regno,
3981 bool is_def, bool is_eq_use)
3983 unsigned int count = 0;
3984 df_ref ref;
3985 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
3987 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
3989 /* If there are no def-use or use-def chains, make sure that all
3990 of the chains are clear. */
3991 if (!df_chain)
3992 gcc_assert (!DF_REF_CHAIN (ref));
3994 /* Check to make sure the ref is in the correct chain. */
3995 gcc_assert (DF_REF_REGNO (ref) == regno);
3996 if (is_def)
3997 gcc_assert (DF_REF_REG_DEF_P (ref));
3998 else
3999 gcc_assert (!DF_REF_REG_DEF_P (ref));
4001 if (is_eq_use)
4002 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4003 else
4004 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4006 if (DF_REF_NEXT_REG (ref))
4007 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
4008 count++;
4009 DF_REF_REG_MARK (ref);
4011 return count;
4015 /* Verify that all of the registers in the chain are unmarked. */
4017 static void
4018 df_reg_chain_verify_unmarked (df_ref refs)
4020 df_ref ref;
4021 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4022 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4026 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4028 static bool
4029 df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref old_rec,
4030 bool abort_if_fail)
4032 unsigned int ix;
4033 df_ref new_ref;
4035 FOR_EACH_VEC_ELT (*new_rec, ix, new_ref)
4037 if (old_rec == NULL || !df_ref_equal_p (new_ref, old_rec))
4039 if (abort_if_fail)
4040 gcc_assert (0);
4041 else
4042 return false;
4045 /* Abort if fail is called from the function level verifier. If
4046 that is the context, mark this reg as being seem. */
4047 if (abort_if_fail)
4049 gcc_assert (DF_REF_IS_REG_MARKED (old_rec));
4050 DF_REF_REG_UNMARK (old_rec);
4053 old_rec = DF_REF_NEXT_LOC (old_rec);
4056 if (abort_if_fail)
4057 gcc_assert (old_rec == NULL);
4058 else
4059 return old_rec == NULL;
4060 return false;
4064 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4066 static bool
4067 df_mws_verify (const vec<df_mw_hardreg_ptr, va_heap> *new_rec,
4068 struct df_mw_hardreg *old_rec,
4069 bool abort_if_fail)
4071 unsigned int ix;
4072 struct df_mw_hardreg *new_reg;
4074 FOR_EACH_VEC_ELT (*new_rec, ix, new_reg)
4076 if (old_rec == NULL || !df_mw_equal_p (new_reg, old_rec))
4078 if (abort_if_fail)
4079 gcc_assert (0);
4080 else
4081 return false;
4083 old_rec = DF_MWS_NEXT (old_rec);
4086 if (abort_if_fail)
4087 gcc_assert (old_rec == NULL);
4088 else
4089 return old_rec == NULL;
4090 return false;
4094 /* Return true if the existing insn refs information is complete and
4095 correct. Otherwise (i.e. if there's any missing or extra refs),
4096 return the correct df_ref chain in REFS_RETURN.
4098 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4099 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4100 verification mode instead of the whole function, so unmark
4101 everything.
4103 If ABORT_IF_FAIL is set, this function never returns false. */
4105 static bool
4106 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4107 basic_block bb,
4108 rtx_insn *insn,
4109 bool abort_if_fail)
4111 bool ret1, ret2, ret3, ret4;
4112 unsigned int uid = INSN_UID (insn);
4113 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
4115 df_insn_refs_collect (collection_rec, bb, insn_info);
4117 /* Unfortunately we cannot opt out early if one of these is not
4118 right because the marks will not get cleared. */
4119 ret1 = df_refs_verify (&collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4120 abort_if_fail);
4121 ret2 = df_refs_verify (&collection_rec->use_vec, DF_INSN_UID_USES (uid),
4122 abort_if_fail);
4123 ret3 = df_refs_verify (&collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4124 abort_if_fail);
4125 ret4 = df_mws_verify (&collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4126 abort_if_fail);
4127 return (ret1 && ret2 && ret3 && ret4);
4131 /* Return true if all refs in the basic block are correct and complete.
4132 Due to df_ref_chain_verify, it will cause all refs
4133 that are verified to have DF_REF_MARK bit set. */
4135 static bool
4136 df_bb_verify (basic_block bb)
4138 rtx_insn *insn;
4139 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4140 struct df_collection_rec collection_rec;
4142 gcc_assert (bb_info);
4144 /* Scan the block, one insn at a time, from beginning to end. */
4145 FOR_BB_INSNS_REVERSE (bb, insn)
4147 if (!INSN_P (insn))
4148 continue;
4149 df_insn_refs_verify (&collection_rec, bb, insn, true);
4150 df_free_collection_rec (&collection_rec);
4153 /* Do the artificial defs and uses. */
4154 df_bb_refs_collect (&collection_rec, bb);
4155 df_refs_verify (&collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4156 df_refs_verify (&collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4157 df_free_collection_rec (&collection_rec);
4159 return true;
4163 /* Returns true if the entry block has correct and complete df_ref set.
4164 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4166 static bool
4167 df_entry_block_bitmap_verify (bool abort_if_fail)
4169 bitmap_head entry_block_defs;
4170 bool is_eq;
4172 bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
4173 df_get_entry_block_def_set (&entry_block_defs);
4175 is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
4177 if (!is_eq && abort_if_fail)
4179 fprintf (stderr, "entry_block_defs = ");
4180 df_print_regset (stderr, &entry_block_defs);
4181 fprintf (stderr, "df->entry_block_defs = ");
4182 df_print_regset (stderr, df->entry_block_defs);
4183 gcc_assert (0);
4186 bitmap_clear (&entry_block_defs);
4188 return is_eq;
4192 /* Returns true if the exit block has correct and complete df_ref set.
4193 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4195 static bool
4196 df_exit_block_bitmap_verify (bool abort_if_fail)
4198 bitmap_head exit_block_uses;
4199 bool is_eq;
4201 bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
4202 df_get_exit_block_use_set (&exit_block_uses);
4204 is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
4206 if (!is_eq && abort_if_fail)
4208 fprintf (stderr, "exit_block_uses = ");
4209 df_print_regset (stderr, &exit_block_uses);
4210 fprintf (stderr, "df->exit_block_uses = ");
4211 df_print_regset (stderr, df->exit_block_uses);
4212 gcc_assert (0);
4215 bitmap_clear (&exit_block_uses);
4217 return is_eq;
4221 /* Return true if df_ref information for all insns in all blocks are
4222 correct and complete. */
4224 void
4225 df_scan_verify (void)
4227 unsigned int i;
4228 basic_block bb;
4229 bitmap_head regular_block_artificial_uses;
4230 bitmap_head eh_block_artificial_uses;
4232 if (!df)
4233 return;
4235 /* Verification is a 4 step process. */
4237 /* (1) All of the refs are marked by going through the reg chains. */
4238 for (i = 0; i < DF_REG_SIZE (df); i++)
4240 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4241 == DF_REG_DEF_COUNT (i));
4242 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4243 == DF_REG_USE_COUNT (i));
4244 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4245 == DF_REG_EQ_USE_COUNT (i));
4248 /* (2) There are various bitmaps whose value may change over the
4249 course of the compilation. This step recomputes them to make
4250 sure that they have not slipped out of date. */
4251 bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
4252 bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
4254 df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
4255 df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
4257 bitmap_ior_into (&eh_block_artificial_uses,
4258 &regular_block_artificial_uses);
4260 /* Check artificial_uses bitmaps didn't change. */
4261 gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
4262 &df->regular_block_artificial_uses));
4263 gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
4264 &df->eh_block_artificial_uses));
4266 bitmap_clear (&regular_block_artificial_uses);
4267 bitmap_clear (&eh_block_artificial_uses);
4269 /* Verify entry block and exit block. These only verify the bitmaps,
4270 the refs are verified in df_bb_verify. */
4271 df_entry_block_bitmap_verify (true);
4272 df_exit_block_bitmap_verify (true);
4274 /* (3) All of the insns in all of the blocks are traversed and the
4275 marks are cleared both in the artificial refs attached to the
4276 blocks and the real refs inside the insns. It is a failure to
4277 clear a mark that has not been set as this means that the ref in
4278 the block or insn was not in the reg chain. */
4280 FOR_ALL_BB_FN (bb, cfun)
4281 df_bb_verify (bb);
4283 /* (4) See if all reg chains are traversed a second time. This time
4284 a check is made that the marks are clear. A set mark would be a
4285 from a reg that is not in any insn or basic block. */
4287 for (i = 0; i < DF_REG_SIZE (df); i++)
4289 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4290 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4291 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));