[Ada] Missing range check on assignment to bit-packed array
[official-gcc.git] / gcc / df-scan.c
blob81e221ee38fbcc4fa6034b18006df59182074454
1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999-2019 Free Software Foundation, Inc.
3 Originally contributed by Michael P. Hayes
4 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
5 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
6 and Kenneth Zadeck (zadeck@naturalbridge.com).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "df.h"
32 #include "memmodel.h"
33 #include "tm_p.h"
34 #include "regs.h"
35 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
36 #include "dumpfile.h"
37 #include "calls.h"
40 /* The set of hard registers in eliminables[i].from. */
42 static HARD_REG_SET elim_reg_set;
44 /* Initialize ur_in and ur_out as if all hard registers were partially
45 available. */
47 struct df_collection_rec
49 auto_vec<df_ref, 128> def_vec;
50 auto_vec<df_ref, 32> use_vec;
51 auto_vec<df_ref, 32> eq_use_vec;
52 auto_vec<df_mw_hardreg *, 32> mw_vec;
55 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
56 rtx, rtx *,
57 basic_block, struct df_insn_info *,
58 enum df_ref_type, int ref_flags);
59 static void df_def_record_1 (struct df_collection_rec *, rtx *,
60 basic_block, struct df_insn_info *,
61 int ref_flags);
62 static void df_defs_record (struct df_collection_rec *, rtx,
63 basic_block, struct df_insn_info *,
64 int ref_flags);
65 static void df_uses_record (struct df_collection_rec *,
66 rtx *, enum df_ref_type,
67 basic_block, struct df_insn_info *,
68 int ref_flags);
70 static void df_install_ref_incremental (df_ref);
71 static void df_insn_refs_collect (struct df_collection_rec*,
72 basic_block, struct df_insn_info *);
73 static void df_canonize_collection_rec (struct df_collection_rec *);
75 static void df_get_regular_block_artificial_uses (bitmap);
76 static void df_get_eh_block_artificial_uses (bitmap);
78 static void df_record_entry_block_defs (bitmap);
79 static void df_record_exit_block_uses (bitmap);
80 static void df_get_exit_block_use_set (bitmap);
81 static void df_get_entry_block_def_set (bitmap);
82 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
83 static void df_ref_chain_delete_du_chain (df_ref);
84 static void df_ref_chain_delete (df_ref);
86 static void df_refs_add_to_chains (struct df_collection_rec *,
87 basic_block, rtx_insn *, unsigned int);
89 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block,
90 rtx_insn *, bool);
91 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
92 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
93 static void df_install_ref (df_ref, struct df_reg_info *,
94 struct df_ref_info *, bool);
96 static int df_ref_compare (df_ref, df_ref);
97 static int df_ref_ptr_compare (const void *, const void *);
98 static int df_mw_compare (const df_mw_hardreg *, const df_mw_hardreg *);
99 static int df_mw_ptr_compare (const void *, const void *);
101 static void df_insn_info_delete (unsigned int);
103 /* Indexed by hardware reg number, is true if that register is ever
104 used in the current function.
106 In df-scan.c, this is set up to record the hard regs used
107 explicitly. Reload adds in the hard regs used for holding pseudo
108 regs. Final uses it to generate the code in the function prologue
109 and epilogue to save and restore registers as needed. */
111 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
113 /* Flags used to tell df_refs_add_to_chains() which vectors it should copy. */
114 static const unsigned int copy_defs = 0x1;
115 static const unsigned int copy_uses = 0x2;
116 static const unsigned int copy_eq_uses = 0x4;
117 static const unsigned int copy_mw = 0x8;
118 static const unsigned int copy_all = copy_defs | copy_uses | copy_eq_uses
119 | copy_mw;
121 /*----------------------------------------------------------------------------
122 SCANNING DATAFLOW PROBLEM
124 There are several ways in which scanning looks just like the other
125 dataflow problems. It shares the all the mechanisms for local info
126 as well as basic block info. Where it differs is when and how often
127 it gets run. It also has no need for the iterative solver.
128 ----------------------------------------------------------------------------*/
130 /* Problem data for the scanning dataflow function. */
131 struct df_scan_problem_data
133 object_allocator<df_base_ref> *ref_base_pool;
134 object_allocator<df_artificial_ref> *ref_artificial_pool;
135 object_allocator<df_regular_ref> *ref_regular_pool;
136 object_allocator<df_insn_info> *insn_pool;
137 object_allocator<df_reg_info> *reg_pool;
138 object_allocator<df_mw_hardreg> *mw_reg_pool;
140 bitmap_obstack reg_bitmaps;
141 bitmap_obstack insn_bitmaps;
144 /* Internal function to shut down the scanning problem. */
145 static void
146 df_scan_free_internal (void)
148 struct df_scan_problem_data *problem_data
149 = (struct df_scan_problem_data *) df_scan->problem_data;
151 free (df->def_info.refs);
152 free (df->def_info.begin);
153 free (df->def_info.count);
154 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
156 free (df->use_info.refs);
157 free (df->use_info.begin);
158 free (df->use_info.count);
159 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
161 free (df->def_regs);
162 df->def_regs = NULL;
163 free (df->use_regs);
164 df->use_regs = NULL;
165 free (df->eq_use_regs);
166 df->eq_use_regs = NULL;
167 df->regs_size = 0;
168 DF_REG_SIZE (df) = 0;
170 free (df->insns);
171 df->insns = NULL;
172 DF_INSN_SIZE () = 0;
174 free (df_scan->block_info);
175 df_scan->block_info = NULL;
176 df_scan->block_info_size = 0;
178 bitmap_clear (&df->hardware_regs_used);
179 bitmap_clear (&df->regular_block_artificial_uses);
180 bitmap_clear (&df->eh_block_artificial_uses);
181 BITMAP_FREE (df->entry_block_defs);
182 BITMAP_FREE (df->exit_block_uses);
183 bitmap_clear (&df->insns_to_delete);
184 bitmap_clear (&df->insns_to_rescan);
185 bitmap_clear (&df->insns_to_notes_rescan);
187 delete problem_data->ref_base_pool;
188 delete problem_data->ref_artificial_pool;
189 delete problem_data->ref_regular_pool;
190 delete problem_data->insn_pool;
191 delete problem_data->reg_pool;
192 delete problem_data->mw_reg_pool;
193 bitmap_obstack_release (&problem_data->reg_bitmaps);
194 bitmap_obstack_release (&problem_data->insn_bitmaps);
195 free (df_scan->problem_data);
199 /* Free basic block info. */
201 static void
202 df_scan_free_bb_info (basic_block bb, void *vbb_info)
204 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
205 unsigned int bb_index = bb->index;
206 rtx_insn *insn;
208 FOR_BB_INSNS (bb, insn)
209 if (INSN_P (insn))
210 df_insn_info_delete (INSN_UID (insn));
212 if (bb_index < df_scan->block_info_size)
213 bb_info = df_scan_get_bb_info (bb_index);
215 /* Get rid of any artificial uses or defs. */
216 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
217 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
218 df_ref_chain_delete (bb_info->artificial_defs);
219 df_ref_chain_delete (bb_info->artificial_uses);
220 bb_info->artificial_defs = NULL;
221 bb_info->artificial_uses = NULL;
225 /* Allocate the problem data for the scanning problem. This should be
226 called when the problem is created or when the entire function is to
227 be rescanned. */
228 void
229 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
231 struct df_scan_problem_data *problem_data;
232 basic_block bb;
234 /* Given the number of pools, this is really faster than tearing
235 everything apart. */
236 if (df_scan->problem_data)
237 df_scan_free_internal ();
239 problem_data = XNEW (struct df_scan_problem_data);
240 df_scan->problem_data = problem_data;
241 df_scan->computed = true;
243 problem_data->ref_base_pool = new object_allocator<df_base_ref>
244 ("df_scan ref base");
245 problem_data->ref_artificial_pool = new object_allocator<df_artificial_ref>
246 ("df_scan ref artificial");
247 problem_data->ref_regular_pool = new object_allocator<df_regular_ref>
248 ("df_scan ref regular");
249 problem_data->insn_pool = new object_allocator<df_insn_info>
250 ("df_scan insn");
251 problem_data->reg_pool = new object_allocator<df_reg_info>
252 ("df_scan reg");
253 problem_data->mw_reg_pool = new object_allocator<df_mw_hardreg>
254 ("df_scan mw_reg");
256 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
257 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
259 df_grow_reg_info ();
261 df_grow_insn_info ();
262 df_grow_bb_info (df_scan);
264 FOR_ALL_BB_FN (bb, cfun)
266 unsigned int bb_index = bb->index;
267 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
268 bb_info->artificial_defs = NULL;
269 bb_info->artificial_uses = NULL;
272 bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
273 bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
274 bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
275 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
276 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
277 bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
278 bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
279 bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
280 df_scan->optional_p = false;
284 /* Free all of the data associated with the scan problem. */
286 static void
287 df_scan_free (void)
289 if (df_scan->problem_data)
290 df_scan_free_internal ();
292 if (df->blocks_to_analyze)
294 BITMAP_FREE (df->blocks_to_analyze);
295 df->blocks_to_analyze = NULL;
298 free (df_scan);
301 /* Dump the preamble for DF_SCAN dump. */
302 static void
303 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
305 int i;
306 int dcount = 0;
307 int ucount = 0;
308 int ecount = 0;
309 int icount = 0;
310 int ccount = 0;
311 basic_block bb;
312 rtx_insn *insn;
314 fprintf (file, ";; invalidated by call \t");
315 df_print_regset (file, regs_invalidated_by_call_regset);
316 fprintf (file, ";; hardware regs used \t");
317 df_print_regset (file, &df->hardware_regs_used);
318 fprintf (file, ";; regular block artificial uses \t");
319 df_print_regset (file, &df->regular_block_artificial_uses);
320 fprintf (file, ";; eh block artificial uses \t");
321 df_print_regset (file, &df->eh_block_artificial_uses);
322 fprintf (file, ";; entry block defs \t");
323 df_print_regset (file, df->entry_block_defs);
324 fprintf (file, ";; exit block uses \t");
325 df_print_regset (file, df->exit_block_uses);
326 fprintf (file, ";; regs ever live \t");
327 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
328 if (df_regs_ever_live_p (i))
329 fprintf (file, " %d [%s]", i, reg_names[i]);
330 fprintf (file, "\n;; ref usage \t");
332 for (i = 0; i < (int)df->regs_inited; i++)
333 if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
335 const char * sep = "";
337 fprintf (file, "r%d={", i);
338 if (DF_REG_DEF_COUNT (i))
340 fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
341 sep = ",";
342 dcount += DF_REG_DEF_COUNT (i);
344 if (DF_REG_USE_COUNT (i))
346 fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
347 sep = ",";
348 ucount += DF_REG_USE_COUNT (i);
350 if (DF_REG_EQ_USE_COUNT (i))
352 fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
353 ecount += DF_REG_EQ_USE_COUNT (i);
355 fprintf (file, "} ");
358 FOR_EACH_BB_FN (bb, cfun)
359 FOR_BB_INSNS (bb, insn)
360 if (INSN_P (insn))
362 if (CALL_P (insn))
363 ccount++;
364 else
365 icount++;
368 fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
369 " in %d{%d regular + %d call} insns.\n",
370 dcount + ucount + ecount, dcount, ucount, ecount,
371 icount + ccount, icount, ccount);
374 /* Dump the bb_info for a given basic block. */
375 static void
376 df_scan_start_block (basic_block bb, FILE *file)
378 struct df_scan_bb_info *bb_info
379 = df_scan_get_bb_info (bb->index);
381 if (bb_info)
383 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
384 df_refs_chain_dump (bb_info->artificial_defs, true, file);
385 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
386 df_refs_chain_dump (bb_info->artificial_uses, true, file);
387 fprintf (file, "\n");
389 #if 0
391 rtx_insn *insn;
392 FOR_BB_INSNS (bb, insn)
393 if (INSN_P (insn))
394 df_insn_debug (insn, false, file);
396 #endif
399 static const struct df_problem problem_SCAN =
401 DF_SCAN, /* Problem id. */
402 DF_NONE, /* Direction. */
403 df_scan_alloc, /* Allocate the problem specific data. */
404 NULL, /* Reset global information. */
405 df_scan_free_bb_info, /* Free basic block info. */
406 NULL, /* Local compute function. */
407 NULL, /* Init the solution specific data. */
408 NULL, /* Iterative solver. */
409 NULL, /* Confluence operator 0. */
410 NULL, /* Confluence operator n. */
411 NULL, /* Transfer function. */
412 NULL, /* Finalize function. */
413 df_scan_free, /* Free all of the problem information. */
414 NULL, /* Remove this problem from the stack of dataflow problems. */
415 df_scan_start_dump, /* Debugging. */
416 df_scan_start_block, /* Debugging start block. */
417 NULL, /* Debugging end block. */
418 NULL, /* Debugging start insn. */
419 NULL, /* Debugging end insn. */
420 NULL, /* Incremental solution verify start. */
421 NULL, /* Incremental solution verify end. */
422 NULL, /* Dependent problem. */
423 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
424 TV_DF_SCAN, /* Timing variable. */
425 false /* Reset blocks on dropping out of blocks_to_analyze. */
429 /* Create a new DATAFLOW instance and add it to an existing instance
430 of DF. The returned structure is what is used to get at the
431 solution. */
433 void
434 df_scan_add_problem (void)
436 df_add_problem (&problem_SCAN);
440 /*----------------------------------------------------------------------------
441 Storage Allocation Utilities
442 ----------------------------------------------------------------------------*/
445 /* First, grow the reg_info information. If the current size is less than
446 the number of pseudos, grow to 25% more than the number of
447 pseudos.
449 Second, assure that all of the slots up to max_reg_num have been
450 filled with reg_info structures. */
452 void
453 df_grow_reg_info (void)
455 unsigned int max_reg = max_reg_num ();
456 unsigned int new_size = max_reg;
457 struct df_scan_problem_data *problem_data
458 = (struct df_scan_problem_data *) df_scan->problem_data;
459 unsigned int i;
461 if (df->regs_size < new_size)
463 new_size += new_size / 4;
464 df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
465 df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
466 df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
467 new_size);
468 df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
469 df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
470 df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
471 df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
472 df->regs_size = new_size;
475 for (i = df->regs_inited; i < max_reg; i++)
477 struct df_reg_info *reg_info;
479 // TODO
480 reg_info = problem_data->reg_pool->allocate ();
481 memset (reg_info, 0, sizeof (struct df_reg_info));
482 df->def_regs[i] = reg_info;
483 reg_info = problem_data->reg_pool->allocate ();
484 memset (reg_info, 0, sizeof (struct df_reg_info));
485 df->use_regs[i] = reg_info;
486 reg_info = problem_data->reg_pool->allocate ();
487 memset (reg_info, 0, sizeof (struct df_reg_info));
488 df->eq_use_regs[i] = reg_info;
489 df->def_info.begin[i] = 0;
490 df->def_info.count[i] = 0;
491 df->use_info.begin[i] = 0;
492 df->use_info.count[i] = 0;
495 df->regs_inited = max_reg;
499 /* Grow the ref information. */
501 static void
502 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
504 if (ref_info->refs_size < new_size)
506 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
507 memset (ref_info->refs + ref_info->refs_size, 0,
508 (new_size - ref_info->refs_size) *sizeof (df_ref));
509 ref_info->refs_size = new_size;
514 /* Check and grow the ref information if necessary. This routine
515 guarantees total_size + BITMAP_ADDEND amount of entries in refs
516 array. It updates ref_info->refs_size only and does not change
517 ref_info->total_size. */
519 static void
520 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
521 unsigned bitmap_addend)
523 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
525 int new_size = ref_info->total_size + bitmap_addend;
526 new_size += ref_info->total_size / 4;
527 df_grow_ref_info (ref_info, new_size);
532 /* Grow the ref information. If the current size is less than the
533 number of instructions, grow to 25% more than the number of
534 instructions. */
536 void
537 df_grow_insn_info (void)
539 unsigned int new_size = get_max_uid () + 1;
540 if (DF_INSN_SIZE () < new_size)
542 new_size += new_size / 4;
543 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
544 memset (df->insns + df->insns_size, 0,
545 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
546 DF_INSN_SIZE () = new_size;
553 /*----------------------------------------------------------------------------
554 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
555 ----------------------------------------------------------------------------*/
557 /* Rescan all of the block_to_analyze or all of the blocks in the
558 function if df_set_blocks if blocks_to_analyze is NULL; */
560 void
561 df_scan_blocks (void)
563 basic_block bb;
565 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
566 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
568 df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
569 df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);
571 bitmap_ior_into (&df->eh_block_artificial_uses,
572 &df->regular_block_artificial_uses);
574 /* ENTRY and EXIT blocks have special defs/uses. */
575 df_get_entry_block_def_set (df->entry_block_defs);
576 df_record_entry_block_defs (df->entry_block_defs);
577 df_get_exit_block_use_set (df->exit_block_uses);
578 df_record_exit_block_uses (df->exit_block_uses);
579 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
580 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
582 /* Regular blocks */
583 FOR_EACH_BB_FN (bb, cfun)
585 unsigned int bb_index = bb->index;
586 df_bb_refs_record (bb_index, true);
590 /* Create new refs under address LOC within INSN. This function is
591 only used externally. REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
592 depending on whether LOC is inside PATTERN (INSN) or a note. */
594 void
595 df_uses_create (rtx *loc, rtx_insn *insn, int ref_flags)
597 gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
598 df_uses_record (NULL, loc, DF_REF_REG_USE,
599 BLOCK_FOR_INSN (insn),
600 DF_INSN_INFO_GET (insn),
601 ref_flags);
604 static void
605 df_install_ref_incremental (df_ref ref)
607 struct df_reg_info **reg_info;
608 struct df_ref_info *ref_info;
609 df_ref *ref_ptr;
610 bool add_to_table;
612 rtx_insn *insn = DF_REF_INSN (ref);
613 basic_block bb = BLOCK_FOR_INSN (insn);
615 if (DF_REF_REG_DEF_P (ref))
617 reg_info = df->def_regs;
618 ref_info = &df->def_info;
619 ref_ptr = &DF_INSN_DEFS (insn);
620 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
622 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
624 reg_info = df->eq_use_regs;
625 ref_info = &df->use_info;
626 ref_ptr = &DF_INSN_EQ_USES (insn);
627 switch (ref_info->ref_order)
629 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
630 case DF_REF_ORDER_BY_REG_WITH_NOTES:
631 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
632 add_to_table = true;
633 break;
634 default:
635 add_to_table = false;
636 break;
639 else
641 reg_info = df->use_regs;
642 ref_info = &df->use_info;
643 ref_ptr = &DF_INSN_USES (insn);
644 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
647 /* Do not add if ref is not in the right blocks. */
648 if (add_to_table && df->analyze_subset)
649 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
651 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
653 if (add_to_table)
654 switch (ref_info->ref_order)
656 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
657 case DF_REF_ORDER_BY_REG_WITH_NOTES:
658 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
659 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
660 break;
661 default:
662 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
663 break;
666 while (*ref_ptr && df_ref_compare (*ref_ptr, ref) < 0)
667 ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
669 DF_REF_NEXT_LOC (ref) = *ref_ptr;
670 *ref_ptr = ref;
672 #if 0
673 if (dump_file)
675 fprintf (dump_file, "adding ref ");
676 df_ref_debug (ref, dump_file);
678 #endif
679 /* By adding the ref directly, df_insn_rescan my not find any
680 differences even though the block will have changed. So we need
681 to mark the block dirty ourselves. */
682 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
683 df_set_bb_dirty (bb);
688 /*----------------------------------------------------------------------------
689 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
690 ----------------------------------------------------------------------------*/
692 static void
693 df_free_ref (df_ref ref)
695 struct df_scan_problem_data *problem_data
696 = (struct df_scan_problem_data *) df_scan->problem_data;
698 switch (DF_REF_CLASS (ref))
700 case DF_REF_BASE:
701 problem_data->ref_base_pool->remove ((df_base_ref *) (ref));
702 break;
704 case DF_REF_ARTIFICIAL:
705 problem_data->ref_artificial_pool->remove
706 ((df_artificial_ref *) (ref));
707 break;
709 case DF_REF_REGULAR:
710 problem_data->ref_regular_pool->remove
711 ((df_regular_ref *) (ref));
712 break;
717 /* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
718 Also delete the def-use or use-def chain if it exists. */
720 static void
721 df_reg_chain_unlink (df_ref ref)
723 df_ref next = DF_REF_NEXT_REG (ref);
724 df_ref prev = DF_REF_PREV_REG (ref);
725 int id = DF_REF_ID (ref);
726 struct df_reg_info *reg_info;
727 df_ref *refs = NULL;
729 if (DF_REF_REG_DEF_P (ref))
731 int regno = DF_REF_REGNO (ref);
732 reg_info = DF_REG_DEF_GET (regno);
733 refs = df->def_info.refs;
735 else
737 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
739 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
740 switch (df->use_info.ref_order)
742 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
743 case DF_REF_ORDER_BY_REG_WITH_NOTES:
744 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
745 refs = df->use_info.refs;
746 break;
747 default:
748 break;
751 else
753 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
754 refs = df->use_info.refs;
758 if (refs)
760 if (df->analyze_subset)
762 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
763 refs[id] = NULL;
765 else
766 refs[id] = NULL;
769 /* Delete any def-use or use-def chains that start here. It is
770 possible that there is trash in this field. This happens for
771 insns that have been deleted when rescanning has been deferred
772 and the chain problem has also been deleted. The chain tear down
773 code skips deleted insns. */
774 if (df_chain && DF_REF_CHAIN (ref))
775 df_chain_unlink (ref);
777 reg_info->n_refs--;
778 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
780 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
781 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
784 /* Unlink from the reg chain. If there is no prev, this is the
785 first of the list. If not, just join the next and prev. */
786 if (prev)
787 DF_REF_NEXT_REG (prev) = next;
788 else
790 gcc_assert (reg_info->reg_chain == ref);
791 reg_info->reg_chain = next;
793 if (next)
794 DF_REF_PREV_REG (next) = prev;
796 df_free_ref (ref);
799 /* Initialize INSN_INFO to describe INSN. */
801 static void
802 df_insn_info_init_fields (df_insn_info *insn_info, rtx_insn *insn)
804 memset (insn_info, 0, sizeof (struct df_insn_info));
805 insn_info->insn = insn;
808 /* Create the insn record for INSN. If there was one there, zero it
809 out. */
811 struct df_insn_info *
812 df_insn_create_insn_record (rtx_insn *insn)
814 struct df_scan_problem_data *problem_data
815 = (struct df_scan_problem_data *) df_scan->problem_data;
816 struct df_insn_info *insn_rec;
818 df_grow_insn_info ();
819 insn_rec = DF_INSN_INFO_GET (insn);
820 if (!insn_rec)
822 insn_rec = problem_data->insn_pool->allocate ();
823 DF_INSN_INFO_SET (insn, insn_rec);
825 df_insn_info_init_fields (insn_rec, insn);
826 return insn_rec;
830 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
832 static void
833 df_ref_chain_delete_du_chain (df_ref ref)
835 for (; ref; ref = DF_REF_NEXT_LOC (ref))
836 /* CHAIN is allocated by DF_CHAIN. So make sure to
837 pass df_scan instance for the problem. */
838 if (DF_REF_CHAIN (ref))
839 df_chain_unlink (ref);
843 /* Delete all refs in the ref chain. */
845 static void
846 df_ref_chain_delete (df_ref ref)
848 df_ref next;
849 for (; ref; ref = next)
851 next = DF_REF_NEXT_LOC (ref);
852 df_reg_chain_unlink (ref);
857 /* Delete the hardreg chain. */
859 static void
860 df_mw_hardreg_chain_delete (struct df_mw_hardreg *hardregs)
862 struct df_scan_problem_data *problem_data
863 = (struct df_scan_problem_data *) df_scan->problem_data;
864 df_mw_hardreg *next;
866 for (; hardregs; hardregs = next)
868 next = DF_MWS_NEXT (hardregs);
869 problem_data->mw_reg_pool->remove (hardregs);
873 /* Remove the contents of INSN_INFO (but don't free INSN_INFO itself). */
875 static void
876 df_insn_info_free_fields (df_insn_info *insn_info)
878 /* In general, notes do not have the insn_info fields
879 initialized. However, combine deletes insns by changing them
880 to notes. How clever. So we cannot just check if it is a
881 valid insn before short circuiting this code, we need to see
882 if we actually initialized it. */
883 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
885 if (df_chain)
887 df_ref_chain_delete_du_chain (insn_info->defs);
888 df_ref_chain_delete_du_chain (insn_info->uses);
889 df_ref_chain_delete_du_chain (insn_info->eq_uses);
892 df_ref_chain_delete (insn_info->defs);
893 df_ref_chain_delete (insn_info->uses);
894 df_ref_chain_delete (insn_info->eq_uses);
897 /* Delete all of the refs information from the insn with UID.
898 Internal helper for df_insn_delete, df_insn_rescan, and other
899 df-scan routines that don't have to work in deferred mode
900 and do not have to mark basic blocks for re-processing. */
902 static void
903 df_insn_info_delete (unsigned int uid)
905 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
907 bitmap_clear_bit (&df->insns_to_delete, uid);
908 bitmap_clear_bit (&df->insns_to_rescan, uid);
909 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
910 if (insn_info)
912 struct df_scan_problem_data *problem_data
913 = (struct df_scan_problem_data *) df_scan->problem_data;
915 df_insn_info_free_fields (insn_info);
916 problem_data->insn_pool->remove (insn_info);
917 DF_INSN_UID_SET (uid, NULL);
921 /* Delete all of the refs information from INSN, either right now
922 or marked for later in deferred mode. */
924 void
925 df_insn_delete (rtx_insn *insn)
927 unsigned int uid;
928 basic_block bb;
930 gcc_checking_assert (INSN_P (insn));
932 if (!df)
933 return;
935 uid = INSN_UID (insn);
936 bb = BLOCK_FOR_INSN (insn);
938 /* ??? bb can be NULL after pass_free_cfg. At that point, DF should
939 not exist anymore (as mentioned in df-core.c: "The only requirement
940 [for DF] is that there be a correct control flow graph." Clearly
941 that isn't the case after pass_free_cfg. But DF is freed much later
942 because some back-ends want to use DF info even though the CFG is
943 already gone. It's not clear to me whether that is safe, actually.
944 In any case, we expect BB to be non-NULL at least up to register
945 allocation, so disallow a non-NULL BB up to there. Not perfect
946 but better than nothing... */
947 gcc_checking_assert (bb != NULL || reload_completed);
949 df_grow_bb_info (df_scan);
950 df_grow_reg_info ();
952 /* The block must be marked as dirty now, rather than later as in
953 df_insn_rescan and df_notes_rescan because it may not be there at
954 rescanning time and the mark would blow up.
955 DEBUG_INSNs do not make a block's data flow solution dirty (at
956 worst the LUIDs are no longer contiguous). */
957 if (bb != NULL && NONDEBUG_INSN_P (insn))
958 df_set_bb_dirty (bb);
960 /* The client has deferred rescanning. */
961 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
963 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
964 if (insn_info)
966 bitmap_clear_bit (&df->insns_to_rescan, uid);
967 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
968 bitmap_set_bit (&df->insns_to_delete, uid);
970 if (dump_file)
971 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
972 return;
975 if (dump_file)
976 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
978 df_insn_info_delete (uid);
982 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
984 static void
985 df_free_collection_rec (struct df_collection_rec *collection_rec)
987 unsigned int ix;
988 struct df_scan_problem_data *problem_data
989 = (struct df_scan_problem_data *) df_scan->problem_data;
990 df_ref ref;
991 struct df_mw_hardreg *mw;
993 FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
994 df_free_ref (ref);
995 FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
996 df_free_ref (ref);
997 FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
998 df_free_ref (ref);
999 FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
1000 problem_data->mw_reg_pool->remove (mw);
1002 collection_rec->def_vec.release ();
1003 collection_rec->use_vec.release ();
1004 collection_rec->eq_use_vec.release ();
1005 collection_rec->mw_vec.release ();
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.
   Honors the DF_NO_INSN_RESCAN and DF_DEFER_INSN_RESCAN changeable
   flags: under the former nothing is done, under the latter the insn
   is only queued in df->insns_to_rescan.  */

bool
df_insn_rescan (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;

  /* Nothing to do if df is not active or INSN carries no dataflow.  */
  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
	fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  /* Create an empty record so the insn is known; the real refs
	     will be collected when the deferred rescans are processed.  */
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = 0;
	  insn_info->uses = 0;
	  insn_info->eq_uses = 0;
	  insn_info->mw_hardregs = 0;
	}
      if (dump_file)
	fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      /* A full rescan subsumes any pending delete or notes-rescan.  */
      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false. */
      if (the_same)
	{
	  df_free_collection_rec (&collection_rec);
	  if (dump_file)
	    fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
	 Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_info_free_fields (insn_info);
      df_insn_info_init_fields (insn_info, insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
	fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
  if (!DEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  return true;
}
/* Same as df_insn_rescan, but don't mark the basic block as
   dirty.  Only valid for a debug insn whose location is already
   unknown; it simply drops all the refs the insn previously had.
   Returns TRUE if any refs were actually deleted.  */

bool
df_insn_rescan_debug_internal (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  gcc_assert (DEBUG_INSN_P (insn)
	      && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  /* The insn no longer needs any deferred processing.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info->defs == 0
      && insn_info->uses == 0
      && insn_info->eq_uses == 0
      && insn_info->mw_hardregs == 0)
    return false;

  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  insn_info->defs = 0;
  insn_info->uses = 0;
  insn_info->eq_uses = 0;
  insn_info->mw_hardregs = 0;

  return true;
}
/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  The DF_NO_INSN_RESCAN / DF_DEFER_INSN_RESCAN
   flags are temporarily cleared so the rescans happen immediately,
   then restored on exit.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  /* Iterate over a copy since df_insn_info_delete modifies the
     deferred-work bitmaps.  */
  auto_bitmap tmp (&df_bitmap_obstack);
  bitmap_copy (tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_info_delete (uid);
    }

  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      FOR_BB_INSNS (bb, insn)
	{
	  df_insn_rescan (insn);
	}
    }

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}
/* Process all of the deferred rescans or deletions.  Deletions are
   processed first so that a uid queued for both delete and rescan is
   not rescanned after its record is gone.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;

  /* Temporarily drop the flags so the queued work is done now.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  /* Work on copies: the rescan/delete calls mutate the bitmaps.  */
  auto_bitmap tmp (&df_bitmap_obstack);
  bitmap_copy (tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_info_delete (uid);
    }

  bitmap_copy (tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}
1277 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1278 the uses if INCLUDE_USES. Include the eq_uses if
1279 INCLUDE_EQ_USES. */
1281 static unsigned int
1282 df_count_refs (bool include_defs, bool include_uses,
1283 bool include_eq_uses)
1285 unsigned int regno;
1286 int size = 0;
1287 unsigned int m = df->regs_inited;
1289 for (regno = 0; regno < m; regno++)
1291 if (include_defs)
1292 size += DF_REG_DEF_COUNT (regno);
1293 if (include_uses)
1294 size += DF_REG_USE_COUNT (regno);
1295 if (include_eq_uses)
1296 size += DF_REG_EQ_USE_COUNT (regno);
1298 return size;
/* Take build ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.
   Assigns each included ref a dense DF_REF_ID and records, per regno,
   where its run of refs begins and how long it is.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
				  bool include_defs,
				  bool include_uses,
				  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      /* Hard regs are skipped entirely; clear their table slots.  */
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
	{
	  df_ref ref = DF_REG_DEF_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      if (include_uses)
	{
	  df_ref ref = DF_REG_USE_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      if (include_eq_uses)
	{
	  df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
1381 /* Take build ref table for either the uses or defs from the reg-use
1382 or reg-def chains. This version processes the refs in insn order
1383 which is likely to be best if processing some segment of the
1384 function. */
1386 static void
1387 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1388 bool include_defs,
1389 bool include_uses,
1390 bool include_eq_uses)
1392 bitmap_iterator bi;
1393 unsigned int bb_index;
1394 unsigned int m = df->regs_inited;
1395 unsigned int offset = 0;
1396 unsigned int r;
1397 unsigned int start
1398 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1400 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1401 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1403 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1404 df_check_and_grow_ref_info (ref_info, 1);
1406 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1408 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1409 rtx_insn *insn;
1410 df_ref def, use;
1412 if (include_defs)
1413 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1415 unsigned int regno = DF_REF_REGNO (def);
1416 ref_info->count[regno]++;
1418 if (include_uses)
1419 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1421 unsigned int regno = DF_REF_REGNO (use);
1422 ref_info->count[regno]++;
1425 FOR_BB_INSNS (bb, insn)
1427 if (INSN_P (insn))
1429 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1431 if (include_defs)
1432 FOR_EACH_INSN_INFO_DEF (def, insn_info)
1434 unsigned int regno = DF_REF_REGNO (def);
1435 ref_info->count[regno]++;
1437 if (include_uses)
1438 FOR_EACH_INSN_INFO_USE (use, insn_info)
1440 unsigned int regno = DF_REF_REGNO (use);
1441 ref_info->count[regno]++;
1443 if (include_eq_uses)
1444 FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1446 unsigned int regno = DF_REF_REGNO (use);
1447 ref_info->count[regno]++;
1453 for (r = start; r < m; r++)
1455 ref_info->begin[r] = offset;
1456 offset += ref_info->count[r];
1457 ref_info->count[r] = 0;
1460 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1462 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1463 rtx_insn *insn;
1464 df_ref def, use;
1466 if (include_defs)
1467 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1469 unsigned int regno = DF_REF_REGNO (def);
1470 if (regno >= start)
1472 unsigned int id
1473 = ref_info->begin[regno] + ref_info->count[regno]++;
1474 DF_REF_ID (def) = id;
1475 ref_info->refs[id] = def;
1478 if (include_uses)
1479 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1481 unsigned int regno = DF_REF_REGNO (def);
1482 if (regno >= start)
1484 unsigned int id
1485 = ref_info->begin[regno] + ref_info->count[regno]++;
1486 DF_REF_ID (use) = id;
1487 ref_info->refs[id] = use;
1491 FOR_BB_INSNS (bb, insn)
1493 if (INSN_P (insn))
1495 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1497 if (include_defs)
1498 FOR_EACH_INSN_INFO_DEF (def, insn_info)
1500 unsigned int regno = DF_REF_REGNO (def);
1501 if (regno >= start)
1503 unsigned int id
1504 = ref_info->begin[regno] + ref_info->count[regno]++;
1505 DF_REF_ID (def) = id;
1506 ref_info->refs[id] = def;
1509 if (include_uses)
1510 FOR_EACH_INSN_INFO_USE (use, insn_info)
1512 unsigned int regno = DF_REF_REGNO (use);
1513 if (regno >= start)
1515 unsigned int id
1516 = ref_info->begin[regno] + ref_info->count[regno]++;
1517 DF_REF_ID (use) = id;
1518 ref_info->refs[id] = use;
1521 if (include_eq_uses)
1522 FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1524 unsigned int regno = DF_REF_REGNO (use);
1525 if (regno >= start)
1527 unsigned int id
1528 = ref_info->begin[regno] + ref_info->count[regno]++;
1529 DF_REF_ID (use) = id;
1530 ref_info->refs[id] = use;
1537 /* The bitmap size is not decremented when refs are deleted. So
1538 reset it now that we have squished out all of the empty
1539 slots. */
1541 ref_info->table_size = offset;
1544 /* Take build ref table for either the uses or defs from the reg-use
1545 or reg-def chains. */
1547 static void
1548 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1549 bool include_defs,
1550 bool include_uses,
1551 bool include_eq_uses)
1553 if (df->analyze_subset)
1554 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1555 include_uses, include_eq_uses);
1556 else
1557 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1558 include_uses, include_eq_uses);
1562 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1563 static unsigned int
1564 df_add_refs_to_table (unsigned int offset,
1565 struct df_ref_info *ref_info,
1566 df_ref ref)
1568 for (; ref; ref = DF_REF_NEXT_LOC (ref))
1569 if (!(df->changeable_flags & DF_NO_HARD_REGS)
1570 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1572 ref_info->refs[offset] = ref;
1573 DF_REF_ID (ref) = offset++;
1575 return offset;
/* Add the refs in all of the insns of BB to the table in REF_INFO,
   starting at OFFSET.  Include the defs if INCLUDE_DEFS.  Include the
   uses if INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.
   Artificial refs of the block are added before the insn refs.
   Returns the offset just past the refs added.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
			       struct df_ref_info *ref_info,
			       bool include_defs, bool include_uses,
			       bool include_eq_uses)
{
  rtx_insn *insn;

  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
				   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
				   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
	unsigned int uid = INSN_UID (insn);
	if (include_defs)
	  offset = df_add_refs_to_table (offset, ref_info,
					 DF_INSN_UID_DEFS (uid));
	if (include_uses)
	  offset = df_add_refs_to_table (offset, ref_info,
					 DF_INSN_UID_USES (uid));
	if (include_eq_uses)
	  offset = df_add_refs_to_table (offset, ref_info,
					 DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}
/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
			    bool include_defs, bool include_uses,
			    bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
	{
	  offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK_FOR_FN (cfun,
								      index),
						  offset, ref_info,
						  include_defs, include_uses,
						  include_eq_uses);
	}

      ref_info->table_size = offset;
    }
  else
    {
      /* FOR_ALL_BB_FN also covers the entry and exit blocks, whose
	 artificial refs must be included.  */
      FOR_ALL_BB_FN (bb, cfun)
	offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
						include_defs, include_uses,
						include_eq_uses);
      ref_info->table_size = offset;
    }
}
/* If the use refs in DF are not organized, reorganize them.
   ORDER selects how the table is built (by reg or by insn, with or
   without the refs from REG_EQUAL/REG_EQUIV notes) or requests that
   the table be dropped entirely (DF_REF_ORDER_NO_TABLE).  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  /* Already in the requested order; nothing to do.  */
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      /* Unordered cannot be requested; it only arises as a side effect
	 of installing refs.  */
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}
/* If the def refs in DF are not organized, reorganize them.
   Same contract as df_maybe_reorganize_use_refs, but the WITH_NOTES
   orders are invalid here: defs never come from notes.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  /* Already in the requested order; nothing to do.  */
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}
/* Change all of the basic block references in INSN to use the insn's
   current basic block.  This function is called from routines that move
   instructions from one block to another.  Sets INSN's block to NEW_BB
   and marks both the old and new blocks dirty; an insn df has never
   seen is simply rescanned.  */

void
df_insn_change_bb (rtx_insn *insn, basic_block new_bb)
{
  basic_block old_bb = BLOCK_FOR_INSN (insn);
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (old_bb == new_bb)
    return;

  set_block_for_insn (insn, new_bb);

  if (!df)
    return;

  if (dump_file)
    fprintf (dump_file, "changing bb of uid %d\n", uid);

  insn_info = DF_INSN_UID_SAFE_GET (uid);
  if (insn_info == NULL)
    {
      if (dump_file)
	fprintf (dump_file, "  unscanned insn\n");
      df_insn_rescan (insn);
      return;
    }

  if (!INSN_P (insn))
    return;

  df_set_bb_dirty (new_bb);
  if (old_bb)
    {
      if (dump_file)
	fprintf (dump_file, "  from %d to %d\n",
		 old_bb->index, new_bb->index);
      df_set_bb_dirty (old_bb);
    }
  else
    if (dump_file)
      fprintf (dump_file, "  to %d\n", new_bb->index);
}
/* Helper function for df_ref_change_reg_with_loc.  Move every
   non-artificial ref on OLD_DF's reg chain whose location is LOC onto
   NEW_DF's chain, renumbering it to NEW_REGNO, and re-sort the
   insn-local ref list the moved ref lives in (regno is a sort key).  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
			      struct df_reg_info *new_df,
			      unsigned int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  while (the_ref)
    {
      /* Artificial refs have no LOC, so they can never match.  */
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
	  && DF_REF_LOC (the_ref)
	  && (*DF_REF_LOC (the_ref) == loc))
	{
	  df_ref next_ref = DF_REF_NEXT_REG (the_ref);
	  df_ref prev_ref = DF_REF_PREV_REG (the_ref);
	  df_ref *ref_ptr;
	  struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);

	  DF_REF_REGNO (the_ref) = new_regno;
	  DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

	  /* Pull the_ref out of the old regno chain.  */
	  if (prev_ref)
	    DF_REF_NEXT_REG (prev_ref) = next_ref;
	  else
	    old_df->reg_chain = next_ref;
	  if (next_ref)
	    DF_REF_PREV_REG (next_ref) = prev_ref;
	  old_df->n_refs--;

	  /* Put the ref into the new regno chain.  */
	  DF_REF_PREV_REG (the_ref) = NULL;
	  DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
	  if (new_df->reg_chain)
	    DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
	  new_df->reg_chain = the_ref;
	  new_df->n_refs++;
	  if (DF_REF_BB (the_ref))
	    df_set_bb_dirty (DF_REF_BB (the_ref));

	  /* Need to sort the record again that the ref was in because
	     the regno is a sorting key.  First, find the right
	     record.  */
	  if (DF_REF_REG_DEF_P (the_ref))
	    ref_ptr = &insn_info->defs;
	  else if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
	    ref_ptr = &insn_info->eq_uses;
	  else
	    ref_ptr = &insn_info->uses;
	  if (dump_file)
	    fprintf (dump_file, "changing reg in insn %d\n",
		     DF_REF_INSN_UID (the_ref));

	  /* Stop if we find the current reference or where the reference
	     needs to be.  */
	  while (*ref_ptr != the_ref && df_ref_compare (*ref_ptr, the_ref) < 0)
	    ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	  if (*ref_ptr != the_ref)
	    {
	      /* The reference needs to be promoted up the list.  */
	      df_ref next = DF_REF_NEXT_LOC (the_ref);
	      DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
	      *ref_ptr = the_ref;
	      do
		ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	      while (*ref_ptr != the_ref);
	      *ref_ptr = next;
	    }
	  else if (DF_REF_NEXT_LOC (the_ref)
		   && df_ref_compare (the_ref, DF_REF_NEXT_LOC (the_ref)) > 0)
	    {
	      /* The reference needs to be demoted down the list.  */
	      *ref_ptr = DF_REF_NEXT_LOC (the_ref);
	      do
		ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	      while (*ref_ptr && df_ref_compare (the_ref, *ref_ptr) > 0);
	      DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
	      *ref_ptr = the_ref;
	    }

	  the_ref = next_ref;
	}
      else
	the_ref = DF_REF_NEXT_REG (the_ref);
    }
}
/* Change the regno of register LOC to NEW_REGNO and update the df
   information accordingly.  Refs that do not match LOC are not changed
   which means that artificial refs are not changed since they have no loc.
   This call is to support the SET_REGNO macro. */

void
df_ref_change_reg_with_loc (rtx loc, unsigned int new_regno)
{
  unsigned int old_regno = REGNO (loc);
  if (old_regno == new_regno)
    return;

  if (df)
    {
      df_grow_reg_info ();

      /* Move matching refs across all three kinds of reg chains.  */
      df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
				    DF_REG_DEF_GET (new_regno),
				    new_regno, loc);
      df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
				    DF_REG_USE_GET (new_regno),
				    new_regno, loc);
      df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
				    DF_REG_EQ_USE_GET (new_regno),
				    new_regno, loc);
    }

  /* Finally rewrite the rtx itself.  */
  set_mode_and_regno (loc, GET_MODE (loc), new_regno);
}
/* Delete the mw_hardregs that point into the eq_notes.  Walks
   INSN_INFO's mw_hardregs list with a pointer-to-pointer so matching
   entries can be unlinked in place and returned to the pool.  */

static void
df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
{
  struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  while (*mw_ptr)
    {
      df_mw_hardreg *mw = *mw_ptr;
      if (mw->flags & DF_REF_IN_NOTE)
	{
	  /* Unlink and free; mw_ptr stays put so the successor is
	     examined next.  */
	  *mw_ptr = DF_MWS_NEXT (mw);
	  problem_data->mw_reg_pool->remove (mw);
	}
      else
	mw_ptr = &DF_MWS_NEXT (mw);
    }
}
/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN.  The
   insn's eq_uses and note-derived mw_hardregs are rebuilt; its defs
   and plain uses are left untouched.  */

void
df_notes_rescan (rtx_insn *insn)
{
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return;

  /* Do nothing if the insn hasn't been emitted yet.  */
  if (!BLOCK_FOR_INSN (insn))
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = 0;
	  insn_info->uses = 0;
	  insn_info->eq_uses = 0;
	  insn_info->mw_hardregs = 0;
	}

      bitmap_clear_bit (&df->insns_to_delete, uid);
      /* If the insn is set to be rescanned, it does not need to also
	 be notes rescanned.  */
      if (!bitmap_bit_p (&df->insns_to_rescan, uid))
	bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
      return;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx note;
      struct df_collection_rec collection_rec;
      unsigned int i;

      /* Throw away the old note-derived information.  */
      df_mw_hardreg_chain_delete_eq_uses (insn_info);
      df_ref_chain_delete (insn_info->eq_uses);
      insn_info->eq_uses = NULL;

      /* Process REG_EQUIV/REG_EQUAL notes */
      for (note = REG_NOTES (insn); note;
	   note = XEXP (note, 1))
	{
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_EQUIV:
	    case REG_EQUAL:
	      df_uses_record (&collection_rec,
			      &XEXP (note, 0), DF_REF_REG_USE,
			      bb, insn_info, DF_REF_IN_NOTE);
	      /* FALLTHRU */
	    default:
	      break;
	    }
	}

      /* Find some place to put any new mw_hardregs.  */
      df_canonize_collection_rec (&collection_rec);
      struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs, *mw;
      FOR_EACH_VEC_ELT (collection_rec.mw_vec, i, mw)
	{
	  /* Insert each new mw at its sorted position; mw_ptr only
	     advances, which is valid because mw_vec is sorted too.  */
	  while (*mw_ptr && df_mw_compare (*mw_ptr, mw) < 0)
	    mw_ptr = &DF_MWS_NEXT (*mw_ptr);
	  DF_MWS_NEXT (mw) = *mw_ptr;
	  *mw_ptr = mw;
	  mw_ptr = &DF_MWS_NEXT (mw);
	}
      df_refs_add_to_chains (&collection_rec, bb, insn, copy_eq_uses);
    }
  else
    df_insn_rescan (insn);
}
2020 /*----------------------------------------------------------------------------
2021 Hard core instruction scanning code. No external interfaces here,
2022 just a lot of routines that look inside insns.
2023 ----------------------------------------------------------------------------*/
/* Return true if the contents of two df_ref's are identical.
   It ignores DF_REF_MARKER.  */

static bool
df_ref_equal_p (df_ref ref1, df_ref ref2)
{
  if (!ref2)
    return false;

  if (ref1 == ref2)
    return true;

  /* Flags are compared with the marker bits masked out; the
     MW_HARDREG bit is also ignored.  */
  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
      || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
      || DF_REF_REG (ref1) != DF_REF_REG (ref2)
      || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
      || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
	  != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
      || DF_REF_BB (ref1) != DF_REF_BB (ref2)
      || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
    return false;

  switch (DF_REF_CLASS (ref1))
    {
    case DF_REF_ARTIFICIAL:
    case DF_REF_BASE:
      return true;

    case DF_REF_REGULAR:
      /* Only regular refs carry a LOC worth comparing.  */
      return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);

    default:
      gcc_unreachable ();
    }
  return false;
}
/* Compare REF1 and REF2 for sorting.  This is only called from places
   where all of the refs are of the same type, in the same insn, and
   have the same bb.  So these fields are not checked.  Ties on the
   structural keys fall back to DF_REF_ORDER, which keeps the sort
   stable with respect to creation order.  */

static int
df_ref_compare (df_ref ref1, df_ref ref2)
{
  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
    return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);

  if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
    return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);

  if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
    return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);

  if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  /* Cannot look at the LOC field on artificial refs.  */
  if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
      && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
    {
      /* If two refs are identical except that one of them has is from
	 a mw and one is not, we need to have the one with the mw
	 first.  */
      if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
	  DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
	return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
      else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
	return -1;
      else
	return 1;
    }

  return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}
2105 /* Like df_ref_compare, but compare two df_ref* pointers R1 and R2. */
2107 static int
2108 df_ref_ptr_compare (const void *r1, const void *r2)
2110 return df_ref_compare (*(const df_ref *) r1, *(const df_ref *) r2);
/* Sort and compress a set of refs: sort REF_VEC by df_ref_compare
   (skipping the sort when the vector is already strictly ordered),
   then free duplicates and compact the vector in place.  */

static void
df_sort_and_compress_refs (vec<df_ref, va_heap> *ref_vec)
{
  unsigned int count;
  unsigned int i;
  unsigned int dist = 0;  /* Number of duplicates squeezed out so far.  */

  count = ref_vec->length ();

  /* If there are 1 or 0 elements, there is nothing to do.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      df_ref r0 = (*ref_vec)[0];
      df_ref r1 = (*ref_vec)[1];
      if (df_ref_compare (r0, r1) > 0)
	std::swap ((*ref_vec)[0], (*ref_vec)[1]);
    }
  else
    {
      for (i = 0; i < count - 1; i++)
	{
	  df_ref r0 = (*ref_vec)[i];
	  df_ref r1 = (*ref_vec)[i + 1];
	  if (df_ref_compare (r0, r1) >= 0)
	    break;
	}
      /* If the array is already strictly ordered,
	 which is the most common case for large COUNT case
	 (which happens for CALL INSNs),
	 no need to sort and filter out duplicate.
	 Simply return the count.
	 Make sure DF_GET_ADD_REFS adds refs in the increasing order
	 of DF_REF_COMPARE.  */
      if (i == count - 1)
	return;
      ref_vec->qsort (df_ref_ptr_compare);
    }

  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_ref_equal_p ((*ref_vec)[i],
				(*ref_vec)[i + dist + 1]))
	{
	  df_free_ref ((*ref_vec)[i + dist + 1]);
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	(*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
    }

  count -= dist;
  ref_vec->truncate (count);
}
2175 /* Return true if the contents of two df_ref's are identical.
2176 It ignores DF_REF_MARKER. */
2178 static bool
2179 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2181 if (!mw2)
2182 return false;
2183 return (mw1 == mw2) ||
2184 (mw1->mw_reg == mw2->mw_reg
2185 && mw1->type == mw2->type
2186 && mw1->flags == mw2->flags
2187 && mw1->start_regno == mw2->start_regno
2188 && mw1->end_regno == mw2->end_regno);
2192 /* Compare MW1 and MW2 for sorting. */
2194 static int
2195 df_mw_compare (const df_mw_hardreg *mw1, const df_mw_hardreg *mw2)
2197 if (mw1->type != mw2->type)
2198 return mw1->type - mw2->type;
2200 if (mw1->flags != mw2->flags)
2201 return mw1->flags - mw2->flags;
2203 if (mw1->start_regno != mw2->start_regno)
2204 return mw1->start_regno - mw2->start_regno;
2206 if (mw1->end_regno != mw2->end_regno)
2207 return mw1->end_regno - mw2->end_regno;
2209 return mw1->mw_order - mw2->mw_order;
2212 /* Like df_mw_compare, but compare two df_mw_hardreg** pointers R1 and R2. */
2214 static int
2215 df_mw_ptr_compare (const void *m1, const void *m2)
2217 return df_mw_compare (*(const df_mw_hardreg *const *) m1,
2218 *(const df_mw_hardreg *const *) m2);
/* Sort and compress a set of mw_hardregs: sort MW_VEC by
   df_mw_compare, then return duplicates to the allocation pool and
   compact the vector in place.  */

static void
df_sort_and_compress_mws (vec<df_mw_hardreg *, va_heap> *mw_vec)
{
  unsigned int count;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  unsigned int dist = 0;  /* Number of duplicates squeezed out so far.  */

  count = mw_vec->length ();
  if (count < 2)
    return;
  else if (count == 2)
    {
      /* Two elements: a single conditional swap is cheaper than
	 calling qsort.  */
      struct df_mw_hardreg *m0 = (*mw_vec)[0];
      struct df_mw_hardreg *m1 = (*mw_vec)[1];
      if (df_mw_compare (m0, m1) > 0)
	{
	  struct df_mw_hardreg *tmp = (*mw_vec)[0];
	  (*mw_vec)[0] = (*mw_vec)[1];
	  (*mw_vec)[1] = tmp;
	}
    }
  else
    mw_vec->qsort (df_mw_ptr_compare);

  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
	{
	  problem_data->mw_reg_pool->remove ((*mw_vec)[i + dist + 1]);
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	(*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
    }

  count -= dist;
  mw_vec->truncate (count);
}
/* Sort and remove duplicates from the COLLECTION_REC: each of the
   four vectors is independently sorted and compressed.  */

static void
df_canonize_collection_rec (struct df_collection_rec *collection_rec)
{
  df_sort_and_compress_refs (&collection_rec->def_vec);
  df_sort_and_compress_refs (&collection_rec->use_vec);
  df_sort_and_compress_refs (&collection_rec->eq_use_vec);
  df_sort_and_compress_mws (&collection_rec->mw_vec);
}
/* Add the new df_ref THIS_REF to the appropriate reg_info/ref_info
   chains: push it onto the front of REG_INFO's doubly-linked reg
   chain and, if ADD_TO_TABLE, append it to REF_INFO's flat table
   (assigning DF_REF_ID from the table position).  */

static void
df_install_ref (df_ref this_ref,
		struct df_reg_info *reg_info,
		struct df_ref_info *ref_info,
		bool add_to_table)
{
  unsigned int regno = DF_REF_REGNO (this_ref);
  /* Add the ref to the reg_{def,use,eq_use} chain.  */
  df_ref head = reg_info->reg_chain;

  reg_info->reg_chain = this_ref;
  reg_info->n_refs++;

  if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
    {
      /* Only hard registers participate in the live count.  */
      gcc_assert (regno < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[regno]++;
    }

  gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
		       && DF_REF_PREV_REG (this_ref) == NULL);

  DF_REF_NEXT_REG (this_ref) = head;

  /* We cannot actually link to the head of the chain.  */
  DF_REF_PREV_REG (this_ref) = NULL;

  if (head)
    DF_REF_PREV_REG (head) = this_ref;

  if (add_to_table)
    {
      gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
      df_check_and_grow_ref_info (ref_info, 1);
      DF_REF_ID (this_ref) = ref_info->table_size;
      /* Add the ref to the big array of defs.  */
      ref_info->refs[ref_info->table_size] = this_ref;
      ref_info->table_size++;
    }
  else
    DF_REF_ID (this_ref) = -1;

  ref_info->total_size++;
}
/* This function takes one of the groups of refs (defs, uses or
   eq_uses) and installs the entire group into the insn.  It also adds
   each of these refs into the appropriate chains.  Returns the first
   ref of the installed chain, or 0 when OLD_VEC is empty.  */

static df_ref
df_install_refs (basic_block bb,
		 const vec<df_ref, va_heap> *old_vec,
		 struct df_reg_info **reg_info,
		 struct df_ref_info *ref_info,
		 bool is_notes)
{
  unsigned int count = old_vec->length ();
  if (count)
    {
      bool add_to_table;
      df_ref this_ref;
      unsigned int ix;

      /* Installing new refs invalidates any previously established
	 ordering of the flat table, so downgrade ref_order to the
	 corresponding UNORDERED state.  */
      switch (ref_info->ref_order)
	{
	case DF_REF_ORDER_UNORDERED_WITH_NOTES:
	case DF_REF_ORDER_BY_REG_WITH_NOTES:
	case DF_REF_ORDER_BY_INSN_WITH_NOTES:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
	  add_to_table = true;
	  break;
	case DF_REF_ORDER_UNORDERED:
	case DF_REF_ORDER_BY_REG:
	case DF_REF_ORDER_BY_INSN:
	  ref_info->ref_order = DF_REF_ORDER_UNORDERED;
	  add_to_table = !is_notes;
	  break;
	default:
	  add_to_table = false;
	  break;
	}

      /* Do not add if ref is not in the right blocks.  */
      if (add_to_table && df->analyze_subset)
	add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

      FOR_EACH_VEC_ELT (*old_vec, ix, this_ref)
	{
	  /* Thread the refs together via NEXT_LOC as they are installed.  */
	  DF_REF_NEXT_LOC (this_ref) = (ix + 1 < old_vec->length ()
					? (*old_vec)[ix + 1]
					: NULL);
	  df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
			  ref_info, add_to_table);
	}
      return (*old_vec)[0];
    }
  else
    return 0;
}
2384 /* This function takes the mws installs the entire group into the
2385 insn. */
2387 static struct df_mw_hardreg *
2388 df_install_mws (const vec<df_mw_hardreg *, va_heap> *old_vec)
2390 unsigned int count = old_vec->length ();
2391 if (count)
2393 for (unsigned int i = 0; i < count - 1; i++)
2394 DF_MWS_NEXT ((*old_vec)[i]) = (*old_vec)[i + 1];
2395 DF_MWS_NEXT ((*old_vec)[count - 1]) = 0;
2396 return (*old_vec)[0];
2398 else
2399 return 0;
/* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
   chains and update other necessary information.  When INSN is
   non-null the groups selected by FLAGS (copy_defs/copy_uses/
   copy_eq_uses/copy_mw) are attached to its df_insn_info; otherwise
   the defs and uses become BB's artificial refs.  */

static void
df_refs_add_to_chains (struct df_collection_rec *collection_rec,
		       basic_block bb, rtx_insn *insn, unsigned int flags)
{
  if (insn)
    {
      struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
      /* If there is a vector in the collection rec, add it to the
	 insn.  A null rec is a signal that the caller will handle the
	 chain specially.  */
      if (flags & copy_defs)
	{
	  gcc_checking_assert (!insn_rec->defs);
	  insn_rec->defs
	    = df_install_refs (bb, &collection_rec->def_vec,
			       df->def_regs,
			       &df->def_info, false);
	}
      if (flags & copy_uses)
	{
	  gcc_checking_assert (!insn_rec->uses);
	  insn_rec->uses
	    = df_install_refs (bb, &collection_rec->use_vec,
			       df->use_regs,
			       &df->use_info, false);
	}
      if (flags & copy_eq_uses)
	{
	  gcc_checking_assert (!insn_rec->eq_uses);
	  /* eq_uses come from notes, hence is_notes = true.  */
	  insn_rec->eq_uses
	    = df_install_refs (bb, &collection_rec->eq_use_vec,
			       df->eq_use_regs,
			       &df->use_info, true);
	}
      if (flags & copy_mw)
	{
	  gcc_checking_assert (!insn_rec->mw_hardregs);
	  insn_rec->mw_hardregs
	    = df_install_mws (&collection_rec->mw_vec);
	}
    }
  else
    {
      /* No insn: these are the block-level artificial refs.  */
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);

      gcc_checking_assert (!bb_info->artificial_defs);
      bb_info->artificial_defs
	= df_install_refs (bb, &collection_rec->def_vec,
			   df->def_regs,
			   &df->def_info, false);
      gcc_checking_assert (!bb_info->artificial_uses);
      bb_info->artificial_uses
	= df_install_refs (bb, &collection_rec->use_vec,
			   df->use_regs,
			   &df->use_info, false);
    }
}
/* Allocate a ref and initialize its fields.  CL selects which object
   pool (and which physical layout) the ref comes from; LOC must be
   null for BASE and ARTIFICIAL refs and non-null for REGULAR refs.
   The new ref is pushed onto the matching vector of COLLECTION_REC,
   or installed incrementally when COLLECTION_REC is null.  */

static df_ref
df_ref_create_structure (enum df_ref_class cl,
			 struct df_collection_rec *collection_rec,
			 rtx reg, rtx *loc,
			 basic_block bb, struct df_insn_info *info,
			 enum df_ref_type ref_type,
			 int ref_flags)
{
  df_ref this_ref = NULL;
  unsigned int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  switch (cl)
    {
    case DF_REF_BASE:
      this_ref = (df_ref) (problem_data->ref_base_pool->allocate ());
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_ARTIFICIAL:
      this_ref = (df_ref) (problem_data->ref_artificial_pool->allocate ());
      this_ref->artificial_ref.bb = bb;
      gcc_checking_assert (loc == NULL);
      break;

    case DF_REF_REGULAR:
      this_ref = (df_ref) (problem_data->ref_regular_pool->allocate ());
      this_ref->regular_ref.loc = loc;
      gcc_checking_assert (loc);
      break;
    }

  DF_REF_CLASS (this_ref) = cl;
  DF_REF_ID (this_ref) = -1;
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_INSN_INFO (this_ref) = info;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_NEXT_REG (this_ref) = NULL;
  DF_REF_PREV_REG (this_ref) = NULL;
  DF_REF_ORDER (this_ref) = df->ref_order++;

  /* We need to clear this bit because fwprop, and in the future
     possibly other optimizations sometimes create new refs using ond
     refs as the model.  */
  DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);

  /* See if this ref needs to have DF_HARD_REG_LIVE bit set.  */
  if (regno < FIRST_PSEUDO_REGISTER
      && !DF_REF_IS_ARTIFICIAL (this_ref)
      && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
    {
      if (DF_REF_REG_DEF_P (this_ref))
	{
	  if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
	    DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
	}
      /* Uses of eliminable frame/arg pointers do not pin the hard
	 register live; everything else does.  */
      else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
		 && (regno == FRAME_POINTER_REGNUM
		     || regno == ARG_POINTER_REGNUM)))
	DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
    }

  if (collection_rec)
    {
      if (DF_REF_REG_DEF_P (this_ref))
	collection_rec->def_vec.safe_push (this_ref);
      else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
	collection_rec->eq_use_vec.safe_push (this_ref);
      else
	collection_rec->use_vec.safe_push (this_ref);
    }
  else
    df_install_ref_incremental (this_ref);

  return this_ref;
}
/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.  Hard registers are expanded into
   one ref per component register; a multiword hard register
   additionally gets a df_mw_hardreg record to support REG_DEAD /
   REG_UNUSED note generation.  */

static void
df_ref_record (enum df_ref_class cl,
	       struct df_collection_rec *collection_rec,
	       rtx reg, rtx *loc,
	       basic_block bb, struct df_insn_info *insn_info,
	       enum df_ref_type ref_type,
	       int ref_flags)
{
  unsigned int regno;

  gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
      unsigned int i;
      unsigned int endregno;
      df_ref ref;

      if (GET_CODE (reg) == SUBREG)
	{
	  /* Narrow [regno, endregno) to the hard registers actually
	     covered by the subreg.  */
	  regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
					SUBREG_BYTE (reg), GET_MODE (reg));
	  endregno = regno + subreg_nregs (reg);
	}
      else
	endregno = END_REGNO (reg);

      /* If this is a multiword hardreg, we create some extra
	 datastructures that will enable us to easily build REG_DEAD
	 and REG_UNUSED notes.  */
      if (collection_rec
	  && (endregno != regno + 1) && insn_info)
	{
	  /* Sets to a subreg of a multiword register are partial.
	     Sets to a non-subreg of a multiword register are not.  */
	  if (GET_CODE (reg) == SUBREG)
	    ref_flags |= DF_REF_PARTIAL;
	  ref_flags |= DF_REF_MW_HARDREG;

	  hardreg = problem_data->mw_reg_pool->allocate ();
	  hardreg->type = ref_type;
	  hardreg->flags = ref_flags;
	  hardreg->mw_reg = reg;
	  hardreg->start_regno = regno;
	  hardreg->end_regno = endregno - 1;
	  hardreg->mw_order = df->ref_order++;
	  collection_rec->mw_vec.safe_push (hardreg);
	}

      /* One ref per component hard register.  */
      for (i = regno; i < endregno; i++)
	{
	  ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
					 bb, insn_info, ref_type, ref_flags);

	  gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
	}
    }
  else
    {
      df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
			       ref_type, ref_flags);
    }
}
/* Process all the registers defined in the rtx pointed by LOC.
   Autoincrement/decrement definitions will be picked up by df_uses_record.
   Any change here has to be matched in df_find_hard_reg_defs_1.  */

static void
df_def_record_1 (struct df_collection_rec *collection_rec,
		 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
		 int flags)
{
  rtx dst = *loc;

  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  gcc_assert (GET_CODE (temp) == EXPR_LIST);
	  df_def_record_1 (collection_rec, &XEXP (temp, 0),
			   bb, insn_info, flags);
	}
      return;
    }

  /* Peel STRICT_LOW_PART / ZERO_EXTRACT wrappers, marking the def as
     a partial read-modify-write of the underlying register.  */
  if (GET_CODE (dst) == STRICT_LOW_PART)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  if (GET_CODE (dst) == ZERO_EXTRACT)
    {
      flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;

      loc = &XEXP (dst, 0);
      dst = *loc;
    }

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst))
    {
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);

      /* We want to keep sp alive everywhere - by making all
	 writes to sp also use of sp.  */
      if (REGNO (dst) == STACK_POINTER_REGNUM)
	df_ref_record (DF_REF_BASE, collection_rec,
		       dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
    }
  else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
    {
      if (read_modify_subreg_p (dst))
	flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;

      flags |= DF_REF_SUBREG;

      df_ref_record (DF_REF_REGULAR, collection_rec,
		     dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
    }
}
/* Process all the registers defined in the pattern rtx, X.  Any change
   here has to be matched in df_find_hard_reg_defs.  */

static void
df_defs_record (struct df_collection_rec *collection_rec,
		rtx x, basic_block bb, struct df_insn_info *insn_info,
		int flags)
{
  RTX_CODE code = GET_CODE (x);
  int i;

  switch (code)
    {
    case SET:
      df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
      break;

    case CLOBBER:
      flags |= DF_REF_MUST_CLOBBER;
      df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
      break;

      /* NOTE(review): no CLOBBER_HIGH case is visible here although
	 df_find_hard_reg_defs (which the comment above says must stay
	 in sync) does handle CLOBBER_HIGH — confirm against the full
	 file that CLOBBER_HIGH defs are recorded elsewhere or
	 intentionally fall to the default.  */

    case COND_EXEC:
      /* Defs inside a COND_EXEC are conditional.  */
      df_defs_record (collection_rec, COND_EXEC_CODE (x),
		      bb, insn_info, DF_REF_CONDITIONAL);
      break;

    case PARALLEL:
      for (i = 0; i < XVECLEN (x, 0); i++)
	df_defs_record (collection_rec, XVECEXP (x, 0, i),
			bb, insn_info, flags);
      break;
    default:
      /* No DEFs to record in other cases */
      break;
    }
}
/* Set bits in *DEFS for hard registers found in the rtx DST, which is the
   destination of a set or clobber.  This has to match the logic in
   df_defs_record_1.  */

static void
df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
{
  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  gcc_assert (GET_CODE (temp) == EXPR_LIST);
	  df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
	}
      return;
    }

  /* Strip wrappers down to the underlying register, as in
     df_def_record_1.  */
  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    dst = XEXP (dst, 0);

  /* At this point if we do not have a reg or a subreg, just return.  */
  if (REG_P (dst) && HARD_REGISTER_P (dst))
    SET_HARD_REG_BIT (*defs, REGNO (dst));
  else if (GET_CODE (dst) == SUBREG
	   && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
    /* NOTE(review): HARD_REGISTER_P is applied to DST (the SUBREG)
       rather than SUBREG_REG (dst) — verify this is intended, since
       the bit that gets set comes from SUBREG_REG (dst).  */
    SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
}
2760 /* Set bits in *DEFS for hard registers defined in the pattern X. This
2761 has to match the logic in df_defs_record. */
2763 static void
2764 df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
2766 RTX_CODE code = GET_CODE (x);
2767 int i;
2769 switch (code)
2771 case SET:
2772 df_find_hard_reg_defs_1 (SET_DEST (x), defs);
2773 break;
2775 case CLOBBER:
2776 case CLOBBER_HIGH:
2777 df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
2778 break;
2780 case COND_EXEC:
2781 df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
2782 break;
2784 case PARALLEL:
2785 for (i = 0; i < XVECLEN (x, 0); i++)
2786 df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
2787 break;
2788 default:
2789 /* No DEFs to record in other cases */
2790 break;
/* Process all the registers used in the rtx at address LOC.  Walks
   the expression recursively, classifying each register reference
   (plain use, memory-address use, read/write for in/decrement and
   partial stores) and recording it via df_ref_record.  The innermost
   operand is handled by looping back to RETRY instead of recursing.  */

static void
df_uses_record (struct df_collection_rec *collection_rec,
                rtx *loc, enum df_ref_type ref_type,
		basic_block bb, struct df_insn_info *insn_info,
		int flags)
{
  RTX_CODE code;
  rtx x;

 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Leaves with no register uses.  */
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	df_uses_record (collection_rec,
			&XEXP (XEXP (x, 0), 0),
			DF_REF_REG_MEM_STORE,
		        bb, insn_info,
			flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case CLOBBER_HIGH:
      gcc_assert (REG_P (XEXP (x, 0)));
      return;

    case MEM:
      df_uses_record (collection_rec,
		      &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
		      bb, insn_info, flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
	{
	  loc = &SUBREG_REG (x);
	  df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
	  return;
	}
      /* Fall through */

    case REG:
      df_ref_record (DF_REF_REGULAR, collection_rec,
		     x, loc, bb, insn_info,
		     ref_type, flags);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      {
	df_uses_record (collection_rec,
			&XEXP (x, 1), ref_type, bb, insn_info, flags);
	df_uses_record (collection_rec,
			&XEXP (x, 2), ref_type, bb, insn_info, flags);

	/* If the parameters to the zero or sign extract are
	   constants, strip them off and recurse, otherwise there is
	   no information that we can gain from this operation.  */
	if (code == ZERO_EXTRACT)
	  flags |= DF_REF_ZERO_EXTRACT;
	else
	  flags |= DF_REF_SIGN_EXTRACT;

	df_uses_record (collection_rec,
			&XEXP (x, 0), ref_type, bb, insn_info, flags);
	return;
      }
      break;

    case SET:
      {
	rtx dst = SET_DEST (x);
	gcc_assert (!(flags & DF_REF_IN_NOTE));
	df_uses_record (collection_rec,
			&SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);

	/* The destination contributes uses only for the cases below
	   (addresses, partial stores, extract operands).  */
	switch (GET_CODE (dst))
	  {
	  case SUBREG:
	    if (read_modify_subreg_p (dst))
	      {
		df_uses_record (collection_rec, &SUBREG_REG (dst),
				DF_REF_REG_USE, bb, insn_info,
				flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
		break;
	      }
	    /* Fall through.  */
	  case REG:
	  case PARALLEL:
	  case SCRATCH:
	  case PC:
	  case CC0:
	    break;
	  case MEM:
	    df_uses_record (collection_rec, &XEXP (dst, 0),
			    DF_REF_REG_MEM_STORE, bb, insn_info, flags);
	    break;
	  case STRICT_LOW_PART:
	    {
	      rtx *temp = &XEXP (dst, 0);
	      /* A strict_low_part uses the whole REG and not just the
		 SUBREG.  */
	      dst = XEXP (dst, 0);
	      df_uses_record (collection_rec,
			      (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
			      DF_REF_REG_USE, bb, insn_info,
			      DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
	    }
	    break;
	  case ZERO_EXTRACT:
	    {
	      df_uses_record (collection_rec, &XEXP (dst, 1),
			      DF_REF_REG_USE, bb, insn_info, flags);
	      df_uses_record (collection_rec, &XEXP (dst, 2),
			      DF_REF_REG_USE, bb, insn_info, flags);
	      if (GET_CODE (XEXP (dst,0)) == MEM)
		df_uses_record (collection_rec, &XEXP (dst, 0),
				DF_REF_REG_USE, bb, insn_info,
				flags);
	      else
		df_uses_record (collection_rec, &XEXP (dst, 0),
				DF_REF_REG_USE, bb, insn_info,
				DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
	    }
	    break;

	  default:
	    gcc_unreachable ();
	  }
	return;
      }

    case RETURN:
    case SIMPLE_RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be
	   considered to use and clobber all hard registers, all
	   pseudo-registers and all of memory.  So must TRAP_IF and
	   UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu
	   rounding mode.  An insn should not be moved across this
	   even if it only uses pseudo-regs because it might give an
	   incorrectly rounded result.

	   However, flow.c's liveness computation did *not* do this,
	   giving the reasoning as " ?!? Unfortunately, marking all
	   hard registers as live causes massive problems for the
	   register allocator and marking all pseudos as live creates
	   mountains of uninitialized variable warnings."

	   In order to maintain the status quo with regard to liveness
	   and uses, we do what flow.c did and just mark any regs we
	   can find in ASM_OPERANDS as used.  In global asm insns are
	   scanned and regs_asm_clobbered is filled out.

	   For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We cannot just fall through here since then we
	   would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
	   which do not indicate traditional asms unlike their normal
	   usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn_info, flags);
	    return;
	  }
	break;
      }

    case VAR_LOCATION:
      df_uses_record (collection_rec,
		      &PAT_VAR_LOCATION_LOC (x),
		      DF_REF_REG_USE, bb, insn_info, flags);
      return;

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      gcc_assert (!DEBUG_INSN_P (insn_info->insn));
      /* Catch the def of the register being modified.  */
      df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
		     bb, insn_info,
		     DF_REF_REG_DEF,
		     flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);

      /* ... Fall through to handle uses ... */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
	    df_uses_record (collection_rec, &XEXP (x, i), ref_type,
			    bb, insn_info, flags);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      df_uses_record (collection_rec,
			      &XVECEXP (x, i, j), ref_type,
			      bb, insn_info, flags);
	  }
      }
  }

  return;
}
/* For all DF_REF_CONDITIONAL defs, add a corresponding uses.  A
   conditional def may leave the register unchanged, so the old value
   is also (conditionally) live into the insn.  */

static void
df_get_conditional_uses (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  df_ref ref;

  FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
    {
      if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
	{
	  df_ref use;

	  /* Clone the def as a use, dropping the CONDITIONAL flag.  */
	  use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
					 DF_REF_LOC (ref), DF_REF_BB (ref),
					 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
					 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
	  DF_REF_REGNO (use) = DF_REF_REGNO (ref);
	}
    }
}
/* Get call's extra defs and uses (track caller-saved registers).
   Records the stack pointer use, global-register uses/defs (subject
   to const/pure qualifiers), may-clobbers for call-clobbered hard
   regs, and the USE/CLOBBER entries of CALL_INSN_FUNCTION_USAGE.  */

static void
df_get_call_refs (struct df_collection_rec *collection_rec,
		  basic_block bb,
		  struct df_insn_info *insn_info,
		  int flags)
{
  rtx note;
  bool is_sibling_call;
  unsigned int i;
  HARD_REG_SET defs_generated;
  HARD_REG_SET fn_reg_set_usage;

  /* Registers the pattern itself defines must not also get a
     may-clobber from the call.  */
  CLEAR_HARD_REG_SET (defs_generated);
  df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
  is_sibling_call = SIBLING_CALL_P (insn_info->insn);
  get_call_reg_set_usage (insn_info->insn, &fn_reg_set_usage,
			  regs_invalidated_by_call);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i == STACK_POINTER_REGNUM)
	/* The stack ptr is used (honorarily) by a CALL insn.  */
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_USE,
		       DF_REF_CALL_STACK_USAGE | flags);
      else if (global_regs[i])
	{
	  /* Calls to const functions cannot access any global registers and
	     calls to pure functions cannot set them.  All other calls may
	     reference any of the global registers, so they are recorded as
	     used.  */
	  if (!RTL_CONST_CALL_P (insn_info->insn))
	    {
	      df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			     NULL, bb, insn_info, DF_REF_REG_USE, flags);
	      if (!RTL_PURE_CALL_P (insn_info->insn))
		df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
			       NULL, bb, insn_info, DF_REF_REG_DEF, flags);
	    }
	}
      else if (TEST_HARD_REG_BIT (fn_reg_set_usage, i)
	       /* no clobbers for regs that are the result of the call */
	       && !TEST_HARD_REG_BIT (defs_generated, i)
	       && (!is_sibling_call
		   || !bitmap_bit_p (df->exit_block_uses, i)
		   || refers_to_regno_p (i, crtl->return_rtx)))
	df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
		       NULL, bb, insn_info, DF_REF_REG_DEF,
		       DF_REF_MAY_CLOBBER | flags);
    }

  /* Record the registers used to pass arguments, and explicitly
     noted as clobbered.  */
  for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      gcc_assert (GET_CODE (XEXP (note, 0)) != CLOBBER_HIGH);
      if (GET_CODE (XEXP (note, 0)) == USE)
	df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
			DF_REF_REG_USE, bb, insn_info, flags);
      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
	{
	  if (REG_P (XEXP (XEXP (note, 0), 0)))
	    {
	      unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
	      if (!TEST_HARD_REG_BIT (defs_generated, regno))
		df_defs_record (collection_rec, XEXP (note, 0), bb,
				insn_info, flags);
	    }
	  else
	    df_uses_record (collection_rec, &XEXP (note, 0),
			    DF_REF_REG_USE, bb, insn_info, flags);
	}
    }

  return;
}
/* Collect all refs in the INSN.  This function is free of any
   side-effect - it will create and return a lists of df_ref's in the
   COLLECTION_REC without putting those refs into existing ref chains
   and reg chains.  */

static void
df_insn_refs_collect (struct df_collection_rec *collection_rec,
		      basic_block bb, struct df_insn_info *insn_info)
{
  rtx note;
  bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);

  /* Clear out the collection record.  */
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  /* Process REG_EQUIV/REG_EQUAL notes.  */
  for (note = REG_NOTES (insn_info->insn); note;
       note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EQUIV:
	case REG_EQUAL:
	  /* Uses in notes are tagged DF_REF_IN_NOTE so they land in
	     eq_use_vec rather than use_vec.  */
	  df_uses_record (collection_rec,
			  &XEXP (note, 0), DF_REF_REG_USE,
			  bb, insn_info, DF_REF_IN_NOTE);
	  break;
	case REG_NON_LOCAL_GOTO:
	  /* The frame ptr is used by a non-local goto.  */
	  df_ref_record (DF_REF_BASE, collection_rec,
			 regno_reg_rtx[FRAME_POINTER_REGNUM],
			 NULL, bb, insn_info,
			 DF_REF_REG_USE, 0);
	  if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	    df_ref_record (DF_REF_BASE, collection_rec,
			   regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
			   NULL, bb, insn_info,
			   DF_REF_REG_USE, 0);
	  break;
	default:
	  break;
	}
    }

  int flags = (is_cond_exec) ? DF_REF_CONDITIONAL : 0;
  /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
     uses from CALL_INSN_FUNCTION_USAGE.  */
  if (CALL_P (insn_info->insn))
    df_get_call_refs (collection_rec, bb, insn_info, flags);

  /* Record other defs.  These should be mostly for DF_REF_REGULAR, so
     that a qsort on the defs is unnecessary in most cases.  */
  df_defs_record (collection_rec,
		  PATTERN (insn_info->insn), bb, insn_info, 0);

  /* Record the register uses.  */
  df_uses_record (collection_rec,
		  &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);

  /* DF_REF_CONDITIONAL needs corresponding USES. */
  if (is_cond_exec)
    df_get_conditional_uses (collection_rec);

  df_canonize_collection_rec (collection_rec);
}
/* Recompute the luids for the insns in BB.  LUIDs number only real
   insns; notes and labels share the luid of the following insn.  */

void
df_recompute_luids (basic_block bb)
{
  rtx_insn *insn;
  int luid = 0;

  df_grow_insn_info ();

  /* Scan the block an insn at a time from beginning to end.  */
  FOR_BB_INSNS (bb, insn)
    {
      struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
      /* Inserting labels does not always trigger the incremental
	 rescanning.  */
      if (!insn_info)
	{
	  gcc_assert (!INSN_P (insn));
	  insn_info = df_insn_create_insn_record (insn);
	}

      DF_INSN_INFO_LUID (insn_info) = luid;
      if (INSN_P (insn))
	luid++;
    }
}
/* Collect all artificial refs at the block level for BB and add them
   to COLLECTION_REC.  Entry/exit blocks are delegated to their
   specialized collectors; other blocks get EH landing-pad defs,
   non-local-goto frame pointer defs, and the artificial uses.  */

static void
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
  collection_rec->def_vec.truncate (0);
  collection_rec->use_vec.truncate (0);
  collection_rec->eq_use_vec.truncate (0);
  collection_rec->mw_vec.truncate (0);

  if (bb->index == ENTRY_BLOCK)
    {
      df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
      return;
    }
  else if (bb->index == EXIT_BLOCK)
    {
      df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
      return;
    }

  if (bb_has_eh_pred (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
	{
	  unsigned regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
	}
    }

  /* Add the hard_frame_pointer if this block is the target of a
     non-local goto.  */
  if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
		   bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);

  /* Add the artificial uses.  */
  if (bb->index >= NUM_FIXED_BLOCKS)
    {
      bitmap_iterator bi;
      unsigned int regno;
      /* EH blocks need the EH-specific artificial-use set.  */
      bitmap au = bb_has_eh_pred (bb)
	? &df->eh_block_artificial_uses
	: &df->regular_block_artificial_uses;

      EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
	{
	  df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
			 bb, NULL, DF_REF_REG_USE, 0);
	}
    }

  df_canonize_collection_rec (collection_rec);
}
/* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS.  */

void
df_bb_refs_record (int bb_index, bool scan_insns)
{
  basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
  rtx_insn *insn;
  int luid = 0;

  if (!df)
    return;

  df_collection_rec collection_rec;
  df_grow_bb_info (df_scan);
  if (scan_insns)
    /* Scan the block an insn at a time from beginning to end.  */
    FOR_BB_INSNS (bb, insn)
      {
	struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
	/* This path creates insn records from scratch; none may exist.  */
	gcc_assert (!insn_info);

	insn_info = df_insn_create_insn_record (insn);
	if (INSN_P (insn))
	  {
	    /* Record refs within INSN.  */
	    DF_INSN_INFO_LUID (insn_info) = luid++;
	    df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
	    df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
	  }
	DF_INSN_INFO_LUID (insn_info) = luid;
      }

  /* Other block level artificial refs */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_add_to_chains (&collection_rec, bb, NULL, copy_all);

  /* Now that the block has been processed, set the block as dirty so
     LR and LIVE will get it processed.  */
  df_set_bb_dirty (bb);
}
/* Get the artificial use set for a regular (i.e. non-exit/non-entry)
   block.  Fills REGULAR_BLOCK_ARTIFICIAL_USES with the hard registers
   that must be treated as live in every ordinary block.  */

static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
#ifdef EH_USES
  unsigned int i;
#endif

  bitmap_clear (regular_block_artificial_uses);

  if (reload_completed)
    {
      if (frame_pointer_needed)
	bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
    }
  else
    /* Before reload, there are a few registers that must be forced
       live everywhere -- which might not already be the case for
       blocks within infinite loops.  */
    {
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	bitmap_set_bit (regular_block_artificial_uses,
			HARD_FRAME_POINTER_REGNUM);

      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if (picreg != INVALID_REGNUM
	  && fixed_regs[picreg])
	bitmap_set_bit (regular_block_artificial_uses, picreg);
    }
  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);

#ifdef EH_USES
  /* EH_USES registers are used:
     1) at all insns that might throw (calls or with -fnon-call-exceptions
	trapping insns)
     2) in all EH edges
     3) to support backtraces and/or debugging, anywhere between their
	initialization and where they the saved registers are restored
	from them, including the cases where we don't reach the epilogue
	(noreturn call or infinite loop).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EH_USES (i))
      bitmap_set_bit (regular_block_artificial_uses, i);
#endif
}
3423 /* Get the artificial use set for an eh block. */
3425 static void
3426 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3428 bitmap_clear (eh_block_artificial_uses);
3430 /* The following code (down through the arg_pointer setting APPEARS
3431 to be necessary because there is nothing that actually
3432 describes what the exception handling code may actually need
3433 to keep alive. */
3434 if (reload_completed)
3436 if (frame_pointer_needed)
3438 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3439 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3440 bitmap_set_bit (eh_block_artificial_uses,
3441 HARD_FRAME_POINTER_REGNUM);
3443 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3444 && fixed_regs[ARG_POINTER_REGNUM])
3445 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3451 /*----------------------------------------------------------------------------
3452 Specialized hard register scanning functions.
3453 ----------------------------------------------------------------------------*/
3456 /* Mark a register in SET. Hard registers in large modes get all
3457 of their component registers set as well. */
3459 static void
3460 df_mark_reg (rtx reg, void *vset)
3462 bitmap_set_range ((bitmap) vset, REGNO (reg), REG_NREGS (reg));
/* Set the bit for regs that are considered being defined at the entry
   to the function: argument registers, the stack/frame pointers, the
   static chain, the return-address register, and anything the target
   adds via extra_live_on_entry.  */

static void
df_get_entry_block_def_set (bitmap entry_block_defs)
{
  rtx r;
  int i;

  bitmap_clear (entry_block_defs);

  /* For separate shrink-wrapping we use LIVE to analyze which basic blocks
     need a prologue for some component to be executed before that block,
     and we do not care about any other registers.  Hence, we do not want
     any register for any component defined in the entry block, and we can
     just leave all registers undefined.  */
  if (df_scan->local_flags & DF_SCAN_EMPTY_ENTRY_EXIT)
    return;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (global_regs[i])
	bitmap_set_bit (entry_block_defs, i);
      /* Argument registers are defined on entry; INCOMING_REGNO maps an
	 outgoing argument regno to the register it arrives in.  */
      if (FUNCTION_ARG_REGNO_P (i))
	bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
    }

  /* The always important stack pointer.  */
  bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (targetm.have_prologue () && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
	 pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
	  bitmap_set_bit (entry_block_defs, i);
    }

  /* Hidden struct-return pointer, if passed in a register.  */
  r = targetm.calls.struct_value_rtx (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  /* If the function has an incoming STATIC_CHAIN, it has to show up
     in the entry def set.  */
  r = rtx_for_static_chain (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);

      /* If they are different, also mark the hard frame pointer as live.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
      if (picreg != INVALID_REGNUM
	  && fixed_regs[picreg])
	bitmap_set_bit (entry_block_defs, picreg);
    }

#ifdef INCOMING_RETURN_ADDR_RTX
  if (REG_P (INCOMING_RETURN_ADDR_RTX))
    bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

  /* Let the target add any other registers it defines on entry.  */
  targetm.extra_live_on_entry (entry_block_defs);
}
3554 /* Return the (conservative) set of hard registers that are defined on
3555 entry to the function.
3556 It uses df->entry_block_defs to determine which register
3557 reference to include. */
3559 static void
3560 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3561 bitmap entry_block_defs)
3563 unsigned int i;
3564 bitmap_iterator bi;
3566 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3568 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3569 ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
3572 df_canonize_collection_rec (collection_rec);
3576 /* Record the (conservative) set of hard registers that are defined on
3577 entry to the function. */
3579 static void
3580 df_record_entry_block_defs (bitmap entry_block_defs)
3582 struct df_collection_rec collection_rec;
3583 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3585 /* Process bb_refs chain */
3586 df_refs_add_to_chains (&collection_rec,
3587 BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK),
3588 NULL,
3589 copy_defs);
3593 /* Update the defs in the entry block. */
3595 void
3596 df_update_entry_block_defs (void)
3598 bool changed = false;
3600 auto_bitmap refs (&df_bitmap_obstack);
3601 df_get_entry_block_def_set (refs);
3602 gcc_assert (df->entry_block_defs);
3603 if (!bitmap_equal_p (df->entry_block_defs, refs))
3605 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3606 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3607 df_ref_chain_delete (bb_info->artificial_defs);
3608 bb_info->artificial_defs = NULL;
3609 changed = true;
3612 if (changed)
3614 df_record_entry_block_defs (refs);
3615 bitmap_copy (df->entry_block_defs, refs);
3616 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
/* Set the bit for regs that are considered being used at the exit:
   the stack/frame pointers, global and epilogue-used registers,
   call-saved registers that were used, EH return data registers, and
   the function return value.  */

static void
df_get_exit_block_use_set (bitmap exit_block_uses)
{
  unsigned int i;
  unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

  bitmap_clear (exit_block_uses);

  /* For separate shrink-wrapping we use LIVE to analyze which basic blocks
     need an epilogue for some component to be executed after that block,
     and we do not care about any other registers.  Hence, we do not want
     any register for any component seen as used in the exit block, and we
     can just say no registers at all are used.  */
  if (df_scan->local_flags & DF_SCAN_EMPTY_ENTRY_EXIT)
    return;

  /* Stack pointer is always live at the exit.  */
  bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */
  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);

      /* If they are different, also mark the hard frame pointer as live.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
    }

  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      && picreg != INVALID_REGNUM
      && fixed_regs[picreg])
    bitmap_set_bit (exit_block_uses, picreg);

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (exit_block_uses, i);

  if (targetm.have_epilogue () && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
	    && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  bitmap_set_bit (exit_block_uses, i);
    }

  /* Mark the registers that will contain data for the handler.  */
  if (reload_completed && crtl->calls_eh_return)
    for (i = 0; ; ++i)
      {
	/* EH_RETURN_DATA_REGNO returns INVALID_REGNUM past the last
	   data register, which terminates the loop.  */
	unsigned regno = EH_RETURN_DATA_REGNO (i);
	if (regno == INVALID_REGNUM)
	  break;
	bitmap_set_bit (exit_block_uses, regno);
      }

#ifdef EH_RETURN_STACKADJ_RTX
  if ((!targetm.have_epilogue () || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }
#endif

  if ((!targetm.have_epilogue () || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}
/* Return the refs of hard registers that are used in the exit block.
   It uses df->exit_block_uses to determine register to include.  */

static void
df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
{
  unsigned int i;
  bitmap_iterator bi;

  /* One artificial use per register bit, attached to the exit block.  */
  EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
		   EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);

  /* It is deliberate that this is not put in the exit block uses but
     I do not know why.  */
  if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
      && reload_completed
      && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
      && bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
      && fixed_regs[ARG_POINTER_REGNUM])
    df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
		   EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);

  df_canonize_collection_rec (collection_rec);
}
3740 /* Record the set of hard registers that are used in the exit block.
3741 It uses df->exit_block_uses to determine which bit to include. */
3743 static void
3744 df_record_exit_block_uses (bitmap exit_block_uses)
3746 struct df_collection_rec collection_rec;
3747 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3749 /* Process bb_refs chain */
3750 df_refs_add_to_chains (&collection_rec,
3751 BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK),
3752 NULL,
3753 copy_uses);
3757 /* Update the uses in the exit block. */
3759 void
3760 df_update_exit_block_uses (void)
3762 bool changed = false;
3764 auto_bitmap refs (&df_bitmap_obstack);
3765 df_get_exit_block_use_set (refs);
3766 gcc_assert (df->exit_block_uses);
3767 if (!bitmap_equal_p (df->exit_block_uses, refs))
3769 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3770 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3771 df_ref_chain_delete (bb_info->artificial_uses);
3772 bb_info->artificial_uses = NULL;
3773 changed = true;
3776 if (changed)
3778 df_record_exit_block_uses (refs);
3779 bitmap_copy (df->exit_block_uses, refs);
3780 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
3784 static bool initialized = false;
3787 /* Initialize some platform specific structures. */
3789 void
3790 df_hard_reg_init (void)
3792 int i;
3793 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
3795 if (initialized)
3796 return;
3798 /* Record which registers will be eliminated. We use this in
3799 mark_used_regs. */
3800 CLEAR_HARD_REG_SET (elim_reg_set);
3802 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
3803 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
3805 initialized = true;
3809 /* Recompute the parts of scanning that are based on regs_ever_live
3810 because something changed in that array. */
3812 void
3813 df_update_entry_exit_and_calls (void)
3815 basic_block bb;
3817 df_update_entry_block_defs ();
3818 df_update_exit_block_uses ();
3820 /* The call insns need to be rescanned because there may be changes
3821 in the set of registers clobbered across the call. */
3822 FOR_EACH_BB_FN (bb, cfun)
3824 rtx_insn *insn;
3825 FOR_BB_INSNS (bb, insn)
3827 if (INSN_P (insn) && CALL_P (insn))
3828 df_insn_rescan (insn);
3834 /* Return true if hard REG is actually used in the some instruction.
3835 There are a fair number of conditions that affect the setting of
3836 this array. See the comment in df.h for df->hard_regs_live_count
3837 for the conditions that this array is set. */
3839 bool
3840 df_hard_reg_used_p (unsigned int reg)
3842 return df->hard_regs_live_count[reg] != 0;
3846 /* A count of the number of times REG is actually used in the some
3847 instruction. There are a fair number of conditions that affect the
3848 setting of this array. See the comment in df.h for
3849 df->hard_regs_live_count for the conditions that this array is
3850 set. */
3853 unsigned int
3854 df_hard_reg_used_count (unsigned int reg)
3856 return df->hard_regs_live_count[reg];
3860 /* Get the value of regs_ever_live[REGNO]. */
3862 bool
3863 df_regs_ever_live_p (unsigned int regno)
3865 return regs_ever_live[regno];
3869 /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
3870 to change, schedule that change for the next update. */
3872 void
3873 df_set_regs_ever_live (unsigned int regno, bool value)
3875 if (regs_ever_live[regno] == value)
3876 return;
3878 regs_ever_live[regno] = value;
3879 if (df)
3880 df->redo_entry_and_exit = true;
3884 /* Compute "regs_ever_live" information from the underlying df
3885 information. Set the vector to all false if RESET. */
3887 void
3888 df_compute_regs_ever_live (bool reset)
3890 unsigned int i;
3891 bool changed = df->redo_entry_and_exit;
3893 if (reset)
3894 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3896 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3897 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3899 regs_ever_live[i] = true;
3900 changed = true;
3902 if (changed)
3903 df_update_entry_exit_and_calls ();
3904 df->redo_entry_and_exit = false;
3908 /*----------------------------------------------------------------------------
3909 Dataflow ref information verification functions.
3911 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
3912 df_reg_chain_verify_unmarked (refs)
3913 df_refs_verify (vec<stack, va_df_ref>, ref*, bool)
3914 df_mws_verify (mw*, mw*, bool)
3915 df_insn_refs_verify (collection_rec, bb, insn, bool)
3916 df_bb_refs_verify (bb, refs, bool)
3917 df_bb_verify (bb)
3918 df_exit_block_bitmap_verify (bool)
3919 df_entry_block_bitmap_verify (bool)
3920 df_scan_verify ()
3921 ----------------------------------------------------------------------------*/
/* Mark all refs in the reg chain.  Verify that all of the registers
   are in the correct chain.  Returns the number of refs in the chain,
   which the caller compares against the recorded ref counts.  */

static unsigned int
df_reg_chain_mark (df_ref refs, unsigned int regno,
		   bool is_def, bool is_eq_use)
{
  unsigned int count = 0;
  df_ref ref;
  for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
    {
      /* A ref must not already be marked; a set mark here would mean
	 the ref appears in more than one chain.  */
      gcc_assert (!DF_REF_IS_REG_MARKED (ref));

      /* If there are no def-use or use-def chains, make sure that all
	 of the chains are clear.  */
      if (!df_chain)
	gcc_assert (!DF_REF_CHAIN (ref));

      /* Check to make sure the ref is in the correct chain.  */
      gcc_assert (DF_REF_REGNO (ref) == regno);
      if (is_def)
	gcc_assert (DF_REF_REG_DEF_P (ref));
      else
	gcc_assert (!DF_REF_REG_DEF_P (ref));

      if (is_eq_use)
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
      else
	gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);

      /* The chain must be doubly linked consistently.  */
      if (DF_REF_NEXT_REG (ref))
	gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
      count++;
      DF_REF_REG_MARK (ref);
    }
  return count;
}
3963 /* Verify that all of the registers in the chain are unmarked. */
3965 static void
3966 df_reg_chain_verify_unmarked (df_ref refs)
3968 df_ref ref;
3969 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
3970 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
/* Verify that NEW_REC and OLD_REC have exactly the same members, in
   the same order.  If ABORT_IF_FAIL, any mismatch is an internal
   error; otherwise the mismatch is reported by returning false.  */

static bool
df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref old_rec,
		bool abort_if_fail)
{
  unsigned int ix;
  df_ref new_ref;

  FOR_EACH_VEC_ELT (*new_rec, ix, new_ref)
    {
      if (old_rec == NULL || !df_ref_equal_p (new_ref, old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}

      /* Abort if fail is called from the function level verifier.  If
	 that is the context, mark this reg as being seen.  */
      if (abort_if_fail)
	{
	  gcc_assert (DF_REF_IS_REG_MARKED (old_rec));
	  DF_REF_REG_UNMARK (old_rec);
	}

      old_rec = DF_REF_NEXT_LOC (old_rec);
    }

  /* Both sequences must be exhausted together; leftover old refs mean
     the recorded chain has extras.  */
  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
/* Verify that NEW_REC and OLD_REC have exactly the same members, in
   the same order (multiword-hardreg variant of df_refs_verify).  If
   ABORT_IF_FAIL, any mismatch is an internal error; otherwise it is
   reported by returning false.  */

static bool
df_mws_verify (const vec<df_mw_hardreg *, va_heap> *new_rec,
	       struct df_mw_hardreg *old_rec,
	       bool abort_if_fail)
{
  unsigned int ix;
  struct df_mw_hardreg *new_reg;

  FOR_EACH_VEC_ELT (*new_rec, ix, new_reg)
    {
      if (old_rec == NULL || !df_mw_equal_p (new_reg, old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}
      old_rec = DF_MWS_NEXT (old_rec);
    }

  /* Both sequences must be exhausted together.  */
  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
/* Return true if the existing insn refs information is complete and
   correct.  Otherwise (i.e. if there's any missing or extra refs),
   return the correct df_ref chain in REFS_RETURN.

   If ABORT_IF_FAIL, leave the refs that are verified (already in the
   ref chain) as DF_REF_MARKED().  If it's false, then it's a per-insn
   verification mode instead of the whole function, so unmark
   everything.

   If ABORT_IF_FAIL is set, this function never returns false.  */

static bool
df_insn_refs_verify (struct df_collection_rec *collection_rec,
		     basic_block bb,
		     rtx_insn *insn,
		     bool abort_if_fail)
{
  bool ret1, ret2, ret3;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);

  /* Recompute the refs for INSN from scratch into COLLECTION_REC and
     compare against the recorded def/use/eq-use/mw chains.  */
  df_insn_refs_collect (collection_rec, bb, insn_info);

  /* Unfortunately we cannot opt out early if one of these is not
     right and abort_if_fail is set because the marks will not get cleared.  */
  ret1 = df_refs_verify (&collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
			 abort_if_fail);
  if (!ret1 && !abort_if_fail)
    return false;
  ret2 = df_refs_verify (&collection_rec->use_vec, DF_INSN_UID_USES (uid),
			 abort_if_fail);
  if (!ret2 && !abort_if_fail)
    return false;
  ret3 = df_refs_verify (&collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
			 abort_if_fail);
  if (!ret3 && !abort_if_fail)
    return false;
  if (! df_mws_verify (&collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
		       abort_if_fail))
    return false;
  return (ret1 && ret2 && ret3);
}
/* Return true if all refs in the basic block are correct and complete.
   Due to df_ref_chain_verify, it will cause all refs
   that are verified to have DF_REF_MARK bit set.  */

static bool
df_bb_verify (basic_block bb)
{
  rtx_insn *insn;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
  struct df_collection_rec collection_rec;

  gcc_assert (bb_info);

  /* Scan the block, one insn at a time, from beginning to end.  */
  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      if (!INSN_P (insn))
	continue;
      /* With abort_if_fail == true a mismatch aborts, so the return
	 value is not interesting here.  */
      df_insn_refs_verify (&collection_rec, bb, insn, true);
      df_free_collection_rec (&collection_rec);
    }

  /* Do the artificial defs and uses.  */
  df_bb_refs_collect (&collection_rec, bb);
  df_refs_verify (&collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
  df_refs_verify (&collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
  df_free_collection_rec (&collection_rec);

  return true;
}
4118 /* Returns true if the entry block has correct and complete df_ref set.
4119 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4121 static bool
4122 df_entry_block_bitmap_verify (bool abort_if_fail)
4124 bool is_eq;
4126 auto_bitmap entry_block_defs (&df_bitmap_obstack);
4127 df_get_entry_block_def_set (entry_block_defs);
4129 is_eq = bitmap_equal_p (entry_block_defs, df->entry_block_defs);
4131 if (!is_eq && abort_if_fail)
4133 fprintf (stderr, "entry_block_defs = ");
4134 df_print_regset (stderr, entry_block_defs);
4135 fprintf (stderr, "df->entry_block_defs = ");
4136 df_print_regset (stderr, df->entry_block_defs);
4137 gcc_assert (0);
4140 return is_eq;
4144 /* Returns true if the exit block has correct and complete df_ref set.
4145 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4147 static bool
4148 df_exit_block_bitmap_verify (bool abort_if_fail)
4150 bool is_eq;
4152 auto_bitmap exit_block_uses (&df_bitmap_obstack);
4153 df_get_exit_block_use_set (exit_block_uses);
4155 is_eq = bitmap_equal_p (exit_block_uses, df->exit_block_uses);
4157 if (!is_eq && abort_if_fail)
4159 fprintf (stderr, "exit_block_uses = ");
4160 df_print_regset (stderr, exit_block_uses);
4161 fprintf (stderr, "df->exit_block_uses = ");
4162 df_print_regset (stderr, df->exit_block_uses);
4163 gcc_assert (0);
4166 return is_eq;
4170 /* Return true if df_ref information for all insns in all blocks are
4171 correct and complete. */
4173 void
4174 df_scan_verify (void)
4176 unsigned int i;
4177 basic_block bb;
4179 if (!df)
4180 return;
4182 /* Verification is a 4 step process. */
4184 /* (1) All of the refs are marked by going through the reg chains. */
4185 for (i = 0; i < DF_REG_SIZE (df); i++)
4187 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4188 == DF_REG_DEF_COUNT (i));
4189 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4190 == DF_REG_USE_COUNT (i));
4191 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4192 == DF_REG_EQ_USE_COUNT (i));
4195 /* (2) There are various bitmaps whose value may change over the
4196 course of the compilation. This step recomputes them to make
4197 sure that they have not slipped out of date. */
4198 auto_bitmap regular_block_artificial_uses (&df_bitmap_obstack);
4199 auto_bitmap eh_block_artificial_uses (&df_bitmap_obstack);
4201 df_get_regular_block_artificial_uses (regular_block_artificial_uses);
4202 df_get_eh_block_artificial_uses (eh_block_artificial_uses);
4204 bitmap_ior_into (eh_block_artificial_uses,
4205 regular_block_artificial_uses);
4207 /* Check artificial_uses bitmaps didn't change. */
4208 gcc_assert (bitmap_equal_p (regular_block_artificial_uses,
4209 &df->regular_block_artificial_uses));
4210 gcc_assert (bitmap_equal_p (eh_block_artificial_uses,
4211 &df->eh_block_artificial_uses));
4213 /* Verify entry block and exit block. These only verify the bitmaps,
4214 the refs are verified in df_bb_verify. */
4215 df_entry_block_bitmap_verify (true);
4216 df_exit_block_bitmap_verify (true);
4218 /* (3) All of the insns in all of the blocks are traversed and the
4219 marks are cleared both in the artificial refs attached to the
4220 blocks and the real refs inside the insns. It is a failure to
4221 clear a mark that has not been set as this means that the ref in
4222 the block or insn was not in the reg chain. */
4224 FOR_ALL_BB_FN (bb, cfun)
4225 df_bb_verify (bb);
4227 /* (4) See if all reg chains are traversed a second time. This time
4228 a check is made that the marks are clear. A set mark would be a
4229 from a reg that is not in any insn or basic block. */
4231 for (i = 0; i < DF_REG_SIZE (df); i++)
4233 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4234 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4235 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));