[PR67828] don't unswitch on default defs of non-parms
[official-gcc.git] / gcc / df-scan.c
blob7a22b10371d432b8b5e859d39fa461447ab1b517
1 /* Scanning of rtl for dataflow analysis.
2 Copyright (C) 1999-2015 Free Software Foundation, Inc.
3 Originally contributed by Michael P. Hayes
4 (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
5 Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
6 and Kenneth Zadeck (zadeck@naturalbridge.com).
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "df.h"
31 #include "tm_p.h"
32 #include "insn-config.h"
33 #include "recog.h"
34 #include "alias.h"
35 #include "regs.h"
36 #include "alloc-pool.h"
37 #include "flags.h"
38 #include "dumpfile.h"
39 #include "target.h"
40 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
43 /* The set of hard registers in eliminables[i].from. */
45 static HARD_REG_SET elim_reg_set;
47 /* Initialize ur_in and ur_out as if all hard registers were partially
48 available. */
50 struct df_collection_rec
52 auto_vec<df_ref, 128> def_vec;
53 auto_vec<df_ref, 32> use_vec;
54 auto_vec<df_ref, 32> eq_use_vec;
55 auto_vec<df_mw_hardreg *, 32> mw_vec;
58 static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
59 rtx, rtx *,
60 basic_block, struct df_insn_info *,
61 enum df_ref_type, int ref_flags);
62 static void df_def_record_1 (struct df_collection_rec *, rtx *,
63 basic_block, struct df_insn_info *,
64 int ref_flags);
65 static void df_defs_record (struct df_collection_rec *, rtx,
66 basic_block, struct df_insn_info *,
67 int ref_flags);
68 static void df_uses_record (struct df_collection_rec *,
69 rtx *, enum df_ref_type,
70 basic_block, struct df_insn_info *,
71 int ref_flags);
73 static void df_install_ref_incremental (df_ref);
74 static void df_insn_refs_collect (struct df_collection_rec*,
75 basic_block, struct df_insn_info *);
76 static void df_canonize_collection_rec (struct df_collection_rec *);
78 static void df_get_regular_block_artificial_uses (bitmap);
79 static void df_get_eh_block_artificial_uses (bitmap);
81 static void df_record_entry_block_defs (bitmap);
82 static void df_record_exit_block_uses (bitmap);
83 static void df_get_exit_block_use_set (bitmap);
84 static void df_get_entry_block_def_set (bitmap);
85 static void df_grow_ref_info (struct df_ref_info *, unsigned int);
86 static void df_ref_chain_delete_du_chain (df_ref);
87 static void df_ref_chain_delete (df_ref);
89 static void df_refs_add_to_chains (struct df_collection_rec *,
90 basic_block, rtx_insn *, unsigned int);
92 static bool df_insn_refs_verify (struct df_collection_rec *, basic_block,
93 rtx_insn *, bool);
94 static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
95 static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
96 static void df_install_ref (df_ref, struct df_reg_info *,
97 struct df_ref_info *, bool);
99 static int df_ref_compare (df_ref, df_ref);
100 static int df_ref_ptr_compare (const void *, const void *);
101 static int df_mw_compare (const df_mw_hardreg *, const df_mw_hardreg *);
102 static int df_mw_ptr_compare (const void *, const void *);
104 static void df_insn_info_delete (unsigned int);
106 /* Indexed by hardware reg number, is true if that register is ever
107 used in the current function.
109 In df-scan.c, this is set up to record the hard regs used
110 explicitly. Reload adds in the hard regs used for holding pseudo
111 regs. Final uses it to generate the code in the function prologue
112 and epilogue to save and restore registers as needed. */
114 static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
116 /* Flags used to tell df_refs_add_to_chains() which vectors it should copy. */
117 static const unsigned int copy_defs = 0x1;
118 static const unsigned int copy_uses = 0x2;
119 static const unsigned int copy_eq_uses = 0x4;
120 static const unsigned int copy_mw = 0x8;
121 static const unsigned int copy_all = copy_defs | copy_uses | copy_eq_uses
122 | copy_mw;
124 /*----------------------------------------------------------------------------
125 SCANNING DATAFLOW PROBLEM
127 There are several ways in which scanning looks just like the other
128 dataflow problems. It shares the all the mechanisms for local info
129 as well as basic block info. Where it differs is when and how often
130 it gets run. It also has no need for the iterative solver.
131 ----------------------------------------------------------------------------*/
133 /* Problem data for the scanning dataflow function. */
134 struct df_scan_problem_data
136 object_allocator<df_base_ref> *ref_base_pool;
137 object_allocator<df_artificial_ref> *ref_artificial_pool;
138 object_allocator<df_regular_ref> *ref_regular_pool;
139 object_allocator<df_insn_info> *insn_pool;
140 object_allocator<df_reg_info> *reg_pool;
141 object_allocator<df_mw_hardreg> *mw_reg_pool;
143 bitmap_obstack reg_bitmaps;
144 bitmap_obstack insn_bitmaps;
147 /* Internal function to shut down the scanning problem. */
148 static void
149 df_scan_free_internal (void)
151 struct df_scan_problem_data *problem_data
152 = (struct df_scan_problem_data *) df_scan->problem_data;
154 free (df->def_info.refs);
155 free (df->def_info.begin);
156 free (df->def_info.count);
157 memset (&df->def_info, 0, (sizeof (struct df_ref_info)));
159 free (df->use_info.refs);
160 free (df->use_info.begin);
161 free (df->use_info.count);
162 memset (&df->use_info, 0, (sizeof (struct df_ref_info)));
164 free (df->def_regs);
165 df->def_regs = NULL;
166 free (df->use_regs);
167 df->use_regs = NULL;
168 free (df->eq_use_regs);
169 df->eq_use_regs = NULL;
170 df->regs_size = 0;
171 DF_REG_SIZE (df) = 0;
173 free (df->insns);
174 df->insns = NULL;
175 DF_INSN_SIZE () = 0;
177 free (df_scan->block_info);
178 df_scan->block_info = NULL;
179 df_scan->block_info_size = 0;
181 bitmap_clear (&df->hardware_regs_used);
182 bitmap_clear (&df->regular_block_artificial_uses);
183 bitmap_clear (&df->eh_block_artificial_uses);
184 BITMAP_FREE (df->entry_block_defs);
185 BITMAP_FREE (df->exit_block_uses);
186 bitmap_clear (&df->insns_to_delete);
187 bitmap_clear (&df->insns_to_rescan);
188 bitmap_clear (&df->insns_to_notes_rescan);
190 delete problem_data->ref_base_pool;
191 delete problem_data->ref_artificial_pool;
192 delete problem_data->ref_regular_pool;
193 delete problem_data->insn_pool;
194 delete problem_data->reg_pool;
195 delete problem_data->mw_reg_pool;
196 bitmap_obstack_release (&problem_data->reg_bitmaps);
197 bitmap_obstack_release (&problem_data->insn_bitmaps);
198 free (df_scan->problem_data);
202 /* Free basic block info. */
204 static void
205 df_scan_free_bb_info (basic_block bb, void *vbb_info)
207 struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
208 unsigned int bb_index = bb->index;
209 rtx_insn *insn;
211 FOR_BB_INSNS (bb, insn)
212 if (INSN_P (insn))
213 df_insn_info_delete (INSN_UID (insn));
215 if (bb_index < df_scan->block_info_size)
216 bb_info = df_scan_get_bb_info (bb_index);
218 /* Get rid of any artificial uses or defs. */
219 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
220 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
221 df_ref_chain_delete (bb_info->artificial_defs);
222 df_ref_chain_delete (bb_info->artificial_uses);
223 bb_info->artificial_defs = NULL;
224 bb_info->artificial_uses = NULL;
228 /* Allocate the problem data for the scanning problem. This should be
229 called when the problem is created or when the entire function is to
230 be rescanned. */
231 void
232 df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
234 struct df_scan_problem_data *problem_data;
235 unsigned int insn_num = get_max_uid () + 1;
236 basic_block bb;
238 /* Given the number of pools, this is really faster than tearing
239 everything apart. */
240 if (df_scan->problem_data)
241 df_scan_free_internal ();
243 problem_data = XNEW (struct df_scan_problem_data);
244 df_scan->problem_data = problem_data;
245 df_scan->computed = true;
247 problem_data->ref_base_pool = new object_allocator<df_base_ref>
248 ("df_scan ref base");
249 problem_data->ref_artificial_pool = new object_allocator<df_artificial_ref>
250 ("df_scan ref artificial");
251 problem_data->ref_regular_pool = new object_allocator<df_regular_ref>
252 ("df_scan ref regular");
253 problem_data->insn_pool = new object_allocator<df_insn_info>
254 ("df_scan insn");
255 problem_data->reg_pool = new object_allocator<df_reg_info>
256 ("df_scan reg");
257 problem_data->mw_reg_pool = new object_allocator<df_mw_hardreg>
258 ("df_scan mw_reg");
260 bitmap_obstack_initialize (&problem_data->reg_bitmaps);
261 bitmap_obstack_initialize (&problem_data->insn_bitmaps);
263 insn_num += insn_num / 4;
264 df_grow_reg_info ();
266 df_grow_insn_info ();
267 df_grow_bb_info (df_scan);
269 FOR_ALL_BB_FN (bb, cfun)
271 unsigned int bb_index = bb->index;
272 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
273 bb_info->artificial_defs = NULL;
274 bb_info->artificial_uses = NULL;
277 bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
278 bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
279 bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
280 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
281 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
282 bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
283 bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
284 bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
285 df_scan->optional_p = false;
289 /* Free all of the data associated with the scan problem. */
291 static void
292 df_scan_free (void)
294 if (df_scan->problem_data)
295 df_scan_free_internal ();
297 if (df->blocks_to_analyze)
299 BITMAP_FREE (df->blocks_to_analyze);
300 df->blocks_to_analyze = NULL;
303 free (df_scan);
306 /* Dump the preamble for DF_SCAN dump. */
307 static void
308 df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
310 int i;
311 int dcount = 0;
312 int ucount = 0;
313 int ecount = 0;
314 int icount = 0;
315 int ccount = 0;
316 basic_block bb;
317 rtx_insn *insn;
319 fprintf (file, ";; invalidated by call \t");
320 df_print_regset (file, regs_invalidated_by_call_regset);
321 fprintf (file, ";; hardware regs used \t");
322 df_print_regset (file, &df->hardware_regs_used);
323 fprintf (file, ";; regular block artificial uses \t");
324 df_print_regset (file, &df->regular_block_artificial_uses);
325 fprintf (file, ";; eh block artificial uses \t");
326 df_print_regset (file, &df->eh_block_artificial_uses);
327 fprintf (file, ";; entry block defs \t");
328 df_print_regset (file, df->entry_block_defs);
329 fprintf (file, ";; exit block uses \t");
330 df_print_regset (file, df->exit_block_uses);
331 fprintf (file, ";; regs ever live \t");
332 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
333 if (df_regs_ever_live_p (i))
334 fprintf (file, " %d [%s]", i, reg_names[i]);
335 fprintf (file, "\n;; ref usage \t");
337 for (i = 0; i < (int)df->regs_inited; i++)
338 if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
340 const char * sep = "";
342 fprintf (file, "r%d={", i);
343 if (DF_REG_DEF_COUNT (i))
345 fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
346 sep = ",";
347 dcount += DF_REG_DEF_COUNT (i);
349 if (DF_REG_USE_COUNT (i))
351 fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
352 sep = ",";
353 ucount += DF_REG_USE_COUNT (i);
355 if (DF_REG_EQ_USE_COUNT (i))
357 fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
358 ecount += DF_REG_EQ_USE_COUNT (i);
360 fprintf (file, "} ");
363 FOR_EACH_BB_FN (bb, cfun)
364 FOR_BB_INSNS (bb, insn)
365 if (INSN_P (insn))
367 if (CALL_P (insn))
368 ccount++;
369 else
370 icount++;
373 fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
374 " in %d{%d regular + %d call} insns.\n",
375 dcount + ucount + ecount, dcount, ucount, ecount,
376 icount + ccount, icount, ccount);
379 /* Dump the bb_info for a given basic block. */
380 static void
381 df_scan_start_block (basic_block bb, FILE *file)
383 struct df_scan_bb_info *bb_info
384 = df_scan_get_bb_info (bb->index);
386 if (bb_info)
388 fprintf (file, ";; bb %d artificial_defs: ", bb->index);
389 df_refs_chain_dump (bb_info->artificial_defs, true, file);
390 fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
391 df_refs_chain_dump (bb_info->artificial_uses, true, file);
392 fprintf (file, "\n");
394 #if 0
396 rtx_insn *insn;
397 FOR_BB_INSNS (bb, insn)
398 if (INSN_P (insn))
399 df_insn_debug (insn, false, file);
401 #endif
404 static struct df_problem problem_SCAN =
406 DF_SCAN, /* Problem id. */
407 DF_NONE, /* Direction. */
408 df_scan_alloc, /* Allocate the problem specific data. */
409 NULL, /* Reset global information. */
410 df_scan_free_bb_info, /* Free basic block info. */
411 NULL, /* Local compute function. */
412 NULL, /* Init the solution specific data. */
413 NULL, /* Iterative solver. */
414 NULL, /* Confluence operator 0. */
415 NULL, /* Confluence operator n. */
416 NULL, /* Transfer function. */
417 NULL, /* Finalize function. */
418 df_scan_free, /* Free all of the problem information. */
419 NULL, /* Remove this problem from the stack of dataflow problems. */
420 df_scan_start_dump, /* Debugging. */
421 df_scan_start_block, /* Debugging start block. */
422 NULL, /* Debugging end block. */
423 NULL, /* Debugging start insn. */
424 NULL, /* Debugging end insn. */
425 NULL, /* Incremental solution verify start. */
426 NULL, /* Incremental solution verify end. */
427 NULL, /* Dependent problem. */
428 sizeof (struct df_scan_bb_info),/* Size of entry of block_info array. */
429 TV_DF_SCAN, /* Timing variable. */
430 false /* Reset blocks on dropping out of blocks_to_analyze. */
434 /* Create a new DATAFLOW instance and add it to an existing instance
435 of DF. The returned structure is what is used to get at the
436 solution. */
438 void
439 df_scan_add_problem (void)
441 df_add_problem (&problem_SCAN);
445 /*----------------------------------------------------------------------------
446 Storage Allocation Utilities
447 ----------------------------------------------------------------------------*/
450 /* First, grow the reg_info information. If the current size is less than
451 the number of pseudos, grow to 25% more than the number of
452 pseudos.
454 Second, assure that all of the slots up to max_reg_num have been
455 filled with reg_info structures. */
457 void
458 df_grow_reg_info (void)
460 unsigned int max_reg = max_reg_num ();
461 unsigned int new_size = max_reg;
462 struct df_scan_problem_data *problem_data
463 = (struct df_scan_problem_data *) df_scan->problem_data;
464 unsigned int i;
466 if (df->regs_size < new_size)
468 new_size += new_size / 4;
469 df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
470 df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
471 df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
472 new_size);
473 df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
474 df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
475 df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
476 df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
477 df->regs_size = new_size;
480 for (i = df->regs_inited; i < max_reg; i++)
482 struct df_reg_info *reg_info;
484 // TODO
485 reg_info = problem_data->reg_pool->allocate ();
486 memset (reg_info, 0, sizeof (struct df_reg_info));
487 df->def_regs[i] = reg_info;
488 reg_info = problem_data->reg_pool->allocate ();
489 memset (reg_info, 0, sizeof (struct df_reg_info));
490 df->use_regs[i] = reg_info;
491 reg_info = problem_data->reg_pool->allocate ();
492 memset (reg_info, 0, sizeof (struct df_reg_info));
493 df->eq_use_regs[i] = reg_info;
494 df->def_info.begin[i] = 0;
495 df->def_info.count[i] = 0;
496 df->use_info.begin[i] = 0;
497 df->use_info.count[i] = 0;
500 df->regs_inited = max_reg;
504 /* Grow the ref information. */
506 static void
507 df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
509 if (ref_info->refs_size < new_size)
511 ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
512 memset (ref_info->refs + ref_info->refs_size, 0,
513 (new_size - ref_info->refs_size) *sizeof (df_ref));
514 ref_info->refs_size = new_size;
519 /* Check and grow the ref information if necessary. This routine
520 guarantees total_size + BITMAP_ADDEND amount of entries in refs
521 array. It updates ref_info->refs_size only and does not change
522 ref_info->total_size. */
524 static void
525 df_check_and_grow_ref_info (struct df_ref_info *ref_info,
526 unsigned bitmap_addend)
528 if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
530 int new_size = ref_info->total_size + bitmap_addend;
531 new_size += ref_info->total_size / 4;
532 df_grow_ref_info (ref_info, new_size);
537 /* Grow the ref information. If the current size is less than the
538 number of instructions, grow to 25% more than the number of
539 instructions. */
541 void
542 df_grow_insn_info (void)
544 unsigned int new_size = get_max_uid () + 1;
545 if (DF_INSN_SIZE () < new_size)
547 new_size += new_size / 4;
548 df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
549 memset (df->insns + df->insns_size, 0,
550 (new_size - DF_INSN_SIZE ()) *sizeof (struct df_insn_info *));
551 DF_INSN_SIZE () = new_size;
558 /*----------------------------------------------------------------------------
559 PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
560 ----------------------------------------------------------------------------*/
562 /* Rescan all of the block_to_analyze or all of the blocks in the
563 function if df_set_blocks if blocks_to_analyze is NULL; */
565 void
566 df_scan_blocks (void)
568 basic_block bb;
570 df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
571 df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;
573 df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
574 df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);
576 bitmap_ior_into (&df->eh_block_artificial_uses,
577 &df->regular_block_artificial_uses);
579 /* ENTRY and EXIT blocks have special defs/uses. */
580 df_get_entry_block_def_set (df->entry_block_defs);
581 df_record_entry_block_defs (df->entry_block_defs);
582 df_get_exit_block_use_set (df->exit_block_uses);
583 df_record_exit_block_uses (df->exit_block_uses);
584 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
585 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
587 /* Regular blocks */
588 FOR_EACH_BB_FN (bb, cfun)
590 unsigned int bb_index = bb->index;
591 df_bb_refs_record (bb_index, true);
595 /* Create new refs under address LOC within INSN. This function is
596 only used externally. REF_FLAGS must be either 0 or DF_REF_IN_NOTE,
597 depending on whether LOC is inside PATTERN (INSN) or a note. */
599 void
600 df_uses_create (rtx *loc, rtx_insn *insn, int ref_flags)
602 gcc_assert (!(ref_flags & ~DF_REF_IN_NOTE));
603 df_uses_record (NULL, loc, DF_REF_REG_USE,
604 BLOCK_FOR_INSN (insn),
605 DF_INSN_INFO_GET (insn),
606 ref_flags);
609 static void
610 df_install_ref_incremental (df_ref ref)
612 struct df_reg_info **reg_info;
613 struct df_ref_info *ref_info;
614 df_ref *ref_ptr;
615 bool add_to_table;
617 rtx_insn *insn = DF_REF_INSN (ref);
618 basic_block bb = BLOCK_FOR_INSN (insn);
620 if (DF_REF_REG_DEF_P (ref))
622 reg_info = df->def_regs;
623 ref_info = &df->def_info;
624 ref_ptr = &DF_INSN_DEFS (insn);
625 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
627 else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
629 reg_info = df->eq_use_regs;
630 ref_info = &df->use_info;
631 ref_ptr = &DF_INSN_EQ_USES (insn);
632 switch (ref_info->ref_order)
634 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
635 case DF_REF_ORDER_BY_REG_WITH_NOTES:
636 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
637 add_to_table = true;
638 break;
639 default:
640 add_to_table = false;
641 break;
644 else
646 reg_info = df->use_regs;
647 ref_info = &df->use_info;
648 ref_ptr = &DF_INSN_USES (insn);
649 add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
652 /* Do not add if ref is not in the right blocks. */
653 if (add_to_table && df->analyze_subset)
654 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
656 df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);
658 if (add_to_table)
659 switch (ref_info->ref_order)
661 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
662 case DF_REF_ORDER_BY_REG_WITH_NOTES:
663 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
664 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
665 break;
666 default:
667 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
668 break;
671 while (*ref_ptr && df_ref_compare (*ref_ptr, ref) < 0)
672 ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
674 DF_REF_NEXT_LOC (ref) = *ref_ptr;
675 *ref_ptr = ref;
677 #if 0
678 if (dump_file)
680 fprintf (dump_file, "adding ref ");
681 df_ref_debug (ref, dump_file);
683 #endif
684 /* By adding the ref directly, df_insn_rescan my not find any
685 differences even though the block will have changed. So we need
686 to mark the block dirty ourselves. */
687 if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
688 df_set_bb_dirty (bb);
693 /*----------------------------------------------------------------------------
694 UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
695 ----------------------------------------------------------------------------*/
697 static void
698 df_free_ref (df_ref ref)
700 struct df_scan_problem_data *problem_data
701 = (struct df_scan_problem_data *) df_scan->problem_data;
703 switch (DF_REF_CLASS (ref))
705 case DF_REF_BASE:
706 problem_data->ref_base_pool->remove ((df_base_ref *) (ref));
707 break;
709 case DF_REF_ARTIFICIAL:
710 problem_data->ref_artificial_pool->remove
711 ((df_artificial_ref *) (ref));
712 break;
714 case DF_REF_REGULAR:
715 problem_data->ref_regular_pool->remove
716 ((df_regular_ref *) (ref));
717 break;
722 /* Unlink and delete REF at the reg_use, reg_eq_use or reg_def chain.
723 Also delete the def-use or use-def chain if it exists. */
725 static void
726 df_reg_chain_unlink (df_ref ref)
728 df_ref next = DF_REF_NEXT_REG (ref);
729 df_ref prev = DF_REF_PREV_REG (ref);
730 int id = DF_REF_ID (ref);
731 struct df_reg_info *reg_info;
732 df_ref *refs = NULL;
734 if (DF_REF_REG_DEF_P (ref))
736 int regno = DF_REF_REGNO (ref);
737 reg_info = DF_REG_DEF_GET (regno);
738 refs = df->def_info.refs;
740 else
742 if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
744 reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
745 switch (df->use_info.ref_order)
747 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
748 case DF_REF_ORDER_BY_REG_WITH_NOTES:
749 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
750 refs = df->use_info.refs;
751 break;
752 default:
753 break;
756 else
758 reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
759 refs = df->use_info.refs;
763 if (refs)
765 if (df->analyze_subset)
767 if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
768 refs[id] = NULL;
770 else
771 refs[id] = NULL;
774 /* Delete any def-use or use-def chains that start here. It is
775 possible that there is trash in this field. This happens for
776 insns that have been deleted when rescanning has been deferred
777 and the chain problem has also been deleted. The chain tear down
778 code skips deleted insns. */
779 if (df_chain && DF_REF_CHAIN (ref))
780 df_chain_unlink (ref);
782 reg_info->n_refs--;
783 if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
785 gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
786 df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
789 /* Unlink from the reg chain. If there is no prev, this is the
790 first of the list. If not, just join the next and prev. */
791 if (prev)
792 DF_REF_NEXT_REG (prev) = next;
793 else
795 gcc_assert (reg_info->reg_chain == ref);
796 reg_info->reg_chain = next;
798 if (next)
799 DF_REF_PREV_REG (next) = prev;
801 df_free_ref (ref);
804 /* Initialize INSN_INFO to describe INSN. */
806 static void
807 df_insn_info_init_fields (df_insn_info *insn_info, rtx_insn *insn)
809 memset (insn_info, 0, sizeof (struct df_insn_info));
810 insn_info->insn = insn;
813 /* Create the insn record for INSN. If there was one there, zero it
814 out. */
816 struct df_insn_info *
817 df_insn_create_insn_record (rtx_insn *insn)
819 struct df_scan_problem_data *problem_data
820 = (struct df_scan_problem_data *) df_scan->problem_data;
821 struct df_insn_info *insn_rec;
823 df_grow_insn_info ();
824 insn_rec = DF_INSN_INFO_GET (insn);
825 if (!insn_rec)
827 insn_rec = problem_data->insn_pool->allocate ();
828 DF_INSN_INFO_SET (insn, insn_rec);
830 df_insn_info_init_fields (insn_rec, insn);
831 return insn_rec;
835 /* Delete all du chain (DF_REF_CHAIN()) of all refs in the ref chain. */
837 static void
838 df_ref_chain_delete_du_chain (df_ref ref)
840 for (; ref; ref = DF_REF_NEXT_LOC (ref))
841 /* CHAIN is allocated by DF_CHAIN. So make sure to
842 pass df_scan instance for the problem. */
843 if (DF_REF_CHAIN (ref))
844 df_chain_unlink (ref);
848 /* Delete all refs in the ref chain. */
850 static void
851 df_ref_chain_delete (df_ref ref)
853 df_ref next;
854 for (; ref; ref = next)
856 next = DF_REF_NEXT_LOC (ref);
857 df_reg_chain_unlink (ref);
862 /* Delete the hardreg chain. */
864 static void
865 df_mw_hardreg_chain_delete (struct df_mw_hardreg *hardregs)
867 struct df_scan_problem_data *problem_data
868 = (struct df_scan_problem_data *) df_scan->problem_data;
869 df_mw_hardreg *next;
871 for (; hardregs; hardregs = next)
873 next = DF_MWS_NEXT (hardregs);
874 problem_data->mw_reg_pool->remove (hardregs);
878 /* Remove the contents of INSN_INFO (but don't free INSN_INFO itself). */
880 static void
881 df_insn_info_free_fields (df_insn_info *insn_info)
883 /* In general, notes do not have the insn_info fields
884 initialized. However, combine deletes insns by changing them
885 to notes. How clever. So we cannot just check if it is a
886 valid insn before short circuiting this code, we need to see
887 if we actually initialized it. */
888 df_mw_hardreg_chain_delete (insn_info->mw_hardregs);
890 if (df_chain)
892 df_ref_chain_delete_du_chain (insn_info->defs);
893 df_ref_chain_delete_du_chain (insn_info->uses);
894 df_ref_chain_delete_du_chain (insn_info->eq_uses);
897 df_ref_chain_delete (insn_info->defs);
898 df_ref_chain_delete (insn_info->uses);
899 df_ref_chain_delete (insn_info->eq_uses);
902 /* Delete all of the refs information from the insn with UID.
903 Internal helper for df_insn_delete, df_insn_rescan, and other
904 df-scan routines that don't have to work in deferred mode
905 and do not have to mark basic blocks for re-processing. */
907 static void
908 df_insn_info_delete (unsigned int uid)
910 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
912 bitmap_clear_bit (&df->insns_to_delete, uid);
913 bitmap_clear_bit (&df->insns_to_rescan, uid);
914 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
915 if (insn_info)
917 struct df_scan_problem_data *problem_data
918 = (struct df_scan_problem_data *) df_scan->problem_data;
920 df_insn_info_free_fields (insn_info);
921 problem_data->insn_pool->remove (insn_info);
922 DF_INSN_UID_SET (uid, NULL);
926 /* Delete all of the refs information from INSN, either right now
927 or marked for later in deferred mode. */
929 void
930 df_insn_delete (rtx_insn *insn)
932 unsigned int uid;
933 basic_block bb;
935 gcc_checking_assert (INSN_P (insn));
937 if (!df)
938 return;
940 uid = INSN_UID (insn);
941 bb = BLOCK_FOR_INSN (insn);
943 /* ??? bb can be NULL after pass_free_cfg. At that point, DF should
944 not exist anymore (as mentioned in df-core.c: "The only requirement
945 [for DF] is that there be a correct control flow graph." Clearly
946 that isn't the case after pass_free_cfg. But DF is freed much later
947 because some back-ends want to use DF info even though the CFG is
948 already gone. It's not clear to me whether that is safe, actually.
949 In any case, we expect BB to be non-NULL at least up to register
950 allocation, so disallow a non-NULL BB up to there. Not perfect
951 but better than nothing... */
952 gcc_checking_assert (bb != NULL || reload_completed);
954 df_grow_bb_info (df_scan);
955 df_grow_reg_info ();
957 /* The block must be marked as dirty now, rather than later as in
958 df_insn_rescan and df_notes_rescan because it may not be there at
959 rescanning time and the mark would blow up.
960 DEBUG_INSNs do not make a block's data flow solution dirty (at
961 worst the LUIDs are no longer contiguous). */
962 if (bb != NULL && NONDEBUG_INSN_P (insn))
963 df_set_bb_dirty (bb);
965 /* The client has deferred rescanning. */
966 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
968 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
969 if (insn_info)
971 bitmap_clear_bit (&df->insns_to_rescan, uid);
972 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
973 bitmap_set_bit (&df->insns_to_delete, uid);
975 if (dump_file)
976 fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
977 return;
980 if (dump_file)
981 fprintf (dump_file, "deleting insn with uid = %d.\n", uid);
983 df_insn_info_delete (uid);
987 /* Free all of the refs and the mw_hardregs in COLLECTION_REC. */
989 static void
990 df_free_collection_rec (struct df_collection_rec *collection_rec)
992 unsigned int ix;
993 struct df_scan_problem_data *problem_data
994 = (struct df_scan_problem_data *) df_scan->problem_data;
995 df_ref ref;
996 struct df_mw_hardreg *mw;
998 FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
999 df_free_ref (ref);
1000 FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
1001 df_free_ref (ref);
1002 FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
1003 df_free_ref (ref);
1004 FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
1005 problem_data->mw_reg_pool->remove (mw);
1007 collection_rec->def_vec.release ();
1008 collection_rec->use_vec.release ();
1009 collection_rec->eq_use_vec.release ();
1010 collection_rec->mw_vec.release ();
/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;

  /* Nothing to do without df info or for non-insns (notes, barriers...).  */
  if ((!df) || (!INSN_P (insn)))
    return false;

  /* Insns not yet emitted into a block cannot be scanned.  */
  if (!bb)
    {
      if (dump_file)
	fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning: just record the request in
     df->insns_to_rescan and return without scanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = 0;
	  insn_info->uses = 0;
	  insn_info->eq_uses = 0;
	  insn_info->mw_hardregs = 0;
	}
      if (dump_file)
	fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      /* A pending rescan supersedes pending deletion/notes-rescan.  */
      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  /* An immediate rescan cancels any deferred work queued for this uid.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false. */
      if (the_same)
	{
	  df_free_collection_rec (&collection_rec);
	  if (dump_file)
	    fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
	 Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_info_free_fields (insn_info);
      df_insn_info_init_fields (insn_info, insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      /* First time this insn is seen: build a fresh record and collect
	 its refs from scratch.  */
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
	fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
  if (!DEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  return true;
}
/* Same as df_insn_rescan, but don't mark the basic block as
   dirty.  */

bool
df_insn_rescan_debug_internal (rtx_insn *insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  /* Only called for debug insns whose location has been reset to
     "unknown"; all that is needed is to drop their refs.  */
  gcc_assert (DEBUG_INSN_P (insn)
	      && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  /* Cancel any deferred work queued for this insn.  */
  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  /* Nothing recorded -> nothing changed.  */
  if (insn_info->defs == 0
      && insn_info->uses == 0
      && insn_info->eq_uses == 0
      && insn_info->mw_hardregs == 0)
    return false;

  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  /* If du/ud chains are being kept, unlink the refs from them first.  */
  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  insn_info->defs = 0;
  insn_info->uses = 0;
  insn_info->eq_uses = 0;
  insn_info->mw_hardregs = 0;

  return true;
}
1156 /* Rescan all of the insns in the function. Note that the artificial
1157 uses and defs are not touched. This function will destroy def-use
1158 or use-def chains. */
1160 void
1161 df_insn_rescan_all (void)
1163 bool no_insn_rescan = false;
1164 bool defer_insn_rescan = false;
1165 basic_block bb;
1166 bitmap_iterator bi;
1167 unsigned int uid;
1168 bitmap_head tmp;
1170 bitmap_initialize (&tmp, &df_bitmap_obstack);
1172 if (df->changeable_flags & DF_NO_INSN_RESCAN)
1174 df_clear_flags (DF_NO_INSN_RESCAN);
1175 no_insn_rescan = true;
1178 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
1180 df_clear_flags (DF_DEFER_INSN_RESCAN);
1181 defer_insn_rescan = true;
1184 bitmap_copy (&tmp, &df->insns_to_delete);
1185 EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
1187 struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
1188 if (insn_info)
1189 df_insn_info_delete (uid);
1192 bitmap_clear (&tmp);
1193 bitmap_clear (&df->insns_to_delete);
1194 bitmap_clear (&df->insns_to_rescan);
1195 bitmap_clear (&df->insns_to_notes_rescan);
1197 FOR_EACH_BB_FN (bb, cfun)
1199 rtx_insn *insn;
1200 FOR_BB_INSNS (bb, insn)
1202 df_insn_rescan (insn);
1206 if (no_insn_rescan)
1207 df_set_flags (DF_NO_INSN_RESCAN);
1208 if (defer_insn_rescan)
1209 df_set_flags (DF_DEFER_INSN_RESCAN);
/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  /* Temporarily drop the suppression flags so the rescans below
     actually run; restore them at the end.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  /* Deletions first; iterate over a copy since the worker functions
     mutate the df bitmaps.  */
  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_info_delete (uid);
    }

  /* Then full rescans.  */
  bitmap_copy (&tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_insn_rescan (insn_info->insn);
    }

  /* Finally notes-only rescans.  */
  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
	df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}
1288 /* Count the number of refs. Include the defs if INCLUDE_DEFS. Include
1289 the uses if INCLUDE_USES. Include the eq_uses if
1290 INCLUDE_EQ_USES. */
1292 static unsigned int
1293 df_count_refs (bool include_defs, bool include_uses,
1294 bool include_eq_uses)
1296 unsigned int regno;
1297 int size = 0;
1298 unsigned int m = df->regs_inited;
1300 for (regno = 0; regno < m; regno++)
1302 if (include_defs)
1303 size += DF_REG_DEF_COUNT (regno);
1304 if (include_uses)
1305 size += DF_REG_USE_COUNT (regno);
1306 if (include_eq_uses)
1307 size += DF_REG_EQ_USE_COUNT (regno);
1309 return size;
/* Take build ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order
   which is likely to be best if processing the whole function.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
				  bool include_defs,
				  bool include_uses,
				  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      /* Hard regs are skipped entirely; zero their table entries.  */
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  /* Walk each register's chains in turn, appending the refs into the
     flat table and recording each regno's begin/count.  */
  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
	{
	  df_ref ref = DF_REG_DEF_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      if (include_uses)
	{
	  df_ref ref = DF_REG_USE_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      if (include_eq_uses)
	{
	  df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      count++;
	      ref = DF_REF_NEXT_REG (ref);
	      gcc_checking_assert (offset < ref_info->refs_size);
	    }
	}
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}
1392 /* Take build ref table for either the uses or defs from the reg-use
1393 or reg-def chains. This version processes the refs in insn order
1394 which is likely to be best if processing some segment of the
1395 function. */
1397 static void
1398 df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
1399 bool include_defs,
1400 bool include_uses,
1401 bool include_eq_uses)
1403 bitmap_iterator bi;
1404 unsigned int bb_index;
1405 unsigned int m = df->regs_inited;
1406 unsigned int offset = 0;
1407 unsigned int r;
1408 unsigned int start
1409 = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;
1411 memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
1412 memset (ref_info->count, 0, sizeof (int) * df->regs_inited);
1414 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1415 df_check_and_grow_ref_info (ref_info, 1);
1417 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1419 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1420 rtx_insn *insn;
1421 df_ref def, use;
1423 if (include_defs)
1424 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1426 unsigned int regno = DF_REF_REGNO (def);
1427 ref_info->count[regno]++;
1429 if (include_uses)
1430 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1432 unsigned int regno = DF_REF_REGNO (use);
1433 ref_info->count[regno]++;
1436 FOR_BB_INSNS (bb, insn)
1438 if (INSN_P (insn))
1440 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1442 if (include_defs)
1443 FOR_EACH_INSN_INFO_DEF (def, insn_info)
1445 unsigned int regno = DF_REF_REGNO (def);
1446 ref_info->count[regno]++;
1448 if (include_uses)
1449 FOR_EACH_INSN_INFO_USE (use, insn_info)
1451 unsigned int regno = DF_REF_REGNO (use);
1452 ref_info->count[regno]++;
1454 if (include_eq_uses)
1455 FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1457 unsigned int regno = DF_REF_REGNO (use);
1458 ref_info->count[regno]++;
1464 for (r = start; r < m; r++)
1466 ref_info->begin[r] = offset;
1467 offset += ref_info->count[r];
1468 ref_info->count[r] = 0;
1471 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
1473 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
1474 rtx_insn *insn;
1475 df_ref def, use;
1477 if (include_defs)
1478 FOR_EACH_ARTIFICIAL_DEF (def, bb_index)
1480 unsigned int regno = DF_REF_REGNO (def);
1481 if (regno >= start)
1483 unsigned int id
1484 = ref_info->begin[regno] + ref_info->count[regno]++;
1485 DF_REF_ID (def) = id;
1486 ref_info->refs[id] = def;
1489 if (include_uses)
1490 FOR_EACH_ARTIFICIAL_USE (use, bb_index)
1492 unsigned int regno = DF_REF_REGNO (def);
1493 if (regno >= start)
1495 unsigned int id
1496 = ref_info->begin[regno] + ref_info->count[regno]++;
1497 DF_REF_ID (use) = id;
1498 ref_info->refs[id] = use;
1502 FOR_BB_INSNS (bb, insn)
1504 if (INSN_P (insn))
1506 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
1508 if (include_defs)
1509 FOR_EACH_INSN_INFO_DEF (def, insn_info)
1511 unsigned int regno = DF_REF_REGNO (def);
1512 if (regno >= start)
1514 unsigned int id
1515 = ref_info->begin[regno] + ref_info->count[regno]++;
1516 DF_REF_ID (def) = id;
1517 ref_info->refs[id] = def;
1520 if (include_uses)
1521 FOR_EACH_INSN_INFO_USE (use, insn_info)
1523 unsigned int regno = DF_REF_REGNO (use);
1524 if (regno >= start)
1526 unsigned int id
1527 = ref_info->begin[regno] + ref_info->count[regno]++;
1528 DF_REF_ID (use) = id;
1529 ref_info->refs[id] = use;
1532 if (include_eq_uses)
1533 FOR_EACH_INSN_INFO_EQ_USE (use, insn_info)
1535 unsigned int regno = DF_REF_REGNO (use);
1536 if (regno >= start)
1538 unsigned int id
1539 = ref_info->begin[regno] + ref_info->count[regno]++;
1540 DF_REF_ID (use) = id;
1541 ref_info->refs[id] = use;
1548 /* The bitmap size is not decremented when refs are deleted. So
1549 reset it now that we have squished out all of the empty
1550 slots. */
1552 ref_info->table_size = offset;
1555 /* Take build ref table for either the uses or defs from the reg-use
1556 or reg-def chains. */
1558 static void
1559 df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
1560 bool include_defs,
1561 bool include_uses,
1562 bool include_eq_uses)
1564 if (df->analyze_subset)
1565 df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
1566 include_uses, include_eq_uses);
1567 else
1568 df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
1569 include_uses, include_eq_uses);
1573 /* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET. */
1574 static unsigned int
1575 df_add_refs_to_table (unsigned int offset,
1576 struct df_ref_info *ref_info,
1577 df_ref ref)
1579 for (; ref; ref = DF_REF_NEXT_LOC (ref))
1580 if (!(df->changeable_flags & DF_NO_HARD_REGS)
1581 || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
1583 ref_info->refs[offset] = ref;
1584 DF_REF_ID (ref) = offset++;
1586 return offset;
1590 /* Count the number of refs in all of the insns of BB. Include the
1591 defs if INCLUDE_DEFS. Include the uses if INCLUDE_USES. Include the
1592 eq_uses if INCLUDE_EQ_USES. */
1594 static unsigned int
1595 df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
1596 struct df_ref_info *ref_info,
1597 bool include_defs, bool include_uses,
1598 bool include_eq_uses)
1600 rtx_insn *insn;
1602 if (include_defs)
1603 offset = df_add_refs_to_table (offset, ref_info,
1604 df_get_artificial_defs (bb->index));
1605 if (include_uses)
1606 offset = df_add_refs_to_table (offset, ref_info,
1607 df_get_artificial_uses (bb->index));
1609 FOR_BB_INSNS (bb, insn)
1610 if (INSN_P (insn))
1612 unsigned int uid = INSN_UID (insn);
1613 if (include_defs)
1614 offset = df_add_refs_to_table (offset, ref_info,
1615 DF_INSN_UID_DEFS (uid));
1616 if (include_uses)
1617 offset = df_add_refs_to_table (offset, ref_info,
1618 DF_INSN_UID_USES (uid));
1619 if (include_eq_uses)
1620 offset = df_add_refs_to_table (offset, ref_info,
1621 DF_INSN_UID_EQ_USES (uid));
1623 return offset;
1627 /* Organize the refs by insn into the table in REF_INFO. If
1628 blocks_to_analyze is defined, use that set, otherwise the entire
1629 program. Include the defs if INCLUDE_DEFS. Include the uses if
1630 INCLUDE_USES. Include the eq_uses if INCLUDE_EQ_USES. */
1632 static void
1633 df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
1634 bool include_defs, bool include_uses,
1635 bool include_eq_uses)
1637 basic_block bb;
1638 unsigned int offset = 0;
1640 ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
1641 df_check_and_grow_ref_info (ref_info, 1);
1642 if (df->blocks_to_analyze)
1644 bitmap_iterator bi;
1645 unsigned int index;
1647 EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
1649 offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK_FOR_FN (cfun,
1650 index),
1651 offset, ref_info,
1652 include_defs, include_uses,
1653 include_eq_uses);
1656 ref_info->table_size = offset;
1658 else
1660 FOR_ALL_BB_FN (bb, cfun)
1661 offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
1662 include_defs, include_uses,
1663 include_eq_uses);
1664 ref_info->table_size = offset;
1669 /* If the use refs in DF are not organized, reorganize them. */
1671 void
1672 df_maybe_reorganize_use_refs (enum df_ref_order order)
1674 if (order == df->use_info.ref_order)
1675 return;
1677 switch (order)
1679 case DF_REF_ORDER_BY_REG:
1680 df_reorganize_refs_by_reg (&df->use_info, false, true, false);
1681 break;
1683 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1684 df_reorganize_refs_by_reg (&df->use_info, false, true, true);
1685 break;
1687 case DF_REF_ORDER_BY_INSN:
1688 df_reorganize_refs_by_insn (&df->use_info, false, true, false);
1689 break;
1691 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1692 df_reorganize_refs_by_insn (&df->use_info, false, true, true);
1693 break;
1695 case DF_REF_ORDER_NO_TABLE:
1696 free (df->use_info.refs);
1697 df->use_info.refs = NULL;
1698 df->use_info.refs_size = 0;
1699 break;
1701 case DF_REF_ORDER_UNORDERED:
1702 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1703 gcc_unreachable ();
1704 break;
1707 df->use_info.ref_order = order;
1711 /* If the def refs in DF are not organized, reorganize them. */
1713 void
1714 df_maybe_reorganize_def_refs (enum df_ref_order order)
1716 if (order == df->def_info.ref_order)
1717 return;
1719 switch (order)
1721 case DF_REF_ORDER_BY_REG:
1722 df_reorganize_refs_by_reg (&df->def_info, true, false, false);
1723 break;
1725 case DF_REF_ORDER_BY_INSN:
1726 df_reorganize_refs_by_insn (&df->def_info, true, false, false);
1727 break;
1729 case DF_REF_ORDER_NO_TABLE:
1730 free (df->def_info.refs);
1731 df->def_info.refs = NULL;
1732 df->def_info.refs_size = 0;
1733 break;
1735 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
1736 case DF_REF_ORDER_BY_REG_WITH_NOTES:
1737 case DF_REF_ORDER_UNORDERED:
1738 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
1739 gcc_unreachable ();
1740 break;
1743 df->def_info.ref_order = order;
1747 /* Change all of the basic block references in INSN to use the insn's
1748 current basic block. This function is called from routines that move
1749 instructions from one block to another. */
1751 void
1752 df_insn_change_bb (rtx_insn *insn, basic_block new_bb)
1754 basic_block old_bb = BLOCK_FOR_INSN (insn);
1755 struct df_insn_info *insn_info;
1756 unsigned int uid = INSN_UID (insn);
1758 if (old_bb == new_bb)
1759 return;
1761 set_block_for_insn (insn, new_bb);
1763 if (!df)
1764 return;
1766 if (dump_file)
1767 fprintf (dump_file, "changing bb of uid %d\n", uid);
1769 insn_info = DF_INSN_UID_SAFE_GET (uid);
1770 if (insn_info == NULL)
1772 if (dump_file)
1773 fprintf (dump_file, " unscanned insn\n");
1774 df_insn_rescan (insn);
1775 return;
1778 if (!INSN_P (insn))
1779 return;
1781 df_set_bb_dirty (new_bb);
1782 if (old_bb)
1784 if (dump_file)
1785 fprintf (dump_file, " from %d to %d\n",
1786 old_bb->index, new_bb->index);
1787 df_set_bb_dirty (old_bb);
1789 else
1790 if (dump_file)
1791 fprintf (dump_file, " to %d\n", new_bb->index);
/* Helper function for df_ref_change_reg_with_loc.  */

static void
df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
			      struct df_reg_info *new_df,
			      unsigned int new_regno, rtx loc)
{
  df_ref the_ref = old_df->reg_chain;

  /* Walk OLD_DF's reg chain and move every non-artificial ref whose
     location is exactly LOC over to NEW_DF's chain.  */
  while (the_ref)
    {
      if ((!DF_REF_IS_ARTIFICIAL (the_ref))
	  && DF_REF_LOC (the_ref)
	  && (*DF_REF_LOC (the_ref) == loc))
	{
	  df_ref next_ref = DF_REF_NEXT_REG (the_ref);
	  df_ref prev_ref = DF_REF_PREV_REG (the_ref);
	  df_ref *ref_ptr;
	  struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);

	  DF_REF_REGNO (the_ref) = new_regno;
	  DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];

	  /* Pull the_ref out of the old regno chain.  */
	  if (prev_ref)
	    DF_REF_NEXT_REG (prev_ref) = next_ref;
	  else
	    old_df->reg_chain = next_ref;
	  if (next_ref)
	    DF_REF_PREV_REG (next_ref) = prev_ref;
	  old_df->n_refs--;

	  /* Put the ref into the new regno chain.  */
	  DF_REF_PREV_REG (the_ref) = NULL;
	  DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
	  if (new_df->reg_chain)
	    DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
	  new_df->reg_chain = the_ref;
	  new_df->n_refs++;
	  if (DF_REF_BB (the_ref))
	    df_set_bb_dirty (DF_REF_BB (the_ref));

	  /* Need to sort the record again that the ref was in because
	     the regno is a sorting key.  First, find the right
	     record.  */
	  if (DF_REF_REG_DEF_P (the_ref))
	    ref_ptr = &insn_info->defs;
	  else if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
	    ref_ptr = &insn_info->eq_uses;
	  else
	    ref_ptr = &insn_info->uses;
	  if (dump_file)
	    fprintf (dump_file, "changing reg in insn %d\n",
		     DF_REF_INSN_UID (the_ref));

	  /* Stop if we find the current reference or where the reference
	     needs to be.  */
	  while (*ref_ptr != the_ref && df_ref_compare (*ref_ptr, the_ref) < 0)
	    ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	  if (*ref_ptr != the_ref)
	    {
	      /* The reference needs to be promoted up the list.  */
	      df_ref next = DF_REF_NEXT_LOC (the_ref);
	      DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
	      *ref_ptr = the_ref;
	      /* Scan forward to the ref's old slot and close the gap.  */
	      do
		ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	      while (*ref_ptr != the_ref);
	      *ref_ptr = next;
	    }
	  else if (DF_REF_NEXT_LOC (the_ref)
		   && df_ref_compare (the_ref, DF_REF_NEXT_LOC (the_ref)) > 0)
	    {
	      /* The reference needs to be demoted down the list.  */
	      *ref_ptr = DF_REF_NEXT_LOC (the_ref);
	      /* Scan forward to the first ref not less than the_ref and
		 splice it back in there.  */
	      do
		ref_ptr = &DF_REF_NEXT_LOC (*ref_ptr);
	      while (*ref_ptr && df_ref_compare (the_ref, *ref_ptr) > 0);
	      DF_REF_NEXT_LOC (the_ref) = *ref_ptr;
	      *ref_ptr = the_ref;
	    }

	  the_ref = next_ref;
	}
      else
	the_ref = DF_REF_NEXT_REG (the_ref);
    }
}
/* Change the regno of register LOC to NEW_REGNO and update the df
   information accordingly.  Refs that do not match LOC are not changed
   which means that artificial refs are not changed since they have no loc.
   This call is to support the SET_REGNO macro. */

void
df_ref_change_reg_with_loc (rtx loc, unsigned int new_regno)
{
  unsigned int old_regno = REGNO (loc);
  if (old_regno == new_regno)
    return;

  if (df)
    {
      df_grow_reg_info ();

      /* Move matching refs from the old regno's def, use and eq_use
	 chains onto the new regno's chains.  */
      df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
				    DF_REG_DEF_GET (new_regno),
				    new_regno, loc);
      df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
				    DF_REG_USE_GET (new_regno),
				    new_regno, loc);
      df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
				    DF_REG_EQ_USE_GET (new_regno),
				    new_regno, loc);
    }

  /* Finally rewrite the rtx itself.  */
  set_mode_and_regno (loc, GET_MODE (loc), new_regno);
}
1915 /* Delete the mw_hardregs that point into the eq_notes. */
1917 static void
1918 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
1920 struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs;
1921 struct df_scan_problem_data *problem_data
1922 = (struct df_scan_problem_data *) df_scan->problem_data;
1924 while (*mw_ptr)
1926 df_mw_hardreg *mw = *mw_ptr;
1927 if (mw->flags & DF_REF_IN_NOTE)
1929 *mw_ptr = DF_MWS_NEXT (mw);
1930 problem_data->mw_reg_pool->remove (mw);
1932 else
1933 mw_ptr = &DF_MWS_NEXT (mw);
/* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN.  */

void
df_notes_rescan (rtx_insn *insn)
{
  struct df_insn_info *insn_info;
  unsigned int uid = INSN_UID (insn);

  if (!df)
    return;

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return;

  /* Do nothing if the insn hasn't been emitted yet.  */
  if (!BLOCK_FOR_INSN (insn))
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));

  /* The client has deferred rescanning: queue the request instead.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
	{
	  insn_info = df_insn_create_insn_record (insn);
	  insn_info->defs = 0;
	  insn_info->uses = 0;
	  insn_info->eq_uses = 0;
	  insn_info->mw_hardregs = 0;
	}

      bitmap_clear_bit (&df->insns_to_delete, uid);
      /* If the insn is set to be rescanned, it does not need to also
	 be notes rescanned.  */
      if (!bitmap_bit_p (&df->insns_to_rescan, uid))
	bitmap_set_bit (&df->insns_to_notes_rescan, INSN_UID (insn));
      return;
    }

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (insn_info)
    {
      basic_block bb = BLOCK_FOR_INSN (insn);
      rtx note;
      struct df_collection_rec collection_rec;
      unsigned int i;

      /* Throw away the existing note-derived info.  */
      df_mw_hardreg_chain_delete_eq_uses (insn_info);
      df_ref_chain_delete (insn_info->eq_uses);
      insn_info->eq_uses = NULL;

      /* Process REG_EQUIV/REG_EQUAL notes */
      for (note = REG_NOTES (insn); note;
	   note = XEXP (note, 1))
	{
	  switch (REG_NOTE_KIND (note))
	    {
	    case REG_EQUIV:
	    case REG_EQUAL:
	      df_uses_record (&collection_rec,
			      &XEXP (note, 0), DF_REF_REG_USE,
			      bb, insn_info, DF_REF_IN_NOTE);
	      /* Fall through to the common break.  */
	    default:
	      break;
	    }
	}

      /* Find some place to put any new mw_hardregs.  */
      df_canonize_collection_rec (&collection_rec);
      struct df_mw_hardreg **mw_ptr = &insn_info->mw_hardregs, *mw;
      FOR_EACH_VEC_ELT (collection_rec.mw_vec, i, mw)
	{
	  /* Insert MW in sorted position in the existing chain.  */
	  while (*mw_ptr && df_mw_compare (*mw_ptr, mw) < 0)
	    mw_ptr = &DF_MWS_NEXT (*mw_ptr);
	  DF_MWS_NEXT (mw) = *mw_ptr;
	  *mw_ptr = mw;
	  mw_ptr = &DF_MWS_NEXT (mw);
	}
      df_refs_add_to_chains (&collection_rec, bb, insn, copy_eq_uses);
    }
  else
    /* Never scanned: do a full rescan instead.  */
    df_insn_rescan (insn);
}
2031 /*----------------------------------------------------------------------------
2032 Hard core instruction scanning code. No external interfaces here,
2033 just a lot of routines that look inside insns.
2034 ----------------------------------------------------------------------------*/
2037 /* Return true if the contents of two df_ref's are identical.
2038 It ignores DF_REF_MARKER. */
2040 static bool
2041 df_ref_equal_p (df_ref ref1, df_ref ref2)
2043 if (!ref2)
2044 return false;
2046 if (ref1 == ref2)
2047 return true;
2049 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2050 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2051 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2052 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2053 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2054 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2055 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2056 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2057 return false;
2059 switch (DF_REF_CLASS (ref1))
2061 case DF_REF_ARTIFICIAL:
2062 case DF_REF_BASE:
2063 return true;
2065 case DF_REF_REGULAR:
2066 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2068 default:
2069 gcc_unreachable ();
2071 return false;
/* Compare REF1 and REF2 for sorting.  This is only called from places
   where all of the refs are of the same type, in the same insn, and
   have the same bb.  So these fields are not checked.  */

static int
df_ref_compare (df_ref ref1, df_ref ref2)
{
  if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
    return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);

  if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
    return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);

  if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
    return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);

  /* Same regno but different rtx: fall back to creation order.  */
  if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  /* Cannot look at the LOC field on artificial refs.  */
  if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
      && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
    return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);

  if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
    {
      /* If two refs are identical except that one of them is from
	 a mw and one is not, we need to have the one with the mw
	 first.  */
      if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
	  DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
	return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
      else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
	return -1;
      else
	return 1;
    }

  /* Fully tied: creation order is the final tie-breaker, keeping the
     sort deterministic.  */
  return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
}
2116 /* Like df_ref_compare, but compare two df_ref* pointers R1 and R2. */
2118 static int
2119 df_ref_ptr_compare (const void *r1, const void *r2)
2121 return df_ref_compare (*(const df_ref *) r1, *(const df_ref *) r2);
/* Sort and compress a set of refs.  */

static void
df_sort_and_compress_refs (vec<df_ref, va_heap> *ref_vec)
{
  unsigned int count;
  unsigned int i;
  unsigned int dist = 0;

  count = ref_vec->length ();

  /* If there are 1 or 0 elements, there is nothing to do.  */
  if (count < 2)
    return;
  else if (count == 2)
    {
      df_ref r0 = (*ref_vec)[0];
      df_ref r1 = (*ref_vec)[1];
      if (df_ref_compare (r0, r1) > 0)
	std::swap ((*ref_vec)[0], (*ref_vec)[1]);
    }
  else
    {
      /* Probe for the first out-of-order adjacent pair.  */
      for (i = 0; i < count - 1; i++)
	{
	  df_ref r0 = (*ref_vec)[i];
	  df_ref r1 = (*ref_vec)[i + 1];
	  if (df_ref_compare (r0, r1) >= 0)
	    break;
	}
      /* If the array is already strictly ordered,
	 which is the most common case for large COUNT case
	 (which happens for CALL INSNs),
	 no need to sort and filter out duplicate.
	 Simply return the count.
	 Make sure DF_GET_ADD_REFS adds refs in the increasing order
	 of DF_REF_COMPARE.  */
      if (i == count - 1)
	return;
      ref_vec->qsort (df_ref_ptr_compare);
    }

  /* Compress duplicates in place; DIST is how many duplicates have been
     freed so far, i.e. how far the tail must shift left.  */
  for (i=0; i<count-dist; i++)
    {
      /* Find the next ref that is not equal to the current ref.  */
      while (i + dist + 1 < count
	     && df_ref_equal_p ((*ref_vec)[i],
				(*ref_vec)[i + dist + 1]))
	{
	  df_free_ref ((*ref_vec)[i + dist + 1]);
	  dist++;
	}
      /* Copy it down to the next position.  */
      if (dist && i + dist + 1 < count)
	(*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
    }

  count -= dist;
  ref_vec->truncate (count);
}
2186 /* Return true if the contents of two df_ref's are identical.
2187 It ignores DF_REF_MARKER. */
2189 static bool
2190 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2192 if (!mw2)
2193 return false;
2194 return (mw1 == mw2) ||
2195 (mw1->mw_reg == mw2->mw_reg
2196 && mw1->type == mw2->type
2197 && mw1->flags == mw2->flags
2198 && mw1->start_regno == mw2->start_regno
2199 && mw1->end_regno == mw2->end_regno);
2203 /* Compare MW1 and MW2 for sorting. */
2205 static int
2206 df_mw_compare (const df_mw_hardreg *mw1, const df_mw_hardreg *mw2)
2208 if (mw1->type != mw2->type)
2209 return mw1->type - mw2->type;
2211 if (mw1->flags != mw2->flags)
2212 return mw1->flags - mw2->flags;
2214 if (mw1->start_regno != mw2->start_regno)
2215 return mw1->start_regno - mw2->start_regno;
2217 if (mw1->end_regno != mw2->end_regno)
2218 return mw1->end_regno - mw2->end_regno;
2220 if (mw1->mw_reg != mw2->mw_reg)
2221 return mw1->mw_order - mw2->mw_order;
2223 return 0;
2226 /* Like df_mw_compare, but compare two df_mw_hardreg** pointers R1 and R2. */
2228 static int
2229 df_mw_ptr_compare (const void *m1, const void *m2)
2231 return df_mw_compare (*(const df_mw_hardreg *const *) m1,
2232 *(const df_mw_hardreg *const *) m2);
2235 /* Sort and compress a set of refs. */
2237 static void
2238 df_sort_and_compress_mws (vec<df_mw_hardreg *, va_heap> *mw_vec)
2240 unsigned int count;
2241 struct df_scan_problem_data *problem_data
2242 = (struct df_scan_problem_data *) df_scan->problem_data;
2243 unsigned int i;
2244 unsigned int dist = 0;
2246 count = mw_vec->length ();
2247 if (count < 2)
2248 return;
2249 else if (count == 2)
2251 struct df_mw_hardreg *m0 = (*mw_vec)[0];
2252 struct df_mw_hardreg *m1 = (*mw_vec)[1];
2253 if (df_mw_compare (m0, m1) > 0)
2255 struct df_mw_hardreg *tmp = (*mw_vec)[0];
2256 (*mw_vec)[0] = (*mw_vec)[1];
2257 (*mw_vec)[1] = tmp;
2260 else
2261 mw_vec->qsort (df_mw_ptr_compare);
2263 for (i=0; i<count-dist; i++)
2265 /* Find the next ref that is not equal to the current ref. */
2266 while (i + dist + 1 < count
2267 && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
2269 problem_data->mw_reg_pool->remove ((*mw_vec)[i + dist + 1]);
2270 dist++;
2272 /* Copy it down to the next position. */
2273 if (dist && i + dist + 1 < count)
2274 (*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
2277 count -= dist;
2278 mw_vec->truncate (count);
2282 /* Sort and remove duplicates from the COLLECTION_REC. */
2284 static void
2285 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2287 df_sort_and_compress_refs (&collection_rec->def_vec);
2288 df_sort_and_compress_refs (&collection_rec->use_vec);
2289 df_sort_and_compress_refs (&collection_rec->eq_use_vec);
2290 df_sort_and_compress_mws (&collection_rec->mw_vec);
2294 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2296 static void
2297 df_install_ref (df_ref this_ref,
2298 struct df_reg_info *reg_info,
2299 struct df_ref_info *ref_info,
2300 bool add_to_table)
2302 unsigned int regno = DF_REF_REGNO (this_ref);
2303 /* Add the ref to the reg_{def,use,eq_use} chain. */
2304 df_ref head = reg_info->reg_chain;
2306 reg_info->reg_chain = this_ref;
2307 reg_info->n_refs++;
2309 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2311 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2312 df->hard_regs_live_count[regno]++;
2315 gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
2316 && DF_REF_PREV_REG (this_ref) == NULL);
2318 DF_REF_NEXT_REG (this_ref) = head;
2320 /* We cannot actually link to the head of the chain. */
2321 DF_REF_PREV_REG (this_ref) = NULL;
2323 if (head)
2324 DF_REF_PREV_REG (head) = this_ref;
2326 if (add_to_table)
2328 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2329 df_check_and_grow_ref_info (ref_info, 1);
2330 DF_REF_ID (this_ref) = ref_info->table_size;
2331 /* Add the ref to the big array of defs. */
2332 ref_info->refs[ref_info->table_size] = this_ref;
2333 ref_info->table_size++;
2335 else
2336 DF_REF_ID (this_ref) = -1;
2338 ref_info->total_size++;
2342 /* This function takes one of the groups of refs (defs, uses or
2343 eq_uses) and installs the entire group into the insn. It also adds
2344 each of these refs into the appropriate chains. */
2346 static df_ref
2347 df_install_refs (basic_block bb,
2348 const vec<df_ref, va_heap> *old_vec,
2349 struct df_reg_info **reg_info,
2350 struct df_ref_info *ref_info,
2351 bool is_notes)
2353 unsigned int count = old_vec->length ();
2354 if (count)
2356 bool add_to_table;
2357 df_ref this_ref;
2358 unsigned int ix;
2360 switch (ref_info->ref_order)
2362 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2363 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2364 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2365 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2366 add_to_table = true;
2367 break;
2368 case DF_REF_ORDER_UNORDERED:
2369 case DF_REF_ORDER_BY_REG:
2370 case DF_REF_ORDER_BY_INSN:
2371 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2372 add_to_table = !is_notes;
2373 break;
2374 default:
2375 add_to_table = false;
2376 break;
2379 /* Do not add if ref is not in the right blocks. */
2380 if (add_to_table && df->analyze_subset)
2381 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2383 FOR_EACH_VEC_ELT (*old_vec, ix, this_ref)
2385 DF_REF_NEXT_LOC (this_ref) = (ix + 1 < old_vec->length ()
2386 ? (*old_vec)[ix + 1]
2387 : NULL);
2388 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2389 ref_info, add_to_table);
2391 return (*old_vec)[0];
2393 else
2394 return 0;
2398 /* This function takes the mws installs the entire group into the
2399 insn. */
2401 static struct df_mw_hardreg *
2402 df_install_mws (const vec<df_mw_hardreg *, va_heap> *old_vec)
2404 unsigned int count = old_vec->length ();
2405 if (count)
2407 for (unsigned int i = 0; i < count - 1; i++)
2408 DF_MWS_NEXT ((*old_vec)[i]) = (*old_vec)[i + 1];
2409 DF_MWS_NEXT ((*old_vec)[count - 1]) = 0;
2410 return (*old_vec)[0];
2412 else
2413 return 0;
2417 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2418 chains and update other necessary information. */
2420 static void
2421 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2422 basic_block bb, rtx_insn *insn, unsigned int flags)
2424 if (insn)
2426 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
2427 /* If there is a vector in the collection rec, add it to the
2428 insn. A null rec is a signal that the caller will handle the
2429 chain specially. */
2430 if (flags & copy_defs)
2432 gcc_checking_assert (!insn_rec->defs);
2433 insn_rec->defs
2434 = df_install_refs (bb, &collection_rec->def_vec,
2435 df->def_regs,
2436 &df->def_info, false);
2438 if (flags & copy_uses)
2440 gcc_checking_assert (!insn_rec->uses);
2441 insn_rec->uses
2442 = df_install_refs (bb, &collection_rec->use_vec,
2443 df->use_regs,
2444 &df->use_info, false);
2446 if (flags & copy_eq_uses)
2448 gcc_checking_assert (!insn_rec->eq_uses);
2449 insn_rec->eq_uses
2450 = df_install_refs (bb, &collection_rec->eq_use_vec,
2451 df->eq_use_regs,
2452 &df->use_info, true);
2454 if (flags & copy_mw)
2456 gcc_checking_assert (!insn_rec->mw_hardregs);
2457 insn_rec->mw_hardregs
2458 = df_install_mws (&collection_rec->mw_vec);
2461 else
2463 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2465 gcc_checking_assert (!bb_info->artificial_defs);
2466 bb_info->artificial_defs
2467 = df_install_refs (bb, &collection_rec->def_vec,
2468 df->def_regs,
2469 &df->def_info, false);
2470 gcc_checking_assert (!bb_info->artificial_uses);
2471 bb_info->artificial_uses
2472 = df_install_refs (bb, &collection_rec->use_vec,
2473 df->use_regs,
2474 &df->use_info, false);
2479 /* Allocate a ref and initialize its fields. */
2481 static df_ref
2482 df_ref_create_structure (enum df_ref_class cl,
2483 struct df_collection_rec *collection_rec,
2484 rtx reg, rtx *loc,
2485 basic_block bb, struct df_insn_info *info,
2486 enum df_ref_type ref_type,
2487 int ref_flags)
2489 df_ref this_ref = NULL;
2490 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2491 struct df_scan_problem_data *problem_data
2492 = (struct df_scan_problem_data *) df_scan->problem_data;
2494 switch (cl)
2496 case DF_REF_BASE:
2497 this_ref = (df_ref) (problem_data->ref_base_pool->allocate ());
2498 gcc_checking_assert (loc == NULL);
2499 break;
2501 case DF_REF_ARTIFICIAL:
2502 this_ref = (df_ref) (problem_data->ref_artificial_pool->allocate ());
2503 this_ref->artificial_ref.bb = bb;
2504 gcc_checking_assert (loc == NULL);
2505 break;
2507 case DF_REF_REGULAR:
2508 this_ref = (df_ref) (problem_data->ref_regular_pool->allocate ());
2509 this_ref->regular_ref.loc = loc;
2510 gcc_checking_assert (loc);
2511 break;
2514 DF_REF_CLASS (this_ref) = cl;
2515 DF_REF_ID (this_ref) = -1;
2516 DF_REF_REG (this_ref) = reg;
2517 DF_REF_REGNO (this_ref) = regno;
2518 DF_REF_TYPE (this_ref) = ref_type;
2519 DF_REF_INSN_INFO (this_ref) = info;
2520 DF_REF_CHAIN (this_ref) = NULL;
2521 DF_REF_FLAGS (this_ref) = ref_flags;
2522 DF_REF_NEXT_REG (this_ref) = NULL;
2523 DF_REF_PREV_REG (this_ref) = NULL;
2524 DF_REF_ORDER (this_ref) = df->ref_order++;
2526 /* We need to clear this bit because fwprop, and in the future
2527 possibly other optimizations sometimes create new refs using ond
2528 refs as the model. */
2529 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2531 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2532 if (regno < FIRST_PSEUDO_REGISTER
2533 && !DF_REF_IS_ARTIFICIAL (this_ref)
2534 && !DEBUG_INSN_P (DF_REF_INSN (this_ref)))
2536 if (DF_REF_REG_DEF_P (this_ref))
2538 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2539 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2541 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2542 && (regno == FRAME_POINTER_REGNUM
2543 || regno == ARG_POINTER_REGNUM)))
2544 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2547 if (collection_rec)
2549 if (DF_REF_REG_DEF_P (this_ref))
2550 collection_rec->def_vec.safe_push (this_ref);
2551 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2552 collection_rec->eq_use_vec.safe_push (this_ref);
2553 else
2554 collection_rec->use_vec.safe_push (this_ref);
2556 else
2557 df_install_ref_incremental (this_ref);
2559 return this_ref;
2563 /* Create new references of type DF_REF_TYPE for each part of register REG
2564 at address LOC within INSN of BB. */
2567 static void
2568 df_ref_record (enum df_ref_class cl,
2569 struct df_collection_rec *collection_rec,
2570 rtx reg, rtx *loc,
2571 basic_block bb, struct df_insn_info *insn_info,
2572 enum df_ref_type ref_type,
2573 int ref_flags)
2575 unsigned int regno;
2577 gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2579 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2580 if (regno < FIRST_PSEUDO_REGISTER)
2582 struct df_mw_hardreg *hardreg = NULL;
2583 struct df_scan_problem_data *problem_data
2584 = (struct df_scan_problem_data *) df_scan->problem_data;
2585 unsigned int i;
2586 unsigned int endregno;
2587 df_ref ref;
2589 if (GET_CODE (reg) == SUBREG)
2591 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2592 SUBREG_BYTE (reg), GET_MODE (reg));
2593 endregno = regno + subreg_nregs (reg);
2595 else
2596 endregno = END_REGNO (reg);
2598 /* If this is a multiword hardreg, we create some extra
2599 datastructures that will enable us to easily build REG_DEAD
2600 and REG_UNUSED notes. */
2601 if (collection_rec
2602 && (endregno != regno + 1) && insn_info)
2604 /* Sets to a subreg of a multiword register are partial.
2605 Sets to a non-subreg of a multiword register are not. */
2606 if (GET_CODE (reg) == SUBREG)
2607 ref_flags |= DF_REF_PARTIAL;
2608 ref_flags |= DF_REF_MW_HARDREG;
2610 hardreg = problem_data->mw_reg_pool->allocate ();
2611 hardreg->type = ref_type;
2612 hardreg->flags = ref_flags;
2613 hardreg->mw_reg = reg;
2614 hardreg->start_regno = regno;
2615 hardreg->end_regno = endregno - 1;
2616 hardreg->mw_order = df->ref_order++;
2617 collection_rec->mw_vec.safe_push (hardreg);
2620 for (i = regno; i < endregno; i++)
2622 ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
2623 bb, insn_info, ref_type, ref_flags);
2625 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2628 else
2630 df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
2631 ref_type, ref_flags);
2636 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2637 covered by the outer mode is smaller than that covered by the inner mode,
2638 is a read-modify-write operation.
2639 This function returns true iff the SUBREG X is such a SUBREG. */
2641 bool
2642 df_read_modify_subreg_p (rtx x)
2644 unsigned int isize, osize;
2645 if (GET_CODE (x) != SUBREG)
2646 return false;
2647 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2648 osize = GET_MODE_SIZE (GET_MODE (x));
2649 return isize > osize
2650 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2654 /* Process all the registers defined in the rtx pointed by LOC.
2655 Autoincrement/decrement definitions will be picked up by df_uses_record.
2656 Any change here has to be matched in df_find_hard_reg_defs_1. */
2658 static void
2659 df_def_record_1 (struct df_collection_rec *collection_rec,
2660 rtx *loc, basic_block bb, struct df_insn_info *insn_info,
2661 int flags)
2663 rtx dst = *loc;
2665 /* It is legal to have a set destination be a parallel. */
2666 if (GET_CODE (dst) == PARALLEL)
2668 int i;
2669 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2671 rtx temp = XVECEXP (dst, 0, i);
2672 gcc_assert (GET_CODE (temp) == EXPR_LIST);
2673 df_def_record_1 (collection_rec, &XEXP (temp, 0),
2674 bb, insn_info, flags);
2676 return;
2679 if (GET_CODE (dst) == STRICT_LOW_PART)
2681 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2683 loc = &XEXP (dst, 0);
2684 dst = *loc;
2687 if (GET_CODE (dst) == ZERO_EXTRACT)
2689 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2691 loc = &XEXP (dst, 0);
2692 dst = *loc;
2695 /* At this point if we do not have a reg or a subreg, just return. */
2696 if (REG_P (dst))
2698 df_ref_record (DF_REF_REGULAR, collection_rec,
2699 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2701 /* We want to keep sp alive everywhere - by making all
2702 writes to sp also use of sp. */
2703 if (REGNO (dst) == STACK_POINTER_REGNUM)
2704 df_ref_record (DF_REF_BASE, collection_rec,
2705 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
2707 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2709 if (df_read_modify_subreg_p (dst))
2710 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2712 flags |= DF_REF_SUBREG;
2714 df_ref_record (DF_REF_REGULAR, collection_rec,
2715 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2720 /* Process all the registers defined in the pattern rtx, X. Any change
2721 here has to be matched in df_find_hard_reg_defs. */
2723 static void
2724 df_defs_record (struct df_collection_rec *collection_rec,
2725 rtx x, basic_block bb, struct df_insn_info *insn_info,
2726 int flags)
2728 RTX_CODE code = GET_CODE (x);
2729 int i;
2731 switch (code)
2733 case SET:
2734 df_def_record_1 (collection_rec, &SET_DEST (x), bb, insn_info, flags);
2735 break;
2737 case CLOBBER:
2738 flags |= DF_REF_MUST_CLOBBER;
2739 df_def_record_1 (collection_rec, &XEXP (x, 0), bb, insn_info, flags);
2740 break;
2742 case COND_EXEC:
2743 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2744 bb, insn_info, DF_REF_CONDITIONAL);
2745 break;
2747 case PARALLEL:
2748 for (i = 0; i < XVECLEN (x, 0); i++)
2749 df_defs_record (collection_rec, XVECEXP (x, 0, i),
2750 bb, insn_info, flags);
2751 break;
2752 default:
2753 /* No DEFs to record in other cases */
2754 break;
2758 /* Set bits in *DEFS for hard registers found in the rtx DST, which is the
2759 destination of a set or clobber. This has to match the logic in
2760 df_def_record_1. */
2762 static void
2763 df_find_hard_reg_defs_1 (rtx dst, HARD_REG_SET *defs)
2765 /* It is legal to have a set destination be a parallel. */
2766 if (GET_CODE (dst) == PARALLEL)
2768 int i;
2769 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2771 rtx temp = XVECEXP (dst, 0, i);
2772 gcc_assert (GET_CODE (temp) == EXPR_LIST);
2773 df_find_hard_reg_defs_1 (XEXP (temp, 0), defs);
2775 return;
2778 if (GET_CODE (dst) == STRICT_LOW_PART)
2779 dst = XEXP (dst, 0);
2781 if (GET_CODE (dst) == ZERO_EXTRACT)
2782 dst = XEXP (dst, 0);
2784 /* At this point if we do not have a reg or a subreg, just return. */
2785 if (REG_P (dst) && HARD_REGISTER_P (dst))
2786 SET_HARD_REG_BIT (*defs, REGNO (dst));
2787 else if (GET_CODE (dst) == SUBREG
2788 && REG_P (SUBREG_REG (dst)) && HARD_REGISTER_P (dst))
2789 SET_HARD_REG_BIT (*defs, REGNO (SUBREG_REG (dst)));
2792 /* Set bits in *DEFS for hard registers defined in the pattern X. This
2793 has to match the logic in df_defs_record. */
2795 static void
2796 df_find_hard_reg_defs (rtx x, HARD_REG_SET *defs)
2798 RTX_CODE code = GET_CODE (x);
2799 int i;
2801 switch (code)
2803 case SET:
2804 df_find_hard_reg_defs_1 (SET_DEST (x), defs);
2805 break;
2807 case CLOBBER:
2808 df_find_hard_reg_defs_1 (XEXP (x, 0), defs);
2809 break;
2811 case COND_EXEC:
2812 df_find_hard_reg_defs (COND_EXEC_CODE (x), defs);
2813 break;
2815 case PARALLEL:
2816 for (i = 0; i < XVECLEN (x, 0); i++)
2817 df_find_hard_reg_defs (XVECEXP (x, 0, i), defs);
2818 break;
2819 default:
2820 /* No DEFs to record in other cases */
2821 break;
2826 /* Process all the registers used in the rtx at address LOC. */
2828 static void
2829 df_uses_record (struct df_collection_rec *collection_rec,
2830 rtx *loc, enum df_ref_type ref_type,
2831 basic_block bb, struct df_insn_info *insn_info,
2832 int flags)
2834 RTX_CODE code;
2835 rtx x;
2837 retry:
2838 x = *loc;
2839 if (!x)
2840 return;
2841 code = GET_CODE (x);
2842 switch (code)
2844 case LABEL_REF:
2845 case SYMBOL_REF:
2846 case CONST:
2847 CASE_CONST_ANY:
2848 case PC:
2849 case CC0:
2850 case ADDR_VEC:
2851 case ADDR_DIFF_VEC:
2852 return;
2854 case CLOBBER:
2855 /* If we are clobbering a MEM, mark any registers inside the address
2856 as being used. */
2857 if (MEM_P (XEXP (x, 0)))
2858 df_uses_record (collection_rec,
2859 &XEXP (XEXP (x, 0), 0),
2860 DF_REF_REG_MEM_STORE,
2861 bb, insn_info,
2862 flags);
2864 /* If we're clobbering a REG then we have a def so ignore. */
2865 return;
2867 case MEM:
2868 df_uses_record (collection_rec,
2869 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
2870 bb, insn_info, flags & DF_REF_IN_NOTE);
2871 return;
2873 case SUBREG:
2874 /* While we're here, optimize this case. */
2875 flags |= DF_REF_PARTIAL;
2876 /* In case the SUBREG is not of a REG, do not optimize. */
2877 if (!REG_P (SUBREG_REG (x)))
2879 loc = &SUBREG_REG (x);
2880 df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
2881 return;
2883 /* ... Fall through ... */
2885 case REG:
2886 df_ref_record (DF_REF_REGULAR, collection_rec,
2887 x, loc, bb, insn_info,
2888 ref_type, flags);
2889 return;
2891 case SIGN_EXTRACT:
2892 case ZERO_EXTRACT:
2894 df_uses_record (collection_rec,
2895 &XEXP (x, 1), ref_type, bb, insn_info, flags);
2896 df_uses_record (collection_rec,
2897 &XEXP (x, 2), ref_type, bb, insn_info, flags);
2899 /* If the parameters to the zero or sign extract are
2900 constants, strip them off and recurse, otherwise there is
2901 no information that we can gain from this operation. */
2902 if (code == ZERO_EXTRACT)
2903 flags |= DF_REF_ZERO_EXTRACT;
2904 else
2905 flags |= DF_REF_SIGN_EXTRACT;
2907 df_uses_record (collection_rec,
2908 &XEXP (x, 0), ref_type, bb, insn_info, flags);
2909 return;
2911 break;
2913 case SET:
2915 rtx dst = SET_DEST (x);
2916 gcc_assert (!(flags & DF_REF_IN_NOTE));
2917 df_uses_record (collection_rec,
2918 &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);
2920 switch (GET_CODE (dst))
2922 case SUBREG:
2923 if (df_read_modify_subreg_p (dst))
2925 df_uses_record (collection_rec, &SUBREG_REG (dst),
2926 DF_REF_REG_USE, bb, insn_info,
2927 flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
2928 break;
2930 /* Fall through. */
2931 case REG:
2932 case PARALLEL:
2933 case SCRATCH:
2934 case PC:
2935 case CC0:
2936 break;
2937 case MEM:
2938 df_uses_record (collection_rec, &XEXP (dst, 0),
2939 DF_REF_REG_MEM_STORE, bb, insn_info, flags);
2940 break;
2941 case STRICT_LOW_PART:
2943 rtx *temp = &XEXP (dst, 0);
2944 /* A strict_low_part uses the whole REG and not just the
2945 SUBREG. */
2946 dst = XEXP (dst, 0);
2947 df_uses_record (collection_rec,
2948 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
2949 DF_REF_REG_USE, bb, insn_info,
2950 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
2952 break;
2953 case ZERO_EXTRACT:
2955 df_uses_record (collection_rec, &XEXP (dst, 1),
2956 DF_REF_REG_USE, bb, insn_info, flags);
2957 df_uses_record (collection_rec, &XEXP (dst, 2),
2958 DF_REF_REG_USE, bb, insn_info, flags);
2959 if (GET_CODE (XEXP (dst,0)) == MEM)
2960 df_uses_record (collection_rec, &XEXP (dst, 0),
2961 DF_REF_REG_USE, bb, insn_info,
2962 flags);
2963 else
2964 df_uses_record (collection_rec, &XEXP (dst, 0),
2965 DF_REF_REG_USE, bb, insn_info,
2966 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
2968 break;
2970 default:
2971 gcc_unreachable ();
2973 return;
2976 case RETURN:
2977 case SIMPLE_RETURN:
2978 break;
2980 case ASM_OPERANDS:
2981 case UNSPEC_VOLATILE:
2982 case TRAP_IF:
2983 case ASM_INPUT:
2985 /* Traditional and volatile asm instructions must be
2986 considered to use and clobber all hard registers, all
2987 pseudo-registers and all of memory. So must TRAP_IF and
2988 UNSPEC_VOLATILE operations.
2990 Consider for instance a volatile asm that changes the fpu
2991 rounding mode. An insn should not be moved across this
2992 even if it only uses pseudo-regs because it might give an
2993 incorrectly rounded result.
2995 However, flow.c's liveness computation did *not* do this,
2996 giving the reasoning as " ?!? Unfortunately, marking all
2997 hard registers as live causes massive problems for the
2998 register allocator and marking all pseudos as live creates
2999 mountains of uninitialized variable warnings."
3001 In order to maintain the status quo with regard to liveness
3002 and uses, we do what flow.c did and just mark any regs we
3003 can find in ASM_OPERANDS as used. In global asm insns are
3004 scanned and regs_asm_clobbered is filled out.
3006 For all ASM_OPERANDS, we must traverse the vector of input
3007 operands. We can not just fall through here since then we
3008 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3009 which do not indicate traditional asms unlike their normal
3010 usage. */
3011 if (code == ASM_OPERANDS)
3013 int j;
3015 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3016 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
3017 DF_REF_REG_USE, bb, insn_info, flags);
3018 return;
3020 break;
3023 case VAR_LOCATION:
3024 df_uses_record (collection_rec,
3025 &PAT_VAR_LOCATION_LOC (x),
3026 DF_REF_REG_USE, bb, insn_info, flags);
3027 return;
3029 case PRE_DEC:
3030 case POST_DEC:
3031 case PRE_INC:
3032 case POST_INC:
3033 case PRE_MODIFY:
3034 case POST_MODIFY:
3035 gcc_assert (!DEBUG_INSN_P (insn_info->insn));
3036 /* Catch the def of the register being modified. */
3037 df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
3038 bb, insn_info,
3039 DF_REF_REG_DEF,
3040 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
3042 /* ... Fall through to handle uses ... */
3044 default:
3045 break;
3048 /* Recursively scan the operands of this expression. */
3050 const char *fmt = GET_RTX_FORMAT (code);
3051 int i;
3053 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3055 if (fmt[i] == 'e')
3057 /* Tail recursive case: save a function call level. */
3058 if (i == 0)
3060 loc = &XEXP (x, 0);
3061 goto retry;
3063 df_uses_record (collection_rec, &XEXP (x, i), ref_type,
3064 bb, insn_info, flags);
3066 else if (fmt[i] == 'E')
3068 int j;
3069 for (j = 0; j < XVECLEN (x, i); j++)
3070 df_uses_record (collection_rec,
3071 &XVECEXP (x, i, j), ref_type,
3072 bb, insn_info, flags);
3077 return;
3081 /* For all DF_REF_CONDITIONAL defs, add a corresponding uses. */
3083 static void
3084 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3086 unsigned int ix;
3087 df_ref ref;
3089 FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
3091 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3093 df_ref use;
3095 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3096 DF_REF_LOC (ref), DF_REF_BB (ref),
3097 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3098 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3099 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3105 /* Get call's extra defs and uses (track caller-saved registers). */
3107 static void
3108 df_get_call_refs (struct df_collection_rec *collection_rec,
3109 basic_block bb,
3110 struct df_insn_info *insn_info,
3111 int flags)
3113 rtx note;
3114 bool is_sibling_call;
3115 unsigned int i;
3116 HARD_REG_SET defs_generated;
3117 HARD_REG_SET fn_reg_set_usage;
3119 CLEAR_HARD_REG_SET (defs_generated);
3120 df_find_hard_reg_defs (PATTERN (insn_info->insn), &defs_generated);
3121 is_sibling_call = SIBLING_CALL_P (insn_info->insn);
3122 get_call_reg_set_usage (insn_info->insn, &fn_reg_set_usage,
3123 regs_invalidated_by_call);
3125 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3127 if (i == STACK_POINTER_REGNUM)
3128 /* The stack ptr is used (honorarily) by a CALL insn. */
3129 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3130 NULL, bb, insn_info, DF_REF_REG_USE,
3131 DF_REF_CALL_STACK_USAGE | flags);
3132 else if (global_regs[i])
3134 /* Calls to const functions cannot access any global registers and
3135 calls to pure functions cannot set them. All other calls may
3136 reference any of the global registers, so they are recorded as
3137 used. */
3138 if (!RTL_CONST_CALL_P (insn_info->insn))
3140 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3141 NULL, bb, insn_info, DF_REF_REG_USE, flags);
3142 if (!RTL_PURE_CALL_P (insn_info->insn))
3143 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3144 NULL, bb, insn_info, DF_REF_REG_DEF, flags);
3147 else if (TEST_HARD_REG_BIT (fn_reg_set_usage, i)
3148 /* no clobbers for regs that are the result of the call */
3149 && !TEST_HARD_REG_BIT (defs_generated, i)
3150 && (!is_sibling_call
3151 || !bitmap_bit_p (df->exit_block_uses, i)
3152 || refers_to_regno_p (i, crtl->return_rtx)))
3153 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3154 NULL, bb, insn_info, DF_REF_REG_DEF,
3155 DF_REF_MAY_CLOBBER | flags);
3158 /* Record the registers used to pass arguments, and explicitly
3159 noted as clobbered. */
3160 for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3161 note = XEXP (note, 1))
3163 if (GET_CODE (XEXP (note, 0)) == USE)
3164 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3165 DF_REF_REG_USE, bb, insn_info, flags);
3166 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3168 if (REG_P (XEXP (XEXP (note, 0), 0)))
3170 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3171 if (!TEST_HARD_REG_BIT (defs_generated, regno))
3172 df_defs_record (collection_rec, XEXP (note, 0), bb,
3173 insn_info, flags);
3175 else
3176 df_uses_record (collection_rec, &XEXP (note, 0),
3177 DF_REF_REG_USE, bb, insn_info, flags);
3181 return;
3184 /* Collect all refs in the INSN. This function is free of any
3185 side-effect - it will create and return a lists of df_ref's in the
3186 COLLECTION_REC without putting those refs into existing ref chains
3187 and reg chains. */
3189 static void
3190 df_insn_refs_collect (struct df_collection_rec *collection_rec,
3191 basic_block bb, struct df_insn_info *insn_info)
3193 rtx note;
3194 bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3196 /* Clear out the collection record. */
3197 collection_rec->def_vec.truncate (0);
3198 collection_rec->use_vec.truncate (0);
3199 collection_rec->eq_use_vec.truncate (0);
3200 collection_rec->mw_vec.truncate (0);
3202 /* Process REG_EQUIV/REG_EQUAL notes. */
3203 for (note = REG_NOTES (insn_info->insn); note;
3204 note = XEXP (note, 1))
3206 switch (REG_NOTE_KIND (note))
3208 case REG_EQUIV:
3209 case REG_EQUAL:
3210 df_uses_record (collection_rec,
3211 &XEXP (note, 0), DF_REF_REG_USE,
3212 bb, insn_info, DF_REF_IN_NOTE);
3213 break;
3214 case REG_NON_LOCAL_GOTO:
3215 /* The frame ptr is used by a non-local goto. */
3216 df_ref_record (DF_REF_BASE, collection_rec,
3217 regno_reg_rtx[FRAME_POINTER_REGNUM],
3218 NULL, bb, insn_info,
3219 DF_REF_REG_USE, 0);
3220 if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
3221 df_ref_record (DF_REF_BASE, collection_rec,
3222 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3223 NULL, bb, insn_info,
3224 DF_REF_REG_USE, 0);
3225 break;
3226 default:
3227 break;
3231 /* For CALL_INSNs, first record DF_REF_BASE register defs, as well as
3232 uses from CALL_INSN_FUNCTION_USAGE. */
3233 if (CALL_P (insn_info->insn))
3234 df_get_call_refs (collection_rec, bb, insn_info,
3235 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3237 /* Record other defs. These should be mostly for DF_REF_REGULAR, so
3238 that a qsort on the defs is unnecessary in most cases. */
3239 df_defs_record (collection_rec,
3240 PATTERN (insn_info->insn), bb, insn_info, 0);
3242 /* Record the register uses. */
3243 df_uses_record (collection_rec,
3244 &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);
3246 /* DF_REF_CONDITIONAL needs corresponding USES. */
3247 if (is_cond_exec)
3248 df_get_conditional_uses (collection_rec);
3250 df_canonize_collection_rec (collection_rec);
3253 /* Recompute the luids for the insns in BB. */
3255 void
3256 df_recompute_luids (basic_block bb)
3258 rtx_insn *insn;
3259 int luid = 0;
3261 df_grow_insn_info ();
3263 /* Scan the block an insn at a time from beginning to end. */
3264 FOR_BB_INSNS (bb, insn)
3266 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3267 /* Inserting labels does not always trigger the incremental
3268 rescanning. */
3269 if (!insn_info)
3271 gcc_assert (!INSN_P (insn));
3272 insn_info = df_insn_create_insn_record (insn);
3275 DF_INSN_INFO_LUID (insn_info) = luid;
3276 if (INSN_P (insn))
3277 luid++;
3282 /* Collect all artificial refs at the block level for BB and add them
3283 to COLLECTION_REC. */
3285 static void
3286 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3288 collection_rec->def_vec.truncate (0);
3289 collection_rec->use_vec.truncate (0);
3290 collection_rec->eq_use_vec.truncate (0);
3291 collection_rec->mw_vec.truncate (0);
3293 if (bb->index == ENTRY_BLOCK)
3295 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3296 return;
3298 else if (bb->index == EXIT_BLOCK)
3300 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3301 return;
3304 if (bb_has_eh_pred (bb))
3306 unsigned int i;
3307 /* Mark the registers that will contain data for the handler. */
3308 for (i = 0; ; ++i)
3310 unsigned regno = EH_RETURN_DATA_REGNO (i);
3311 if (regno == INVALID_REGNUM)
3312 break;
3313 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3314 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3318 /* Add the hard_frame_pointer if this block is the target of a
3319 non-local goto. */
3320 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3321 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3322 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3324 /* Add the artificial uses. */
3325 if (bb->index >= NUM_FIXED_BLOCKS)
3327 bitmap_iterator bi;
3328 unsigned int regno;
3329 bitmap au = bb_has_eh_pred (bb)
3330 ? &df->eh_block_artificial_uses
3331 : &df->regular_block_artificial_uses;
3333 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3335 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3336 bb, NULL, DF_REF_REG_USE, 0);
3340 df_canonize_collection_rec (collection_rec);
3344 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3346 void
3347 df_bb_refs_record (int bb_index, bool scan_insns)
3349 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, bb_index);
3350 rtx_insn *insn;
3351 int luid = 0;
3353 if (!df)
3354 return;
3356 df_collection_rec collection_rec;
3357 df_grow_bb_info (df_scan);
3358 if (scan_insns)
3359 /* Scan the block an insn at a time from beginning to end. */
3360 FOR_BB_INSNS (bb, insn)
3362 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3363 gcc_assert (!insn_info);
3365 insn_info = df_insn_create_insn_record (insn);
3366 if (INSN_P (insn))
3368 /* Record refs within INSN. */
3369 DF_INSN_INFO_LUID (insn_info) = luid++;
3370 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3371 df_refs_add_to_chains (&collection_rec, bb, insn, copy_all);
3373 DF_INSN_INFO_LUID (insn_info) = luid;
3376 /* Other block level artificial refs */
3377 df_bb_refs_collect (&collection_rec, bb);
3378 df_refs_add_to_chains (&collection_rec, bb, NULL, copy_all);
3380 /* Now that the block has been processed, set the block as dirty so
3381 LR and LIVE will get it processed. */
3382 df_set_bb_dirty (bb);
/* Get the artificial use set for a regular (i.e. non-exit/non-entry)
   block.  */

static void
df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
{
#ifdef EH_USES
  unsigned int i;
#endif

  bitmap_clear (regular_block_artificial_uses);

  if (reload_completed)
    {
      /* After reload only the hard frame pointer (when it survives)
	 needs to be forced live in every block.  */
      if (frame_pointer_needed)
	bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
    }
  else
    /* Before reload, there are a few registers that must be forced
       live everywhere -- which might not already be the case for
       blocks within infinite loops.  */
    {
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);

      /* If the frame and hard frame pointers are distinct registers,
	 the hard one must be kept live as well.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	bitmap_set_bit (regular_block_artificial_uses,
			HARD_FRAME_POINTER_REGNUM);

      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      if (picreg != INVALID_REGNUM
	  && fixed_regs[picreg])
	bitmap_set_bit (regular_block_artificial_uses, picreg);
    }
  /* The all-important stack pointer must always be live.  */
  bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);

#ifdef EH_USES
  /* EH_USES registers are used:
     1) at all insns that might throw (calls or with -fnon-call-exceptions
	trapping insns)
     2) in all EH edges
     3) to support backtraces and/or debugging, anywhere between their
	initialization and where the saved registers are restored
	from them, including the cases where we don't reach the epilogue
	(noreturn call or infinite loop).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (EH_USES (i))
      bitmap_set_bit (regular_block_artificial_uses, i);
#endif
}
/* Get the artificial use set for an eh block.  */

static void
df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
{
  bitmap_clear (eh_block_artificial_uses);

  /* The following code (down through the arg_pointer setting) APPEARS
     to be necessary because there is nothing that actually
     describes what the exception handling code may actually need
     to keep alive.  */
  if (reload_completed)
    {
      if (frame_pointer_needed)
	{
	  bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
	  if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	    bitmap_set_bit (eh_block_artificial_uses,
			    HARD_FRAME_POINTER_REGNUM);
	}
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
    }
}
3477 /*----------------------------------------------------------------------------
3478 Specialized hard register scanning functions.
3479 ----------------------------------------------------------------------------*/
3482 /* Mark a register in SET. Hard registers in large modes get all
3483 of their component registers set as well. */
3485 static void
3486 df_mark_reg (rtx reg, void *vset)
3488 bitmap_set_range ((bitmap) vset, REGNO (reg), REG_NREGS (reg));
/* Set the bit for regs that are considered being defined at the entry.  */

static void
df_get_entry_block_def_set (bitmap entry_block_defs)
{
  rtx r;
  int i;

  bitmap_clear (entry_block_defs);

  /* Global registers and (incoming) argument registers are defined
     before the first insn of the function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (global_regs[i])
	bitmap_set_bit (entry_block_defs, i);
      if (FUNCTION_ARG_REGNO_P (i))
	bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
    }

  /* The always important stack pointer.  */
  bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);

  /* Once the prologue has been generated, all of these registers
     should just show up in the first regular block.  */
  if (targetm.have_prologue () && epilogue_completed)
    {
      /* Defs for the callee saved registers are inserted so that the
	 pushes have some defining location.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
	  bitmap_set_bit (entry_block_defs, i);
    }

  /* The hidden struct-return pointer, if any, arrives in a register.  */
  r = targetm.calls.struct_value_rtx (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  /* If the function has an incoming STATIC_CHAIN, it has to show up
     in the entry def set.  */
  r = targetm.calls.static_chain (current_function_decl, true);
  if (r && REG_P (r))
    bitmap_set_bit (entry_block_defs, REGNO (r));

  if ((!reload_completed) || frame_pointer_needed)
    {
      /* Any reference to any pseudo before reload is a potential
	 reference of the frame pointer.  */
      bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);

      /* If they are different, also mark the hard frame pointer as live.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
    }

  /* These registers are live everywhere.  */
  if (!reload_completed)
    {
      /* Pseudos with argument area equivalences may require
	 reloading via the argument pointer.  */
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && fixed_regs[ARG_POINTER_REGNUM])
	bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);

      /* Any constant, or pseudo with constant equivalences, may
	 require reloading from memory using the pic register.  */
      unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;
      if (picreg != INVALID_REGNUM
	  && fixed_regs[picreg])
	bitmap_set_bit (entry_block_defs, picreg);
    }

#ifdef INCOMING_RETURN_ADDR_RTX
  /* The incoming return address, when it lives in a register, is
     defined on entry as well.  */
  if (REG_P (INCOMING_RETURN_ADDR_RTX))
    bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
#endif

  /* Let the target add anything else it considers defined on entry.  */
  targetm.extra_live_on_entry (entry_block_defs);
}
3572 /* Return the (conservative) set of hard registers that are defined on
3573 entry to the function.
3574 It uses df->entry_block_defs to determine which register
3575 reference to include. */
3577 static void
3578 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3579 bitmap entry_block_defs)
3581 unsigned int i;
3582 bitmap_iterator bi;
3584 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3586 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3587 ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_DEF, 0);
3590 df_canonize_collection_rec (collection_rec);
3594 /* Record the (conservative) set of hard registers that are defined on
3595 entry to the function. */
3597 static void
3598 df_record_entry_block_defs (bitmap entry_block_defs)
3600 struct df_collection_rec collection_rec;
3601 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3603 /* Process bb_refs chain */
3604 df_refs_add_to_chains (&collection_rec,
3605 BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK),
3606 NULL,
3607 copy_defs);
/* Update the defs in the entry block.  */

void
df_update_entry_block_defs (void)
{
  bitmap_head refs;
  bool changed = false;

  /* Recompute the def set from scratch and compare it against the
     cached one; only rebuild the artificial defs if it changed.  */
  bitmap_initialize (&refs, &df_bitmap_obstack);
  df_get_entry_block_def_set (&refs);
  if (df->entry_block_defs)
    {
      if (!bitmap_equal_p (df->entry_block_defs, &refs))
	{
	  /* The set changed: discard the stale artificial defs (and
	     their du chains); they are rebuilt below.  */
	  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
	  df_ref_chain_delete_du_chain (bb_info->artificial_defs);
	  df_ref_chain_delete (bb_info->artificial_defs);
	  bb_info->artificial_defs = NULL;
	  changed = true;
	}
    }
  else
    {
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) df_scan->problem_data;
      /* entry_block_defs is always allocated when df-scan is active,
	 so this branch is asserted unreachable; the two statements
	 after the assert are dead code kept for documentation.  */
      gcc_unreachable ();
      df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
      changed = true;
    }

  if (changed)
    {
      df_record_entry_block_defs (&refs);
      bitmap_copy (df->entry_block_defs, &refs);
      df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, ENTRY_BLOCK));
    }
  bitmap_clear (&refs);
}
/* Set the bit for regs that are considered being used at the exit.  */

static void
df_get_exit_block_use_set (bitmap exit_block_uses)
{
  unsigned int i;
  unsigned int picreg = PIC_OFFSET_TABLE_REGNUM;

  bitmap_clear (exit_block_uses);

  /* Stack pointer is always live at the exit.  */
  bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);

  /* Mark the frame pointer if needed at the end of the function.
     If we end up eliminating it, it will be removed from the live
     list of each basic block by reload.  */

  if ((!reload_completed) || frame_pointer_needed)
    {
      bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);

      /* If they are different, also mark the hard frame pointer as live.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && !LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
	bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
    }

  /* Many architectures have a GP register even without flag_pic.
     Assume the pic register is not in use, or will be handled by
     other means, if it is not fixed.  */
  if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      && picreg != INVALID_REGNUM
      && fixed_regs[picreg])
    bitmap_set_bit (exit_block_uses, picreg);

  /* Mark all global registers, and all registers used by the
     epilogue as being live at the end of the function since they
     may be referenced by our caller.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      bitmap_set_bit (exit_block_uses, i);

  if (targetm.have_epilogue () && epilogue_completed)
    {
      /* Mark all call-saved registers that we actually used.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
	    && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
	  bitmap_set_bit (exit_block_uses, i);
    }

  /* Mark the registers that will contain data for the handler.
     EH_RETURN_DATA_REGNO yields INVALID_REGNUM past the last one.  */
  if (reload_completed && crtl->calls_eh_return)
    for (i = 0; ; ++i)
      {
	unsigned regno = EH_RETURN_DATA_REGNO (i);
	if (regno == INVALID_REGNUM)
	  break;
	bitmap_set_bit (exit_block_uses, regno);
      }

#ifdef EH_RETURN_STACKADJ_RTX
  if ((!targetm.have_epilogue () || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_STACKADJ_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }
#endif

#ifdef EH_RETURN_HANDLER_RTX
  if ((!targetm.have_epilogue () || ! epilogue_completed)
      && crtl->calls_eh_return)
    {
      rtx tmp = EH_RETURN_HANDLER_RTX;
      if (tmp && REG_P (tmp))
	df_mark_reg (tmp, exit_block_uses);
    }
#endif

  /* Mark function return value.  */
  diddle_return_value (df_mark_reg, (void*) exit_block_uses);
}
3737 /* Return the refs of hard registers that are used in the exit block.
3738 It uses df->exit_block_uses to determine register to include. */
3740 static void
3741 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3743 unsigned int i;
3744 bitmap_iterator bi;
3746 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3747 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3748 EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
3750 /* It is deliberate that this is not put in the exit block uses but
3751 I do not know why. */
3752 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3753 && reload_completed
3754 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3755 && bb_has_eh_pred (EXIT_BLOCK_PTR_FOR_FN (cfun))
3756 && fixed_regs[ARG_POINTER_REGNUM])
3757 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3758 EXIT_BLOCK_PTR_FOR_FN (cfun), NULL, DF_REF_REG_USE, 0);
3760 df_canonize_collection_rec (collection_rec);
3764 /* Record the set of hard registers that are used in the exit block.
3765 It uses df->exit_block_uses to determine which bit to include. */
3767 static void
3768 df_record_exit_block_uses (bitmap exit_block_uses)
3770 struct df_collection_rec collection_rec;
3771 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3773 /* Process bb_refs chain */
3774 df_refs_add_to_chains (&collection_rec,
3775 BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK),
3776 NULL,
3777 copy_uses);
3781 /* Update the uses in the exit block. */
3783 void
3784 df_update_exit_block_uses (void)
3786 bitmap_head refs;
3787 bool changed = false;
3789 bitmap_initialize (&refs, &df_bitmap_obstack);
3790 df_get_exit_block_use_set (&refs);
3791 if (df->exit_block_uses)
3793 if (!bitmap_equal_p (df->exit_block_uses, &refs))
3795 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
3796 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
3797 df_ref_chain_delete (bb_info->artificial_uses);
3798 bb_info->artificial_uses = NULL;
3799 changed = true;
3802 else
3804 struct df_scan_problem_data *problem_data
3805 = (struct df_scan_problem_data *) df_scan->problem_data;
3806 gcc_unreachable ();
3807 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3808 changed = true;
3811 if (changed)
3813 df_record_exit_block_uses (&refs);
3814 bitmap_copy (df->exit_block_uses,& refs);
3815 df_set_bb_dirty (BASIC_BLOCK_FOR_FN (cfun, EXIT_BLOCK));
3817 bitmap_clear (&refs);
3820 static bool initialized = false;
/* Initialize some platform specific structures.  Idempotent: guarded
   by the file-scope INITIALIZED flag, so repeated calls are no-ops.  */

void
df_hard_reg_init (void)
{
#ifdef ELIMINABLE_REGS
  int i;
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  if (initialized)
    return;

  /* Record which registers will be eliminated.  We use this in
     mark_used_regs.  */
  CLEAR_HARD_REG_SET (elim_reg_set);

#ifdef ELIMINABLE_REGS
  for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
    SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
#else
  /* Without ELIMINABLE_REGS only the frame pointer can be eliminated.  */
  SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
#endif

  initialized = true;
}
3850 /* Recompute the parts of scanning that are based on regs_ever_live
3851 because something changed in that array. */
3853 void
3854 df_update_entry_exit_and_calls (void)
3856 basic_block bb;
3858 df_update_entry_block_defs ();
3859 df_update_exit_block_uses ();
3861 /* The call insns need to be rescanned because there may be changes
3862 in the set of registers clobbered across the call. */
3863 FOR_EACH_BB_FN (bb, cfun)
3865 rtx_insn *insn;
3866 FOR_BB_INSNS (bb, insn)
3868 if (INSN_P (insn) && CALL_P (insn))
3869 df_insn_rescan (insn);
3875 /* Return true if hard REG is actually used in the some instruction.
3876 There are a fair number of conditions that affect the setting of
3877 this array. See the comment in df.h for df->hard_regs_live_count
3878 for the conditions that this array is set. */
3880 bool
3881 df_hard_reg_used_p (unsigned int reg)
3883 return df->hard_regs_live_count[reg] != 0;
3887 /* A count of the number of times REG is actually used in the some
3888 instruction. There are a fair number of conditions that affect the
3889 setting of this array. See the comment in df.h for
3890 df->hard_regs_live_count for the conditions that this array is
3891 set. */
3894 unsigned int
3895 df_hard_reg_used_count (unsigned int reg)
3897 return df->hard_regs_live_count[reg];
3901 /* Get the value of regs_ever_live[REGNO]. */
3903 bool
3904 df_regs_ever_live_p (unsigned int regno)
3906 return regs_ever_live[regno];
3910 /* Set regs_ever_live[REGNO] to VALUE. If this cause regs_ever_live
3911 to change, schedule that change for the next update. */
3913 void
3914 df_set_regs_ever_live (unsigned int regno, bool value)
3916 if (regs_ever_live[regno] == value)
3917 return;
3919 regs_ever_live[regno] = value;
3920 if (df)
3921 df->redo_entry_and_exit = true;
3925 /* Compute "regs_ever_live" information from the underlying df
3926 information. Set the vector to all false if RESET. */
3928 void
3929 df_compute_regs_ever_live (bool reset)
3931 unsigned int i;
3932 bool changed = df->redo_entry_and_exit;
3934 if (reset)
3935 memset (regs_ever_live, 0, sizeof (regs_ever_live));
3937 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3938 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
3940 regs_ever_live[i] = true;
3941 changed = true;
3943 if (changed)
3944 df_update_entry_exit_and_calls ();
3945 df->redo_entry_and_exit = false;
3949 /*----------------------------------------------------------------------------
3950 Dataflow ref information verification functions.
3952 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
3953 df_reg_chain_verify_unmarked (refs)
3954 df_refs_verify (vec<stack, va_df_ref>, ref*, bool)
3955 df_mws_verify (mw*, mw*, bool)
3956 df_insn_refs_verify (collection_rec, bb, insn, bool)
3957 df_bb_refs_verify (bb, refs, bool)
3958 df_bb_verify (bb)
3959 df_exit_block_bitmap_verify (bool)
3960 df_entry_block_bitmap_verify (bool)
3961 df_scan_verify ()
3962 ----------------------------------------------------------------------------*/
3965 /* Mark all refs in the reg chain. Verify that all of the registers
3966 are in the correct chain. */
3968 static unsigned int
3969 df_reg_chain_mark (df_ref refs, unsigned int regno,
3970 bool is_def, bool is_eq_use)
3972 unsigned int count = 0;
3973 df_ref ref;
3974 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
3976 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
3978 /* If there are no def-use or use-def chains, make sure that all
3979 of the chains are clear. */
3980 if (!df_chain)
3981 gcc_assert (!DF_REF_CHAIN (ref));
3983 /* Check to make sure the ref is in the correct chain. */
3984 gcc_assert (DF_REF_REGNO (ref) == regno);
3985 if (is_def)
3986 gcc_assert (DF_REF_REG_DEF_P (ref));
3987 else
3988 gcc_assert (!DF_REF_REG_DEF_P (ref));
3990 if (is_eq_use)
3991 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
3992 else
3993 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
3995 if (DF_REF_NEXT_REG (ref))
3996 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
3997 count++;
3998 DF_REF_REG_MARK (ref);
4000 return count;
4004 /* Verify that all of the registers in the chain are unmarked. */
4006 static void
4007 df_reg_chain_verify_unmarked (df_ref refs)
4009 df_ref ref;
4010 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4011 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
/* Verify that NEW_REC and OLD_REC have exactly the same members.
   NEW_REC is the freshly collected vector of refs; OLD_REC is the
   head of the existing ref chain.  Returns true iff they match
   element-for-element (never returns false when ABORT_IF_FAIL).  */

static bool
df_refs_verify (const vec<df_ref, va_heap> *new_rec, df_ref old_rec,
		bool abort_if_fail)
{
  unsigned int ix;
  df_ref new_ref;

  /* Walk the new vector and the old chain in lockstep.  */
  FOR_EACH_VEC_ELT (*new_rec, ix, new_ref)
    {
      if (old_rec == NULL || !df_ref_equal_p (new_ref, old_rec))
	{
	  if (abort_if_fail)
	    gcc_assert (0);
	  else
	    return false;
	}

      /* Abort if fail is called from the function level verifier.  If
	 that is the context, mark this reg as being seen.  */
      if (abort_if_fail)
	{
	  gcc_assert (DF_REF_IS_REG_MARKED (old_rec));
	  DF_REF_REG_UNMARK (old_rec);
	}

      old_rec = DF_REF_NEXT_LOC (old_rec);
    }

  /* Both sequences must end together; leftover old refs are extras.  */
  if (abort_if_fail)
    gcc_assert (old_rec == NULL);
  else
    return old_rec == NULL;
  return false;
}
4053 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4055 static bool
4056 df_mws_verify (const vec<df_mw_hardreg *, va_heap> *new_rec,
4057 struct df_mw_hardreg *old_rec,
4058 bool abort_if_fail)
4060 unsigned int ix;
4061 struct df_mw_hardreg *new_reg;
4063 FOR_EACH_VEC_ELT (*new_rec, ix, new_reg)
4065 if (old_rec == NULL || !df_mw_equal_p (new_reg, old_rec))
4067 if (abort_if_fail)
4068 gcc_assert (0);
4069 else
4070 return false;
4072 old_rec = DF_MWS_NEXT (old_rec);
4075 if (abort_if_fail)
4076 gcc_assert (old_rec == NULL);
4077 else
4078 return old_rec == NULL;
4079 return false;
4083 /* Return true if the existing insn refs information is complete and
4084 correct. Otherwise (i.e. if there's any missing or extra refs),
4085 return the correct df_ref chain in REFS_RETURN.
4087 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4088 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4089 verification mode instead of the whole function, so unmark
4090 everything.
4092 If ABORT_IF_FAIL is set, this function never returns false. */
4094 static bool
4095 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4096 basic_block bb,
4097 rtx_insn *insn,
4098 bool abort_if_fail)
4100 bool ret1, ret2, ret3, ret4;
4101 unsigned int uid = INSN_UID (insn);
4102 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
4104 df_insn_refs_collect (collection_rec, bb, insn_info);
4106 /* Unfortunately we cannot opt out early if one of these is not
4107 right because the marks will not get cleared. */
4108 ret1 = df_refs_verify (&collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4109 abort_if_fail);
4110 ret2 = df_refs_verify (&collection_rec->use_vec, DF_INSN_UID_USES (uid),
4111 abort_if_fail);
4112 ret3 = df_refs_verify (&collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4113 abort_if_fail);
4114 ret4 = df_mws_verify (&collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4115 abort_if_fail);
4116 return (ret1 && ret2 && ret3 && ret4);
4120 /* Return true if all refs in the basic block are correct and complete.
4121 Due to df_ref_chain_verify, it will cause all refs
4122 that are verified to have DF_REF_MARK bit set. */
4124 static bool
4125 df_bb_verify (basic_block bb)
4127 rtx_insn *insn;
4128 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4129 struct df_collection_rec collection_rec;
4131 gcc_assert (bb_info);
4133 /* Scan the block, one insn at a time, from beginning to end. */
4134 FOR_BB_INSNS_REVERSE (bb, insn)
4136 if (!INSN_P (insn))
4137 continue;
4138 df_insn_refs_verify (&collection_rec, bb, insn, true);
4139 df_free_collection_rec (&collection_rec);
4142 /* Do the artificial defs and uses. */
4143 df_bb_refs_collect (&collection_rec, bb);
4144 df_refs_verify (&collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4145 df_refs_verify (&collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4146 df_free_collection_rec (&collection_rec);
4148 return true;
4152 /* Returns true if the entry block has correct and complete df_ref set.
4153 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4155 static bool
4156 df_entry_block_bitmap_verify (bool abort_if_fail)
4158 bitmap_head entry_block_defs;
4159 bool is_eq;
4161 bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
4162 df_get_entry_block_def_set (&entry_block_defs);
4164 is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
4166 if (!is_eq && abort_if_fail)
4168 fprintf (stderr, "entry_block_defs = ");
4169 df_print_regset (stderr, &entry_block_defs);
4170 fprintf (stderr, "df->entry_block_defs = ");
4171 df_print_regset (stderr, df->entry_block_defs);
4172 gcc_assert (0);
4175 bitmap_clear (&entry_block_defs);
4177 return is_eq;
4181 /* Returns true if the exit block has correct and complete df_ref set.
4182 If not it either aborts if ABORT_IF_FAIL is true or returns false. */
4184 static bool
4185 df_exit_block_bitmap_verify (bool abort_if_fail)
4187 bitmap_head exit_block_uses;
4188 bool is_eq;
4190 bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
4191 df_get_exit_block_use_set (&exit_block_uses);
4193 is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
4195 if (!is_eq && abort_if_fail)
4197 fprintf (stderr, "exit_block_uses = ");
4198 df_print_regset (stderr, &exit_block_uses);
4199 fprintf (stderr, "df->exit_block_uses = ");
4200 df_print_regset (stderr, df->exit_block_uses);
4201 gcc_assert (0);
4204 bitmap_clear (&exit_block_uses);
4206 return is_eq;
4210 /* Return true if df_ref information for all insns in all blocks are
4211 correct and complete. */
4213 void
4214 df_scan_verify (void)
4216 unsigned int i;
4217 basic_block bb;
4218 bitmap_head regular_block_artificial_uses;
4219 bitmap_head eh_block_artificial_uses;
4221 if (!df)
4222 return;
4224 /* Verification is a 4 step process. */
4226 /* (1) All of the refs are marked by going through the reg chains. */
4227 for (i = 0; i < DF_REG_SIZE (df); i++)
4229 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4230 == DF_REG_DEF_COUNT (i));
4231 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4232 == DF_REG_USE_COUNT (i));
4233 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4234 == DF_REG_EQ_USE_COUNT (i));
4237 /* (2) There are various bitmaps whose value may change over the
4238 course of the compilation. This step recomputes them to make
4239 sure that they have not slipped out of date. */
4240 bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
4241 bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
4243 df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
4244 df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
4246 bitmap_ior_into (&eh_block_artificial_uses,
4247 &regular_block_artificial_uses);
4249 /* Check artificial_uses bitmaps didn't change. */
4250 gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
4251 &df->regular_block_artificial_uses));
4252 gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
4253 &df->eh_block_artificial_uses));
4255 bitmap_clear (&regular_block_artificial_uses);
4256 bitmap_clear (&eh_block_artificial_uses);
4258 /* Verify entry block and exit block. These only verify the bitmaps,
4259 the refs are verified in df_bb_verify. */
4260 df_entry_block_bitmap_verify (true);
4261 df_exit_block_bitmap_verify (true);
4263 /* (3) All of the insns in all of the blocks are traversed and the
4264 marks are cleared both in the artificial refs attached to the
4265 blocks and the real refs inside the insns. It is a failure to
4266 clear a mark that has not been set as this means that the ref in
4267 the block or insn was not in the reg chain. */
4269 FOR_ALL_BB_FN (bb, cfun)
4270 df_bb_verify (bb);
4272 /* (4) See if all reg chains are traversed a second time. This time
4273 a check is made that the marks are clear. A set mark would be a
4274 from a reg that is not in any insn or basic block. */
4276 for (i = 0; i < DF_REG_SIZE (df); i++)
4278 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4279 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4280 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));